diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..027bdb1
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,14 @@
+# EditorConfig is awesome: https://EditorConfig.org
+
+root = true
+
+[*]
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.py]
+charset = utf-8
+indent_style = space
+indent_size = 4
+max_line_length = 88
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 0000000..befa060
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,5 @@
+# Run code through yapf
+19a821d5f1ff9079f9a40d27553182a433a27834
+
+# Run code through black
+0d9e3581d57f376865f49ae62fe9171789beca56
diff --git a/.gitignore b/.gitignore
index 029341d..9321436 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,90 +1,48 @@
+#
+# OS-specific
+#
+
 .DS_Store
-# Byte-compiled / optimized / DLL files
-__pycache__/
+
+#
+# Language specific
+#
+
+# Python
 *.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-env/
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
 *.egg-info/
-.installed.cfg
-*.egg
-
-# PyInstaller
-#  Usually these files are written by a python script from a template
-#  before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*,cover
-.hypothesis/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-#Ipython Notebook
-.ipynb_checkpoints
-
-# pyenv
-.python-version
-
-# PyCharm
-.idea/
-
-# IntelliJ
-*.iml
-
-# VSCode
-/.vscode
-
-# Python virtual environment
+/build/
 /.venv
+/.mypy_cache
 
-# antlion configuration files
+#
+# Editors
+#
+
+/.idea/
+/.vscode/
+
+#
+# antlion
+#
+
+# Configuration
 /*.json
 /*.yaml
 /config/
 
-# antlion runtime files
+# Generated during run-time
 /logs
 
 # Local development scripts
 /*.sh
+!/format.sh
+
+#
+# third_party
+#
+
+/third_party/*
+!/third_party/github.com/
+!/third_party/github.com/jd/tenacity
+/third_party/github.com/jd/tenacity/src
diff --git a/BUILD.gn b/BUILD.gn
new file mode 100644
index 0000000..7ee2411
--- /dev/null
+++ b/BUILD.gn
@@ -0,0 +1,217 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Declare Fuchsia build targets for using antlion from the Fuchsia tree.
+# Requires additional configuration of jiri fetch attributes from your Fuchsia
+# checkout:
+#   `jiri init -fetch-optional=antlion`
+
+import("//build/python/python_library.gni")
+
+# Tests for full build validation
+group("e2e_tests") {
+  testonly = true
+  public_deps = [ "tests:e2e_tests" ]
+}
+
+# Subset of tests to validate builds in under 15 minutes.
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [ "tests:e2e_tests_quick" ]
+}
+
+# Tests for at-desk custom validation
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [ "tests:e2e_tests_manual" ]
+}
+
+# deprecated: prefer e2e_tests_quick
+group("smoke_tests") {
+  testonly = true
+  public_deps = [ ":e2e_tests_quick" ]
+}
+
+# Unit tests only
+group("tests") {
+  testonly = true
+  public_deps = [ "runner:tests" ]
+}
+
+python_library("antlion") {
+  source_root = "//third_party/antlion/packages/antlion"
+  testonly = true
+  sources = [
+    "__init__.py",
+    "base_test.py",
+    "capabilities/__init__.py",
+    "capabilities/ssh.py",
+    "context.py",
+    "controllers/__init__.py",
+    "controllers/access_point.py",
+    "controllers/adb.py",
+    "controllers/adb_lib/__init__.py",
+    "controllers/adb_lib/error.py",
+    "controllers/android_device.py",
+    "controllers/android_lib/__init__.py",
+    "controllers/android_lib/errors.py",
+    "controllers/android_lib/events.py",
+    "controllers/android_lib/logcat.py",
+    "controllers/android_lib/services.py",
+    "controllers/ap_lib/__init__.py",
+    "controllers/ap_lib/ap_get_interface.py",
+    "controllers/ap_lib/ap_iwconfig.py",
+    "controllers/ap_lib/bridge_interface.py",
+    "controllers/ap_lib/dhcp_config.py",
+    "controllers/ap_lib/dhcp_server.py",
+    "controllers/ap_lib/extended_capabilities.py",
+    "controllers/ap_lib/hostapd.py",
+    "controllers/ap_lib/hostapd_ap_preset.py",
+    "controllers/ap_lib/hostapd_bss_settings.py",
+    "controllers/ap_lib/hostapd_config.py",
+    "controllers/ap_lib/hostapd_constants.py",
+    "controllers/ap_lib/hostapd_security.py",
+    "controllers/ap_lib/hostapd_utils.py",
+    "controllers/ap_lib/radio_measurement.py",
+    "controllers/ap_lib/radvd.py",
+    "controllers/ap_lib/radvd_config.py",
+    "controllers/ap_lib/radvd_constants.py",
+    "controllers/ap_lib/regulatory_channels.py",
+    "controllers/ap_lib/third_party_ap_profiles/__init__.py",
+    "controllers/ap_lib/third_party_ap_profiles/actiontec.py",
+    "controllers/ap_lib/third_party_ap_profiles/asus.py",
+    "controllers/ap_lib/third_party_ap_profiles/belkin.py",
+    "controllers/ap_lib/third_party_ap_profiles/linksys.py",
+    "controllers/ap_lib/third_party_ap_profiles/netgear.py",
+    "controllers/ap_lib/third_party_ap_profiles/securifi.py",
+    "controllers/ap_lib/third_party_ap_profiles/tplink.py",
+    "controllers/ap_lib/wireless_network_management.py",
+    "controllers/attenuator.py",
+    "controllers/attenuator_lib/__init__.py",
+    "controllers/attenuator_lib/_tnhelper.py",
+    "controllers/attenuator_lib/aeroflex/__init__.py",
+    "controllers/attenuator_lib/aeroflex/telnet.py",
+    "controllers/attenuator_lib/minicircuits/__init__.py",
+    "controllers/attenuator_lib/minicircuits/http.py",
+    "controllers/attenuator_lib/minicircuits/telnet.py",
+    "controllers/fastboot.py",
+    "controllers/fuchsia_device.py",
+    "controllers/fuchsia_lib/__init__.py",
+    "controllers/fuchsia_lib/base_lib.py",
+    "controllers/fuchsia_lib/device_lib.py",
+    "controllers/fuchsia_lib/ffx.py",
+    "controllers/fuchsia_lib/hardware_power_statecontrol_lib.py",
+    "controllers/fuchsia_lib/lib_controllers/__init__.py",
+    "controllers/fuchsia_lib/lib_controllers/wlan_controller.py",
+    "controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py",
+    "controllers/fuchsia_lib/logging_lib.py",
+    "controllers/fuchsia_lib/netstack/__init__.py",
+    "controllers/fuchsia_lib/netstack/netstack_lib.py",
+    "controllers/fuchsia_lib/package_server.py",
+    "controllers/fuchsia_lib/sl4f.py",
+    "controllers/fuchsia_lib/ssh.py",
+    "controllers/fuchsia_lib/utils_lib.py",
+    "controllers/fuchsia_lib/wlan_ap_policy_lib.py",
+    "controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py",
+    "controllers/fuchsia_lib/wlan_lib.py",
+    "controllers/fuchsia_lib/wlan_policy_lib.py",
+    "controllers/iperf_client.py",
+    "controllers/iperf_server.py",
+    "controllers/openwrt_ap.py",
+    "controllers/openwrt_lib/__init__.py",
+    "controllers/openwrt_lib/network_const.py",
+    "controllers/openwrt_lib/network_settings.py",
+    "controllers/openwrt_lib/openwrt_constants.py",
+    "controllers/openwrt_lib/wireless_config.py",
+    "controllers/openwrt_lib/wireless_settings_applier.py",
+    "controllers/packet_capture.py",
+    "controllers/packet_sender.py",
+    "controllers/pdu.py",
+    "controllers/pdu_lib/__init__.py",
+    "controllers/pdu_lib/digital_loggers/__init__.py",
+    "controllers/pdu_lib/digital_loggers/webpowerswitch.py",
+    "controllers/pdu_lib/synaccess/__init__.py",
+    "controllers/pdu_lib/synaccess/np02b.py",
+    "controllers/sl4a_lib/__init__.py",
+    "controllers/sl4a_lib/error_reporter.py",
+    "controllers/sl4a_lib/event_dispatcher.py",
+    "controllers/sl4a_lib/rpc_client.py",
+    "controllers/sl4a_lib/rpc_connection.py",
+    "controllers/sl4a_lib/sl4a_manager.py",
+    "controllers/sl4a_lib/sl4a_ports.py",
+    "controllers/sl4a_lib/sl4a_session.py",
+    "controllers/sniffer.py",
+    "controllers/sniffer_lib/__init__.py",
+    "controllers/sniffer_lib/local/__init__.py",
+    "controllers/sniffer_lib/local/local_base.py",
+    "controllers/sniffer_lib/local/tcpdump.py",
+    "controllers/sniffer_lib/local/tshark.py",
+    "controllers/utils_lib/__init__.py",
+    "controllers/utils_lib/commands/__init__.py",
+    "controllers/utils_lib/commands/command.py",
+    "controllers/utils_lib/commands/ip.py",
+    "controllers/utils_lib/commands/journalctl.py",
+    "controllers/utils_lib/commands/pgrep.py",
+    "controllers/utils_lib/commands/route.py",
+    "controllers/utils_lib/commands/shell.py",
+    "controllers/utils_lib/host_utils.py",
+    "controllers/utils_lib/ssh/__init__.py",
+    "controllers/utils_lib/ssh/connection.py",
+    "controllers/utils_lib/ssh/formatter.py",
+    "controllers/utils_lib/ssh/settings.py",
+    "decorators.py",
+    "error.py",
+    "event/__init__.py",
+    "event/decorators.py",
+    "event/event.py",
+    "event/event_bus.py",
+    "event/event_subscription.py",
+    "event/subscription_handle.py",
+    "keys.py",
+    "libs/__init__.py",
+    "libs/logging/__init__.py",
+    "libs/logging/log_stream.py",
+    "libs/ota/__init__.py",
+    "libs/ota/ota_runners/__init__.py",
+    "libs/ota/ota_runners/ota_runner.py",
+    "libs/ota/ota_runners/ota_runner_factory.py",
+    "libs/ota/ota_tools/__init__.py",
+    "libs/ota/ota_tools/adb_sideload_ota_tool.py",
+    "libs/ota/ota_tools/ota_tool.py",
+    "libs/ota/ota_tools/ota_tool_factory.py",
+    "libs/ota/ota_tools/update_device_ota_tool.py",
+    "libs/ota/ota_updater.py",
+    "libs/proc/__init__.py",
+    "libs/proc/job.py",
+    "libs/proc/process.py",
+    "logger.py",
+    "net.py",
+    "runner.py",
+    "test_utils/__init__.py",
+    "test_utils/abstract_devices/__init__.py",
+    "test_utils/abstract_devices/wlan_device.py",
+    "test_utils/abstract_devices/wmm_transceiver.py",
+    "test_utils/dhcp/__init__.py",
+    "test_utils/dhcp/base_test.py",
+    "test_utils/fuchsia/__init__.py",
+    "test_utils/fuchsia/utils.py",
+    "test_utils/fuchsia/wmm_test_cases.py",
+    "test_utils/net/__init__.py",
+    "test_utils/net/connectivity_const.py",
+    "test_utils/net/net_test_utils.py",
+    "test_utils/wifi/__init__.py",
+    "test_utils/wifi/base_test.py",
+    "test_utils/wifi/wifi_constants.py",
+    "test_utils/wifi/wifi_test_utils.py",
+    "types.py",
+    "utils.py",
+    "validation.py",
+  ]
+  library_deps = [
+    "third_party/github.com/jd/tenacity",
+    "//src/testing/end_to_end/honeydew",
+    "//third_party/mobly",
+    "//third_party/pyyaml:yaml",
+  ]
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a9c7f67..0c36022 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,20 +10,79 @@
 
 ## [Unreleased]
 
-### Added
-
-### Changed
+[unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.3.0..refs/heads/main
 
 ### Removed
 
-### Fixed
+- [BREAKING CHANGE] Support for Python 3.8, 3.9, and 3.10. The minimum supported
+version of Python is now 3.11. If running antlion as part of the Fuchsia tree,
+nothing is required; Python 3.11 is vendored with Fuchsia and will be found by
+GN. If running antlion out of tree, ensure your Python version is at least 3.11.
+- `WlanRvrTest` user params `debug_pre_traffic_cmd` and `debug_post_traffic_cmd`
 
-[unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/heads/main
+## [0.3.0] - 2023-05-17
 
-## [0.2.0] - 2022-01-03
+[0.3.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/tags/v0.3.0
+
+### Deprecated
+
+- **Support for ACTS JSON configs; instead, use Mobly YAML configs.** To
+ease this transition, upon running `act.py`, a compatible YAML config will be
+generated for you and placed next to your JSON config.
+- **The `act.py` binary; instead, invoke tests directly.** Upon running
+`act.py`, a deprecation warning will provide instructions for how to invoke
+antlion tests without act.py and with the newly generated YAML config.
 
 ### Added
 
+- Presubmit testing in [CV] (aka CQ). All tests specified with the `qemu_env`
+environment will run before every antlion CL is submitted.
+- Postsubmit testing in [CI]. See [Milo] for an exhaustive list of builders.
+- [EditorConfig] file for consistent coding styles.
+Installing an EditorConfig plugin for your editor is highly recommended.
+
+[CV]: https://chromium.googlesource.com/infra/luci/luci-go/+/refs/heads/main/cv/README.md
+[CI]: https://chromium.googlesource.com/chromium/src/+/master/docs/tour_of_luci_ui.md
+[Milo]: https://luci-milo.appspot.com/ui/search?q=antlion
+[EditorConfig]: https://editorconfig.org
+
+### Changed
+
+- Default test execution from ACTS to Mobly. `antlion_host_test()` now invokes
+the test file directly using the Mobly test runner, rather than using `act.py`.
+  - All tests have been refactored to allow direct running with the Mobly test
+  runner.
+  - `act.py` now converts ACTS JSON config to compatible Mobly YAML config. The
+  resulting config is passed directly to Mobly's config parser. See notes for
+  this release's deprecations above.
+- Generate YAML config instead of JSON config from antlion-runner.
+- `FuchsiaDevice.authorized_file_loc` config field is now optional. This field
+is only used during `FlashTest`; it is not used when the device is already
+provisioned (e.g. when tests are dispatched in Fuchsia infrastructure).
+
+### Removed
+
+- Unused controllers and tests (full list)
+
+### Fixed
+
+- Failure to stop session_manager using ffx in `WlanRebootTest` ([@patricklu],
+[bug](http://b/267330535))
+- Failure to parse 'test_name' in DHCP configuration file in `Dhcpv4InteropTest`
+(invalid option) introduced by previous refactor ([@patricklu],
+[bug](http://b/232574848))
+- Logging for `Dhcpv4InteropTest` changed to utilize a temp file instead of
+/var/log/messages to fix test error with duplicate PID log messages
+([@patricklu], [bug](http://b/232574848))
+
+## [0.2.0] - 2023-01-03
+
+[0.2.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0..refs/tags/v0.2.0
+
+### Added
+
+- Added snapshots before reboot and during test teardown in `WlanRebootTest`
+([@patricklu], [bug](http://b/273923552))
 - Download radvd logs from AP for debugging IPv6 address allocation
 - Optional `wlan_features` config field to `FuchsiaDevice` for declaring which
 WLAN features the device supports, such as BSS Transition Management
@@ -32,12 +91,12 @@
 
 - All path config options in `FuchsiaDevice` expand the home directory (`~`) and
 environmental variables
-	- Used by `ssh_priv_key`, `authorized_file_loc`, and `ffx_binary_path` for
-	sensible defaults using `$FUCHSIA_DIR`
+  - Used by `ssh_priv_key`, `authorized_file_loc`, and `ffx_binary_path` for
+  sensible defaults using `$FUCHSIA_DIR`
 - Running tests works out of the box without specifying `--testpaths`
-	- Moved `tests` and `unit_tests` to the `antlion` package, enabling
-	straight-forward packaging of tests.
-	- Merged `antlion` and `antlion_contrib` packages
+  - Moved `tests` and `unit_tests` to the `antlion` package, enabling
+  straight-forward packaging of tests.
+  - Merged `antlion` and `antlion_contrib` packages
 - Converted several required dependencies to optional dependencies:
   - `bokeh` is only needed for producing HTML graphing. If this feature is
   desired, install antlion with the bokeh option: `pip install ".[bokeh]"`
@@ -57,19 +116,19 @@
 - Failure to acquire IPv6 address in `WlanRebootTest` ([bug](http://b/256009189))
 - Typo in `ChannelSweepTest` preventing use of iPerf ([@patricklu])
 - "Country code never updated" error affecting all Fuchsia ToT builds
-([@karlward], [bug](https://fxbug.dev/116500))
+([@karlward], [bug](https://fxbug.dev/42067674))
 - Parsing new stderr format from `ffx component destroy` ([@karlward],
-[bug](https://fxbug.dev/116544))
+[bug](https://fxbug.dev/42067722))
 - "Socket operation on non-socket" error during initialization of ffx on MacOS
-([@karlward], [bug](https://fxbug.dev/116626))
+([@karlward], [bug](https://fxbug.dev/42067812))
 - Python 3.8 support for IPv6 scope IDs ([bug](http://b/261746355))
 
-[0.2.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0..refs/tags/v0.2.0
-
 ## [0.1.0] - 2022-11-28
 
 Forked from ACTS with the following changes
 
+[0.1.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0
+
 ### Added
 
 - A modern approach to installation using `pyproject.toml` via `pip install .`
@@ -80,6 +139,8 @@
 - Package and import names from ACTS to antlion
 - Copyright notice from AOSP to Fuchsia Authors
 
+[src-layout]: https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#src-layout
+
 ### Deprecated
 
 - Use of the `setup.py` script. This is only used to keep infrastructure
@@ -98,9 +159,6 @@
 - KeyError for 'mac_addr' in WlanDeprecatedConfigurationTest ([@sakuma],
 [bug](http://b/237709921))
 
-[0.1.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0
-[src-layout]: https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#src-layout
-
 [@sakuma]: https://fuchsia-review.git.corp.google.com/q/owner:sakuma%2540google.com
 [@patricklu]: https://fuchsia-review.git.corp.google.com/q/owner:patricklu%2540google.com
 [@karlward]: https://fuchsia-review.git.corp.google.com/q/owner:karlward%2540google.com
diff --git a/MANIFEST.in b/MANIFEST.in
index a8ad1bb..a6caf7f 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,4 @@
 include setup.py README.md
-recursive-include src/antlion *
+recursive-include packages/antlion *
 global-exclude .DS_Store
 global-exclude *.pyc
diff --git a/README.md b/README.md
index be529cf..74c5a6d 100644
--- a/README.md
+++ b/README.md
@@ -7,13 +7,91 @@
 
 [TOC]
 
-[Docs]: http://go/fxca
+[Docs]: http://go/antlion
 [Report Bug]: http://go/conn-test-bug
 [Request Feature]: http://b/issues/new?component=1182297&template=1680893
 
-## Getting Started
+## Getting started with QEMU
 
-Requires Python 3.8+
+The quickest way to run antlion is by using the Fuchsia QEMU emulator. This
+enables antlion tests that do not require hardware-specific capabilities like
+WLAN. This is especially useful to verify if antlion builds and runs without
+syntax errors. If you require WLAN capabilities, see
+[below](#running-with-a-local-physical-device).
+
+1. [Checkout Fuchsia](https://fuchsia.dev/fuchsia-src/get-started/get_fuchsia_source)
+
+2. Configure and build Fuchsia to run antlion tests virtually on QEMU
+
+   ```sh
+   fx set core.qemu-x64 \
+      --with //src/testing/sl4f \
+      --with //src/sys/bin/start_sl4f \
+      --args 'core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
+      --with-host //third_party/antlion:e2e_tests_quick
+   fx build
+   ```
+
+3. In a separate terminal, run the emulator with networking enabled
+
+   ```sh
+   ffx emu stop && ffx emu start -H --net tap && ffx log
+   ```
+
+4. In a separate terminal, run a package server
+
+   ```sh
+   fx serve
+   ```
+
+5. Run an antlion test
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/tests/examples:sl4f_sanity_test
+   ```
+
+## Running with a local physical device
+
+A physical device is required for most antlion tests, which rely on physical I/O
+such as WLAN and Bluetooth. Antlion is designed to make testing physical devices
+as easy, reliable, and reproducible as possible. The device will be discovered
+using mDNS, so make sure your host machine has a network connection to the
+device.
+
+1. Configure and build Fuchsia for your target with the following extra
+   arguments:
+
+   ```sh
+   fx set core.my-super-cool-product \
+      --with //src/testing/sl4f \
+      --with //src/sys/bin/start_sl4f \
+      --args='core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
+      --with-host //third_party/antlion:e2e_tests
+   fx build
+   ```
+
+2. Flash your device with the new build
+
+3. In a separate terminal, run a package server
+
+   ```sh
+   fx serve
+   ```
+
+4. Run an antlion test
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/tests/functional:ping_stress_test
+   ```
+
+> Local auxiliary devices are not yet supported by `antlion-runner`, which is
+> responsible for generating Mobly configs. In the meantime, see the
+> section below for manually crafting Mobly configs to support auxiliary
+> devices.
+
+## Running without a Fuchsia checkout
+
+Requires Python 3.11+
 
 1. Clone the repo
 
@@ -25,52 +103,81 @@
 
    ```sh
    cd antlion
-   python3 -m venv .venv  # creates a "virtual environment" in the `.venv` directory
-   source .venv/bin/activate  # activates the virtual environment. Run `deactivate` to exit it later
-   pip install --editable ".[dev,test]"
+   python3 -m venv .venv      # Create a virtual environment in the `.venv` directory
+   source .venv/bin/activate  # Activate the virtual environment
+   pip install --editable ".[mdns]"
+   # Run `deactivate` later to exit the virtual environment
    ```
 
 3. Write the sample config and update the Fuchsia controller to match your
    development environment
 
    ```sh
-   mkdir -p config
-   cat <<EOF > config/simple.json
-   {
-      "testbed": [{
-         "name": "simple_testbed",
-         "FuchsiaDevice": [{
-            "ip": "fuchsia-00e0-4c01-04df"
-         }]
-      }],
-      "logpath": "logs"
-   }
+   cat <<EOF > simple-config.yaml
+   TestBeds:
+   - Name: antlion-runner
+     Controllers:
+       FuchsiaDevice:
+       - ip: fuchsia-00e0-4c01-04df
+   MoblyParams:
+     LogPath: logs
    EOF
    ```
 
+   Replace `fuchsia-00e0-4c01-04df` with your device's nodename, or
+   `fuchsia-emulator` if using an emulator. The nodename can be found by looking
+   for a log similar to the one below.
+
+   ```text
+   [0.524][klog][klog][I] netsvc: nodename='fuchsia-emulator'
+   ```
+
 4. Run the sanity test
 
    ```sh
-   antlion -c config/simple.json -tc Sl4fSanityTest
+   python tests/examples/Sl4fSanityTest.py -c simple-config.yaml
    ```
 
-See `antlion -h` for more full usage.
-
 ## Contributing
 
-Contributions are what make open source a great place to learn, inspire, and
-create. Any contributions you make are **greatly appreciated**.
+Contributions are what make open source projects a great place to learn,
+inspire, and create. Any contributions you make are **greatly appreciated**.
+If you have a suggestion that would make this better, please create a CL.
 
-If you have a suggestion that would make this better, please create a pull
-request.
+Before contributing, additional setup is necessary:
 
-1. Create a feature branch (`git checkout -b feature/amazing-feature`)
-2. Document your change in `CHANGELOG.md`
-3. Commit changes (`git commit -m 'Add some amazing feature'`)
-4. Upload CL (`git push origin HEAD:refs/for/main`)
+- Install developer Python packages for formatting and linting
+
+  ```sh
+  pip install --editable ".[dev]"
+  ```
+
+- Install an [EditorConfig](https://editorconfig.org/) plugin for consistent
+  whitespace
+
+- Complete the steps in '[Contribute source changes]' to gain authorization to
+  upload CLs to Fuchsia's Gerrit.
+
+To create a CL:
+
+1. Create a branch (`git checkout -b feature/amazing-feature`)
+2. Make changes
+3. Document the changes in `CHANGELOG.md`
+4. Auto-format changes (`./format.sh`)
+
+   > Note: antlion follows the [Black code style] (rather than the
+   > [Google Python Style Guide])
+
+5. Verify no typing errors (`mypy .`)
+6. Commit changes (`git add . && git commit -m 'Add some amazing feature'`)
+7. Upload CL (`git push origin HEAD:refs/for/main`)
 
 > A public bug tracker is not (yet) available.
 
+[Black code style]: https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html
+[Google Python Style Guide]: https://google.github.io/styleguide/pyguide.html
+[Contribute source changes]: https://fuchsia.dev/fuchsia-src/development/source_code/contribute_changes#prerequisites
+
 ### Recommended git aliases
 
 There are a handful of git commands that will be commonly used throughout the
@@ -87,6 +194,13 @@
   uc = push origin HEAD:refs/for/main%l=Commit-Queue+1,l=Fuchsia-Auto-Submit+1,publish-comments,r=sbalana
 ```
 
+You may also want to add a section to ignore the project's large formatting changes:
+
+```gitconfig
+[blame]
+  ignoreRevsFile = .git-blame-ignore-revs
+```
+
 ## License
 
 Distributed under the Apache 2.0 License. See `LICENSE` for more information.
diff --git a/antlion_host_test.gni b/antlion_host_test.gni
new file mode 100644
index 0000000..d9bdd89
--- /dev/null
+++ b/antlion_host_test.gni
@@ -0,0 +1,195 @@
+# Copyright 2024 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/host.gni")
+import("//build/python/python_binary.gni")
+import("//build/rust/rustc_binary.gni")
+import("//build/testing/host_test.gni")
+import("//build/testing/host_test_data.gni")
+
+# Declares a host-side antlion test.
+#
+# Examples
+#
+# ```
+# antlion_host_test("sl4f_sanity_test") {
+#   main_source = "Sl4fSanityTest.py"
+# }
+#
+# antlion_host_test("wlan_rvr_test_2g") {
+#   main_source = "WlanRvrTest.py"
+#   test_params = "rvr_settings.yaml"
+#   test_cases = [ "test_rvr_11n_2g_*" ]
+# }
+# ```
+#
+# Parameters
+#
+#  main_source
+#    The .py file defining the antlion test.
+#    Type: path
+#
+#  sources (optional)
+#    Other files that are used in the test.
+#    Type: list(path)
+#    Default: empty list
+#
+#  test_params (optional)
+#    Path to a YAML file with additional test parameters. This will be provided
+#    to the test in the antlion config under the "test_params" key.
+#    Type: string
+#
+#  test_cases (optional)
+#    List of test cases to run. Defaults to running all test cases.
+#    Type: list(string)
+#
+#  enable_honeydew (optional)
+#    Flag to enable using honeydew package
+#    Type: bool
+#
+#  test_data_deps (optional)
+#    List of test data GN targets that are needed at runtime.
+#    Type: list(string)
+#    Default: empty list
+#
+#   deps
+#   environments
+#   visibility
+template("antlion_host_test") {
+  assert(defined(invoker.main_source), "main_source is required")
+
+  #
+  # Define antlion test python_binary().
+  #
+  _python_binary_name = "${target_name}.pyz"
+  _python_binary_target = "${target_name}_python_binary"
+  python_binary(_python_binary_target) {
+    forward_variables_from(invoker,
+                           [
+                             "main_source",
+                             "sources",
+                           ])
+    output_name = _python_binary_name
+    main_callable = "test_runner.main" # Mobly-specific entry point.
+    deps = [ "//third_party/antlion" ]
+    testonly = true
+    visibility = [ ":*" ]
+  }
+
+  _test_dir = "${root_out_dir}/test_data/" + get_label_info(target_name, "dir")
+
+  #
+  # Define antlion test host_test_data().
+  #
+  _host_test_data_target = "${target_name}_test_data"
+  host_test_data(_host_test_data_target) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ get_label_info(":${_python_binary_target}", "target_out_dir") +
+                "/${_python_binary_name}" ]
+    outputs = [ "${_test_dir}/${_python_binary_name}" ]
+    deps = [ ":${_python_binary_target}" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  #
+  # Define SSH binary host_test_data().
+  #
+  _host_test_data_ssh = "${target_name}_test_data_ssh"
+  host_test_data(_host_test_data_ssh) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ "//prebuilt/third_party/openssh-portable/${host_os}-${host_cpu}/bin/ssh" ]
+    outputs = [ "${_test_dir}/ssh" ]
+  }
+
+  #
+  # Define Mobly test params YAML host_test_data().
+  #
+  if (defined(invoker.test_params)) {
+    _host_test_data_test_params = "${target_name}_test_data_test_params"
+    host_test_data(_host_test_data_test_params) {
+      testonly = true
+      visibility = [ ":*" ]
+      sources = [ invoker.test_params ]
+      outputs = [ "${_test_dir}/${invoker.test_params}" ]
+    }
+  }
+
+  #
+  # Define FFX binary host_test_data().
+  #
+  _host_test_data_ffx = "${target_name}_test_data_ffx"
+  host_test_data(_host_test_data_ffx) {
+    testonly = true
+    visibility = [ ":*" ]
+    sources = [ get_label_info("//src/developer/ffx", "root_out_dir") + "/ffx" ]
+    outputs = [ "${_test_dir}/ffx" ]
+    deps = [ "//src/developer/ffx:ffx_bin($host_toolchain)" ]
+  }
+
+  #
+  # Define the antlion host_test() using antlion-runner.
+  #
+  host_test(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "environments",
+                             "visibility",
+                           ])
+
+    binary_path = "${root_out_dir}/antlion-runner"
+
+    args = [
+      "--python-bin",
+      rebase_path(python_exe_src, root_build_dir),
+      "--antlion-pyz",
+      rebase_path("${_test_dir}/${_python_binary_name}", root_build_dir),
+      "--out-dir",
+      rebase_path("${_test_dir}", root_build_dir),
+      "--ffx-binary",
+      rebase_path("${_test_dir}/ffx", root_build_dir),
+      "--ffx-subtools-search-path",
+      rebase_path(host_tools_dir, root_build_dir),
+      "--ssh-binary",
+      rebase_path("${_test_dir}/ssh", root_build_dir),
+    ]
+
+    if (defined(invoker.test_cases)) {
+      args += invoker.test_cases
+    }
+
+    if (defined(invoker.enable_honeydew) && invoker.enable_honeydew) {
+      args += ["--enable-honeydew"]
+    }
+
+    data_deps = [ "//src/developer/ffx:suite_test_data" ]
+
+    deps = [
+      ":${_host_test_data_ffx}",
+      ":${_host_test_data_ssh}",
+      ":${_host_test_data_target}",
+      "//build/python:interpreter",
+      "//third_party/antlion/runner",
+    ]
+
+    if (defined(invoker.test_params)) {
+      args += [
+        "--test-params",
+        rebase_path("${_test_dir}/${invoker.test_params}", root_build_dir),
+      ]
+      deps += [ ":${_host_test_data_test_params}" ]
+    }
+
+    if (defined(invoker.enable_honeydew) && invoker.enable_honeydew) {
+      deps += [ "//src/testing/end_to_end/honeydew" ]
+    }
+
+    if (defined(invoker.test_data_deps)) {
+      deps += invoker.test_data_deps
+    }
+  }
+}
diff --git a/environments.gni b/environments.gni
new file mode 100644
index 0000000..d19b903
--- /dev/null
+++ b/environments.gni
@@ -0,0 +1,188 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/testing/environments.gni")
+
# Testbed environments for antlion tests in the fuchsia.tests.connectivity
# Swarming pool. Each scope lists the Swarming dimensions of a testbed: the
# device under test plus how many access points, attenuators, and iperf
# servers the bed provides. All are tagged "antlion".

# Astro testbeds.
astro_ap_env = {
  dimensions = {
    access_points = "1"
    device_type = "Astro"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

astro_ap_iperf_env = {
  dimensions = {
    access_points = "1"
    device_type = "Astro"
    iperf_servers = "1"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

astro_ap_iperf_attenuator_env = {
  dimensions = {
    access_points = "1"
    attenuators = "1"
    device_type = "Astro"
    iperf_servers = "1"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

# Sherlock testbeds.
sherlock_ap_env = {
  dimensions = {
    access_points = "1"
    device_type = "Sherlock"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

sherlock_ap_iperf_env = {
  dimensions = {
    access_points = "1"
    device_type = "Sherlock"
    iperf_servers = "1"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

sherlock_ap_iperf_attenuator_env = {
  dimensions = {
    access_points = "1"
    attenuators = "1"
    device_type = "Sherlock"
    iperf_servers = "1"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

# Nelson testbeds.
nelson_ap_env = {
  dimensions = {
    access_points = "1"
    device_type = "Nelson"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

nelson_ap_iperf_env = {
  dimensions = {
    access_points = "1"
    device_type = "Nelson"
    iperf_servers = "1"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

nelson_ap_iperf_attenuator_env = {
  dimensions = {
    access_points = "1"
    attenuators = "1"
    device_type = "Nelson"
    iperf_servers = "1"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

# NUC11 testbeds.
nuc11_ap_env = {
  dimensions = {
    access_points = "1"
    device_type = "Intel NUC Kit NUC11TNHv5"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

nuc11_ap_iperf_env = {
  dimensions = {
    access_points = "1"
    device_type = "Intel NUC Kit NUC11TNHv5"
    iperf_servers = "1"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

nuc11_ap_iperf_attenuator_env = {
  dimensions = {
    access_points = "1"
    attenuators = "1"
    device_type = "Intel NUC Kit NUC11TNHv5"
    iperf_servers = "1"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

# VIM3 testbeds.
vim3_ap_env = {
  dimensions = {
    access_points = "1"
    device_type = "Vim3"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

vim3_ap_iperf_env = {
  dimensions = {
    access_points = "1"
    device_type = "Vim3"
    iperf_servers = "1"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

vim3_ap_iperf_attenuator_env = {
  dimensions = {
    access_points = "1"
    attenuators = "1"
    device_type = "Vim3"
    iperf_servers = "1"
    pool = "fuchsia.tests.connectivity"
  }
  tags = [ "antlion" ]
}

# Display environments supported by antlion.
# The bare *_env names below come from the //build/testing/environments.gni
# import at the top of this file.
display_envs = [
  astro_env,
  sherlock_env,
  nelson_env,
  nuc11_env,
  vim3_env,
]

# Display environments with one access point.
display_ap_envs = [
  astro_ap_env,
  sherlock_ap_env,
  nelson_ap_env,
  nuc11_ap_env,
  vim3_ap_env,
]

# Display environments with one access point and one iperf server.
display_ap_iperf_envs = [
  astro_ap_iperf_env,
  sherlock_ap_iperf_env,
  nelson_ap_iperf_env,
  nuc11_ap_iperf_env,
  vim3_ap_iperf_env,
]

# Display environments with one access point, one iperf server, and one
# attenuator.
display_ap_iperf_attenuator_envs = [
  astro_ap_iperf_attenuator_env,
  sherlock_ap_iperf_attenuator_env,
  nelson_ap_iperf_attenuator_env,
  nuc11_ap_iperf_attenuator_env,
  vim3_ap_iperf_attenuator_env,
]
diff --git a/format.sh b/format.sh
new file mode 100755
index 0000000..d6341f1
--- /dev/null
+++ b/format.sh
@@ -0,0 +1,46 @@
#!/bin/bash

# Detect trivial unused code.
#
# Automatic removal is possible, but is considered an unsafe operation. When a
# change hasn't been committed, automatic removal could cause unintended
# irreversible loss of in-progress code.
#
# Note: This cannot detect unused code between modules or packages. For complex
# unused code detection, vulture should be used.
#
# Test the command directly in `if` rather than inspecting $? afterwards; $?
# is fragile (any intervening command clobbers it).
if autoflake \
	--quiet \
	--check-diff \
	--remove-duplicate-keys \
	--remove-unused-variables \
	--remove-all-unused-imports \
	--recursive .; then
	echo "No unused code found"
else
	echo ""
	echo "====================="
	echo "Unused code detected!"
	echo "====================="
	echo ""
	echo "If these changes are trivial, consider running:"
	echo "\"autoflake --in-place --remove-unused-variables --remove-all-unused-imports -r .\""
	echo ""
	read -p "Run this command to remove all unused code? [y/n] " -n 1 -r
	echo ""
	echo ""

	if [[ $REPLY =~ ^[Yy]$ ]]; then
		autoflake --in-place --remove-unused-variables --remove-all-unused-imports -r .
	else
		exit 1
	fi
fi

# Sort imports to avoid bikeshedding.
isort .

# Format code; also to avoid bikeshedding.
black .
+
diff --git a/src/antlion/__init__.py b/packages/antlion/__init__.py
similarity index 100%
rename from src/antlion/__init__.py
rename to packages/antlion/__init__.py
diff --git a/packages/antlion/base_test.py b/packages/antlion/base_test.py
new file mode 100755
index 0000000..9e539ca
--- /dev/null
+++ b/packages/antlion/base_test.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+import re
+from typing import Callable
+
+from mobly.base_test import BaseTestClass
+from mobly.base_test import Error as MoblyError
+
+
class AntlionBaseTest(BaseTestClass):
    # TODO(https://github.com/google/mobly/issues/887): Remove this once similar
    # functionality is merged into Mobly.
    def _get_test_methods(
        self, test_names: list[str]
    ) -> list[tuple[str, Callable[[], None]]]:
        """Resolves test method names to bound test methods.

        Each entry in test_names is first looked up verbatim; when no exact
        match exists it is treated as a regular expression and matched against
        every known test name.

        Args:
            test_names: Test method names.

        Returns:
            List of tuples containing the test method name and the function
            implementing its logic.

        Raises:
            MoblyError: test_names does not match any tests.
        """

        # Start from the generated tests, then overlay statically defined
        # test_* methods from the class.
        test_table: dict[str, Callable[[], None]] = dict(self._generated_test_table)
        for attr_name, _ in inspect.getmembers(type(self), callable):
            if attr_name.startswith("test_"):
                test_table[attr_name] = getattr(self, attr_name)

        test_methods: list[tuple[str, Callable[[], None]]] = []
        for requested in test_names:
            if requested in test_table:
                test_methods.append((requested, test_table[requested]))
                continue
            try:
                pattern = re.compile(requested)
            except Exception as e:
                raise MoblyError(
                    f'"{requested}" is not a valid regular expression'
                ) from e
            for candidate in test_table:
                if pattern.fullmatch(candidate.strip()):
                    test_methods.append((candidate, test_table[candidate]))

        if not test_methods:
            all_patterns = '" or "'.join(test_names)
            all_tests = "\n - ".join(test_table.keys())
            raise MoblyError(
                f"{self.TAG} does not declare any tests matching "
                f'"{all_patterns}". Please verify the correctness of '
                f"{self.TAG} test names: \n - {all_tests}"
            )

        return test_methods
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py b/packages/antlion/capabilities/__init__.py
similarity index 100%
copy from src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
copy to packages/antlion/capabilities/__init__.py
diff --git a/packages/antlion/capabilities/ssh.py b/packages/antlion/capabilities/ssh.py
new file mode 100644
index 0000000..24ce107
--- /dev/null
+++ b/packages/antlion/capabilities/ssh.py
@@ -0,0 +1,431 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import shutil
+import subprocess
+import time
+from dataclasses import dataclass
+from typing import BinaryIO, Mapping
+
+from mobly import logger, signals
+
+from antlion.net import wait_for_port
+from antlion.types import Json
+from antlion.validation import MapValidator
+
# Defaults for SSH sessions; see ssh(1) and ssh_config(5).
DEFAULT_SSH_PORT: int = 22
# Per-command execution timeout (default for run/upload_file/download_file).
DEFAULT_SSH_TIMEOUT_SEC: int = 60
# Deadline for establishing a connection; used by SSHConfig.connect_timeout
# and the reachability polling in SSHProvider.
DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 90
DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30
+
+
+class SSHResult:
+    """Result of an SSH command."""
+
+    def __init__(
+        self,
+        process: (
+            subprocess.CompletedProcess[bytes]
+            | subprocess.CompletedProcess[str]
+            | subprocess.CalledProcessError
+        ),
+    ) -> None:
+        if isinstance(process.stdout, bytes):
+            self._stdout_bytes = process.stdout
+        elif isinstance(process.stdout, str):
+            self._stdout = process.stdout
+        else:
+            raise TypeError(
+                "Expected process.stdout to be either bytes or str, "
+                f"got {type(process.stdout)}"
+            )
+
+        if isinstance(process.stderr, bytes):
+            self._stderr_bytes = process.stderr
+        elif isinstance(process.stderr, str):
+            self._stderr = process.stderr
+        else:
+            raise TypeError(
+                "Expected process.stderr to be either bytes or str, "
+                f"got {type(process.stderr)}"
+            )
+
+        self._exit_status = process.returncode
+
+    def __str__(self) -> str:
+        if self.exit_status == 0:
+            return self.stdout
+        return f'status {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
+
+    @property
+    def stdout(self) -> str:
+        if not hasattr(self, "_stdout"):
+            self._stdout = self._stdout_bytes.decode("utf-8", errors="replace")
+        return self._stdout
+
+    @property
+    def stdout_bytes(self) -> bytes:
+        if not hasattr(self, "_stdout_bytes"):
+            self._stdout_bytes = self._stdout.encode()
+        return self._stdout_bytes
+
+    @property
+    def stderr(self) -> str:
+        if not hasattr(self, "_stderr"):
+            self._stderr = self._stderr_bytes.decode("utf-8", errors="replace")
+        return self._stderr
+
+    @property
+    def exit_status(self) -> int:
+        return self._exit_status
+
+
class SSHError(signals.TestError):
    """A SSH command returned with a non-zero status code."""

    def __init__(self, command: str, result: SSHResult):
        super().__init__(f'SSH command "{command}" unexpectedly returned {result}')
        # Keep the full result so callers can inspect stdout/stderr/exit_status.
        self.result = result
+
+
class SSHTimeout(signals.TestError):
    """A SSH command timed out."""

    def __init__(self, err: subprocess.TimeoutExpired):
        # Include captured stdout/stderr in the message to aid debugging.
        super().__init__(
            f'SSH command "{err.cmd}" timed out after {err.timeout}s, '
            f"stdout={err.stdout!r}, stderr={err.stderr!r}"
        )
+
+
class SSHTransportError(signals.TestError):
    """Failure to send an SSH command.

    Raised for connection-level failures (hostname resolution, connection
    timed out, connection refused) rather than a remote command failing.
    """
+
+
@dataclass
class SSHConfig:
    """SSH client config."""

    # SSH flags. See ssh(1) for full details.
    user: str
    host_name: str
    identity_file: str

    ssh_binary: str = "ssh"
    config_file: str = "/dev/null"
    port: int = 22

    # SSH options. See ssh_config(5) for full details.
    connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
    server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL
    strict_host_key_checking: bool = False
    user_known_hosts_file: str = "/dev/null"
    log_level: str = "ERROR"

    def full_command(self, command: str, force_tty: bool = False) -> list[str]:
        """Generate the complete command to execute command over SSH.

        Args:
            command: The command to run over SSH
            force_tty: Force pseudo-terminal allocation. This can be used to
                execute arbitrary screen-based programs on a remote machine,
                which can be very useful, e.g. when implementing menu services.

        Returns:
            Arguments composing the complete call to SSH.
        """
        argv = [
            self.ssh_binary,
            # SSH flags
            "-i",
            self.identity_file,
            "-F",
            self.config_file,
            "-p",
            str(self.port),
            # SSH configuration options
            "-o",
            f"ConnectTimeout={self.connect_timeout}",
            "-o",
            f"ServerAliveInterval={self.server_alive_interval}",
            "-o",
            f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}',
            "-o",
            f"UserKnownHostsFile={self.user_known_hosts_file}",
            "-o",
            f"LogLevel={self.log_level}",
        ]
        if force_tty:
            # Multiple -t options force tty allocation, even if ssh has no local
            # tty. This is necessary for launching ssh with subprocess without
            # shell=True.
            argv.append("-tt")
        argv.append(f"{self.user}@{self.host_name}")
        # NOTE(review): naive whitespace split — the remote command cannot carry
        # quoted arguments containing spaces; confirm callers never need that.
        argv.extend(command.split())
        return argv

    @staticmethod
    def from_config(config: Mapping[str, Json]) -> "SSHConfig":
        """Build an SSHConfig from a controller configuration mapping."""
        validator = MapValidator(config)
        binary = validator.get(str, "ssh_binary_path", None)
        if binary is None:
            # No explicit binary configured; fall back to the host's ssh.
            located = shutil.which("ssh")
            if not isinstance(located, str):
                raise ValueError("Failed to find ssh in $PATH")
            binary = located

        return SSHConfig(
            user=validator.get(str, "user"),
            host_name=validator.get(str, "host"),
            identity_file=validator.get(str, "identity_file"),
            ssh_binary=binary,
            config_file=validator.get(str, "ssh_config", "/dev/null"),
            port=validator.get(int, "port", 22),
            connect_timeout=validator.get(int, "connect_timeout", 30),
        )
+
+
class SSHProvider:
    """Device-specific provider for SSH clients."""

    def __init__(self, config: SSHConfig) -> None:
        """
        Args:
            config: SSH client config

        Raises:
            TimeoutError: when sshd on the device cannot be reached
        """
        logger_tag = f"ssh | {config.host_name}"
        if config.port != DEFAULT_SSH_PORT:
            logger_tag += f":{config.port}"

        # Prefix every log line with the device this client talks to.
        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[{logger_tag}]",
            },
        )

        self.config = config

        try:
            self.wait_until_reachable()
            self.log.info("sshd is reachable")
        except Exception as e:
            raise TimeoutError("sshd is unreachable") from e

    def wait_until_reachable(self) -> None:
        """Wait for the device to become reachable via SSH.

        Raises:
            TimeoutError: connect_timeout has expired without a successful SSH
                connection to the device
            SSHTransportError: SSH is available on the device but
                connect_timeout has expired and SSH fails to run
            SSHTimeout: SSH is available on the device but connect_timeout has
                expired and SSH takes too long to run a command
        """
        timeout_sec = self.config.connect_timeout
        timeout = time.time() + timeout_sec
        wait_for_port(self.config.host_name, self.config.port, timeout_sec=timeout_sec)

        while True:
            try:
                self._run("echo", timeout_sec, False, None)
                return
            except SSHTransportError as e:
                # Repeat if necessary; _run() can exit prematurely by receiving
                # SSH transport errors. These errors can be caused by sshd not
                # being fully initialized yet.
                if time.time() < timeout:
                    continue
                else:
                    raise e

    def wait_until_unreachable(
        self, interval_sec: int = 1, timeout_sec: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
    ) -> None:
        """Wait for the device to become unreachable via SSH.

        Args:
            interval_sec: Seconds to wait between unreachability attempts
            timeout_sec: Seconds to wait until raising TimeoutError

        Raises:
            TimeoutError: when timeout_sec has expired with the device still
                reachable
        """
        timeout = time.time() + timeout_sec

        while True:
            try:
                wait_for_port(
                    self.config.host_name, self.config.port, timeout_sec=interval_sec
                )
            except TimeoutError:
                # The port probe failed: the device is unreachable. Success.
                return

            # Still reachable; raise only once the overall deadline has passed.
            # (Bug fix: this previously raised while `time.time() < timeout`,
            # i.e. immediately on the first probe, and looped forever once the
            # deadline had actually expired.)
            if time.time() >= timeout:
                raise TimeoutError(
                    f"Connection to {self.config.host_name} is still reachable "
                    f"after {timeout_sec}s"
                )

    def run(
        self,
        command: str,
        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
        connect_retries: int = 3,
        force_tty: bool = False,
    ) -> SSHResult:
        """Run a command on the device then exit.

        Args:
            command: String to send to the device.
            timeout_sec: Seconds to wait for the command to complete.
            connect_retries: Amount of times to retry connect on fail.
            force_tty: Force pseudo-terminal allocation.

        Raises:
            SSHError: if the SSH command returns a non-zero status code
            SSHTransportError: if SSH fails to run the command
            SSHTimeout: if there is no response within timeout_sec

        Returns:
            SSHResults from the executed command.
        """
        return self._run_with_retry(
            command, timeout_sec, connect_retries, force_tty, stdin=None
        )

    def _run_with_retry(
        self,
        command: str,
        timeout_sec: int,
        connect_retries: int,
        force_tty: bool,
        stdin: BinaryIO | None,
    ) -> SSHResult:
        """Run a command, retrying on SSH transport (connection) errors.

        Raises:
            SSHError: if the SSH command returns a non-zero status code
            SSHTransportError: if SSH fails to connect after connect_retries
            SSHTimeout: if there is no response within timeout_sec
        """
        err: Exception = ValueError("connect_retries cannot be 0")
        for _ in range(connect_retries):
            try:
                return self._run(command, timeout_sec, force_tty, stdin)
            except SSHTransportError as e:
                err = e
                # warning() instead of the deprecated warn() alias.
                self.log.warning(f"Connect failed: {e}")
        raise err

    def _run(
        self, command: str, timeout_sec: int, force_tty: bool, stdin: BinaryIO | None
    ) -> SSHResult:
        """Run a command over SSH once, mapping failures onto antlion errors.

        Exit status 255 is reported by the ssh client itself; its stderr is
        inspected to distinguish connection-level failures (SSHTransportError)
        from the remote command failing (SSHError).
        """
        full_command = self.config.full_command(command, force_tty)
        self.log.debug(
            f'Running "{command}" (full command: "{" ".join(full_command)}")'
        )
        try:
            process = subprocess.run(
                full_command,
                capture_output=True,
                timeout=timeout_sec,
                check=True,
                stdin=stdin,
            )
        except subprocess.CalledProcessError as e:
            if e.returncode == 255:
                stderr = e.stderr.decode("utf-8", errors="replace")
                if (
                    "Name or service not known" in stderr
                    or "Host does not exist" in stderr
                ):
                    raise SSHTransportError(
                        f"Hostname {self.config.host_name} cannot be resolved to an address"
                    ) from e
                if "Connection timed out" in stderr:
                    raise SSHTransportError(
                        f"Failed to establish a connection to {self.config.host_name} within {timeout_sec}s"
                    ) from e
                if "Connection refused" in stderr:
                    raise SSHTransportError(
                        f"Connection refused by {self.config.host_name}"
                    ) from e

            raise SSHError(command, SSHResult(e)) from e
        except subprocess.TimeoutExpired as e:
            raise SSHTimeout(e) from e

        return SSHResult(process)

    def upload_file(
        self,
        local_path: str,
        remote_path: str,
        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
        connect_retries: int = 3,
    ) -> None:
        """Upload a file to the device.

        Args:
            local_path: Path to the file to upload
            remote_path: Path on the remote device to place the uploaded file.
            timeout_sec: Seconds to wait for the command to complete.
            connect_retries: Amount of times to retry connect on fail.

        Raises:
            SSHError: if the SSH upload returns a non-zero status code
            SSHTransportError: if SSH fails to run the upload command
            SSHTimeout: if there is no response within timeout_sec
        """
        # Stream the local file into `cat` on the device.
        with open(local_path, "rb") as file:
            self._run_with_retry(
                f"cat > {remote_path}",
                timeout_sec,
                connect_retries,
                force_tty=False,
                stdin=file,
            )

    def download_file(
        self,
        remote_path: str,
        local_path: str,
        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
        connect_retries: int = 3,
    ) -> None:
        """Download a file from the device.

        Args:
            remote_path: Path on the remote device to download.
            local_path: Path on the host to the place the downloaded file.
            timeout_sec: Seconds to wait for the command to complete.
            connect_retries: Amount of times to retry connect on fail.

        Raises:
            SSHError: if the SSH command returns a non-zero status code
            SSHTransportError: if SSH fails to run the command
            SSHTimeout: if there is no response within timeout_sec
        """
        # Bug fix: this method previously duplicated upload_file's body
        # (`cat > remote` fed from the local file), so it uploaded instead of
        # downloading. Read the remote file and write its contents locally.
        result = self._run_with_retry(
            f"cat {remote_path}",
            timeout_sec,
            connect_retries,
            force_tty=False,
            stdin=None,
        )
        with open(local_path, "wb") as file:
            file.write(result.stdout_bytes)
diff --git a/packages/antlion/context.py b/packages/antlion/context.py
new file mode 100644
index 0000000..3f2481f
--- /dev/null
+++ b/packages/antlion/context.py
@@ -0,0 +1,337 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum
+import logging
+import os
+
+from antlion.event import event_bus
+from antlion.event.event import (
+    Event,
+    TestCaseBeginEvent,
+    TestCaseEndEvent,
+    TestClassBeginEvent,
+    TestClassEndEvent,
+    TestClassEvent,
+)
+
+
class ContextLevel(enum.IntEnum):
    # Depth indices into the _contexts stack, ordered from outermost (the
    # whole test run) to innermost (a single test case). Suitable as the
    # `depth` argument to get_current_context().
    ROOT = 0
    TESTCLASS = 1
    TESTCASE = 2
+
+
def get_current_context(depth=None):
    """Get the current test context at the specified depth.

    Pulls the most recently created context, with a level at or below the given
    depth, from the _contexts stack.

    Args:
        depth: The desired context level. For example, the TESTCLASS level would
            yield the current test class context, even if the test is currently
            within a test case.

    Returns: An instance of TestContext.
    """
    # No depth requested: the innermost (most recent) context. Otherwise clamp
    # the requested depth to the stack's current size.
    index = -1 if depth is None else min(depth, len(_contexts) - 1)
    return _contexts[index]
+
+
def _get_context_for_test_case_event(event):
    """Generate a TestCaseContext from the given TestCaseEvent.

    Args:
        event: An event carrying test_class and test_case attributes.

    Returns:
        A TestCaseContext for the event's test case.
    """
    return TestCaseContext(event.test_class, event.test_case)
+
+
def _get_context_for_test_class_event(event):
    """Generate a TestClassContext from the given TestClassEvent.

    Args:
        event: An event carrying a test_class attribute.

    Returns:
        A TestClassContext for the event's test class.
    """
    return TestClassContext(event.test_class)
+
+
class NewContextEvent(Event):
    """The event posted when a test context has changed.

    Base class for the class- and case-level context-change events below.
    """
+
+
class NewTestClassContextEvent(NewContextEvent):
    """The event posted when the test class context has changed.

    Posted by _update_test_class_context after the _contexts stack is updated.
    """
+
+
class NewTestCaseContextEvent(NewContextEvent):
    """The event posted when the test case context has changed.

    Posted by _update_test_case_context after the _contexts stack is updated.
    """
+
+
def _update_test_class_context(event):
    """Keeps the _contexts stack in sync with test class lifecycle events.

    Pushes a new TestClassContext onto the _contexts stack upon a
    TestClassBeginEvent and pops the most recent context upon a
    TestClassEndEvent, then posts the context change to the event bus.

    Args:
        event: An instance of TestClassBeginEvent or TestClassEndEvent.
    """
    if isinstance(event, TestClassBeginEvent):
        _contexts.append(_get_context_for_test_class_event(event))
    # Guard against popping an empty stack if an end event arrives unpaired.
    if isinstance(event, TestClassEndEvent) and _contexts:
        _contexts.pop()
    event_bus.post(NewTestClassContextEvent())
+
+
def _update_test_case_context(event):
    """Keeps the _contexts stack in sync with test case lifecycle events.

    Pushes a new TestCaseContext onto the _contexts stack upon a
    TestCaseBeginEvent and pops the most recent context upon a
    TestCaseEndEvent, then posts the context change to the event bus.

    Args:
        event: An instance of TestCaseBeginEvent or TestCaseEndEvent.
    """
    if isinstance(event, TestCaseBeginEvent):
        _contexts.append(_get_context_for_test_case_event(event))
    # Guard against popping an empty stack if an end event arrives unpaired.
    if isinstance(event, TestCaseEndEvent) and _contexts:
        _contexts.pop()
    event_bus.post(NewTestCaseContextEvent())
+
+
+event_bus.register(TestClassEvent, _update_test_class_context)
+event_bus.register(TestCaseBeginEvent, _update_test_case_context, order=-100)
+event_bus.register(TestCaseEndEvent, _update_test_case_context, order=100)
+
+
class TestContext(object):
    """An object representing the current context in which a test is executing.

    The context encodes the current state of the test runner with respect to a
    particular scenario in which code is being executed. For example, if some
    code is being executed as part of a test case, then the context should
    encode information about that test case such as its name or enclosing
    class.

    The subcontext specifies a relative path in which certain outputs,
    e.g. logcat, should be kept for the given context.

    The full output path is given by
    <base_output_path>/<context_dir>/<subcontext>.

    Attributes:
        _base_output_paths: a dictionary mapping a logger's name to its base
                            output path
        _subcontexts: a dictionary mapping a logger's name to its
                      subcontext-level output directory
    """

    # Class-level registries shared by every context instance, keyed by
    # logger name.
    _base_output_paths = {}
    _subcontexts = {}

    def get_base_output_path(self, log_name=None):
        """Gets the base output path for this logger.

        The base output path is interpreted as the reporting root for the
        entire test runner.

        If a path has been added with add_base_output_path, it is returned.
        Otherwise, a default is determined by _get_default_base_output_path().

        Args:
            log_name: The name of the logger.

        Returns:
            The output path.
        """
        try:
            return self._base_output_paths[log_name]
        except KeyError:
            return self._get_default_base_output_path()

    @classmethod
    def add_base_output_path(cls, log_name, base_output_path):
        """Store the base path for this logger.

        Args:
            log_name: The name of the logger.
            base_output_path: The base path of output files for this logger.
        """
        cls._base_output_paths[log_name] = base_output_path

    def get_subcontext(self, log_name=None):
        """Gets the subcontext for this logger.

        The subcontext is interpreted as the directory, relative to the
        context-level path, where all outputs of the given logger are stored.

        If a path has been added with add_subcontext, it is returned.
        Otherwise, the empty string is returned.

        Args:
            log_name: The name of the logger.

        Returns:
            The output path.
        """
        return self._subcontexts.get(log_name, "")

    @classmethod
    def add_subcontext(cls, log_name, subcontext):
        """Store the subcontext path for this logger.

        Args:
            log_name: The name of the logger.
            subcontext: The relative subcontext path of output files for this
                        logger.
        """
        cls._subcontexts[log_name] = subcontext

    def get_full_output_path(self, log_name=None):
        """Gets the full output path for this context.

        The full path represents the absolute path to the output directory,
        as given by <base_output_path>/<context_dir>/<subcontext>

        Args:
            log_name: The name of the logger. Used to specify the base output
                      path and the subcontext.

        Returns:
            The output path. The directory is created if it does not exist.
        """
        full_path = os.path.join(
            self.get_base_output_path(log_name),
            self._get_default_context_dir(),
            self.get_subcontext(log_name),
        )
        os.makedirs(full_path, exist_ok=True)
        return full_path

    @property
    def identifier(self):
        # Subclasses must provide a human-readable identifier.
        raise NotImplementedError()

    def _get_default_base_output_path(self):
        """Gets the default base output path.

        This will attempt to use the ACTS logging path set up in the global
        logger.

        Returns:
            The logging path.

        Raises:
            EnvironmentError: If the ACTS logger has not been initialized.
        """
        try:
            return logging.log_path
        except AttributeError as e:
            raise EnvironmentError(
                "The ACTS logger has not been set up and"
                ' "base_output_path" has not been set.'
            ) from e

    def _get_default_context_dir(self):
        """Gets the default output directory for this context."""
        raise NotImplementedError()
+
+
class RootContext(TestContext):
    """A TestContext that represents a test run."""

    @property
    def identifier(self):
        # Fixed identifier: there is exactly one root context per run (see the
        # _contexts initializer at module bottom).
        return "root"

    def _get_default_context_dir(self):
        """Gets the default output directory for this context.

        Logs at the root level context are placed directly in the base level
        directory, so no context-level path exists."""
        return ""
+
+
class TestClassContext(TestContext):
    """A TestContext that represents a test class.

    Attributes:
        test_class: The test class instance that this context represents.
    """

    def __init__(self, test_class):
        """Initializes a TestClassContext for the given test class.

        Args:
            test_class: A test class object. Must be an instance of the test
                        class, not the class object itself.
        """
        self.test_class = test_class

    @property
    def test_class_name(self):
        """Name of the class of the wrapped test class instance."""
        return type(self.test_class).__name__

    @property
    def identifier(self):
        return self.test_class_name

    def _get_default_context_dir(self):
        """Gets the default output directory for this context.

        For TestClassContexts, this will be the name of the test class. This is
        in line with the ACTS logger itself.
        """
        return self.test_class_name
+
+
class TestCaseContext(TestContext):
    """A TestContext that represents a test case.

    Attributes:
        test_case: The string name of the test case.
        test_class: The test class instance enclosing the test case.
    """

    def __init__(self, test_class, test_case):
        """Initializes a TestCaseContext for the given test case.

        Args:
            test_class: A test class object. Must be an instance of the test
                        class, not the class object itself.
            test_case: The string name of the test case.
        """
        self.test_class = test_class
        self.test_case = test_case

    @property
    def test_case_name(self):
        """The string name of the test case."""
        return self.test_case

    @property
    def test_class_name(self):
        """Name of the class of the enclosing test class instance."""
        return type(self.test_class).__name__

    @property
    def identifier(self):
        return ".".join((self.test_class_name, self.test_case_name))

    def _get_default_context_dir(self):
        """Gets the default output directory for this context.

        For TestCaseContexts, this will be the name of the test class followed
        by the name of the test case. This is in line with the ACTS logger
        itself.
        """
        return os.path.join(self.test_class_name, self.test_case_name)
+
+
# Stack for keeping track of the current test context. The root context is
# always at the bottom; class- and case-level contexts are pushed and popped
# by the event handlers registered in this module.
_contexts = [RootContext()]
diff --git a/src/antlion/controllers/OWNERS b/packages/antlion/controllers/OWNERS
similarity index 100%
rename from src/antlion/controllers/OWNERS
rename to packages/antlion/controllers/OWNERS
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py b/packages/antlion/controllers/__init__.py
similarity index 100%
copy from src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py
copy to packages/antlion/controllers/__init__.py
diff --git a/packages/antlion/controllers/access_point.py b/packages/antlion/controllers/access_point.py
new file mode 100755
index 0000000..386e5aa
--- /dev/null
+++ b/packages/antlion/controllers/access_point.py
@@ -0,0 +1,865 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import datetime
+import ipaddress
+import logging
+import time
+from dataclasses import dataclass
+from typing import Any, FrozenSet
+
+from mobly import logger
+
+from antlion import utils
+from antlion.capabilities.ssh import SSHConfig, SSHProvider
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.ap_get_interface import ApInterfaces
+from antlion.controllers.ap_lib.ap_iwconfig import ApIwconfig
+from antlion.controllers.ap_lib.bridge_interface import BridgeInterface
+from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet
+from antlion.controllers.ap_lib.dhcp_server import DhcpServer, NoInterfaceError
+from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
+from antlion.controllers.ap_lib.hostapd import Hostapd
+from antlion.controllers.ap_lib.hostapd_ap_preset import create_ap_preset
+from antlion.controllers.ap_lib.hostapd_config import HostapdConfig
+from antlion.controllers.ap_lib.hostapd_security import Security
+from antlion.controllers.ap_lib.radvd import Radvd
+from antlion.controllers.ap_lib.radvd_config import RadvdConfig
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionManagementRequest,
+)
+from antlion.controllers.pdu import PduDevice, get_pdu_port_for_device
+from antlion.controllers.utils_lib.commands import command, ip, journalctl, route
+from antlion.controllers.utils_lib.ssh import connection, settings
+from antlion.libs.proc import job
+from antlion.types import ControllerConfig
+from antlion.validation import MapValidator
+
+MOBLY_CONTROLLER_CONFIG_NAME = "AccessPoint"
+ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
+
+
class Error(Exception):
    """Error raised when there is a problem with the access point."""
+
+
@dataclass
class _ApInstance:
    """A running hostapd process paired with the subnet it serves."""

    # The hostapd daemon controlling one wireless interface.
    hostapd: Hostapd
    # The IPv4 subnet handed out to clients of that interface.
    subnet: Subnet
+
+
# These ranges were split this way since each physical radio can have up
# to 8 SSIDs so for the 2GHz radio the DHCP range will be
# 192.168.1 - 8 and the 5Ghz radio will be 192.168.9 - 16.
# Both defaults can be overridden via the "ap_subnet" config key (see
# AccessPoint.__init__).
_AP_2GHZ_SUBNET_STR_DEFAULT = "192.168.1.0/24"
_AP_5GHZ_SUBNET_STR_DEFAULT = "192.168.9.0/24"

# The last digit of the ip for the bridge interface
BRIDGE_IP_LAST = "100"
+
+
def create(configs: list[ControllerConfig]) -> list[AccessPoint]:
    """Creates ap controllers from a json config.

    Creates an ap controller from either a list, or a single
    element. The element can either be just the hostname or a dictionary
    containing the hostname and username of the ap to connect to over ssh.

    Args:
        configs: The json configs that represent this controller.

    Returns:
        A list of AccessPoints, one per config.
    """
    return [AccessPoint(c) for c in configs]
+
+
def destroy(aps: list[AccessPoint]) -> None:
    """Tears down every access point in the given list.

    Args:
        aps: The access points to destroy.
    """
    for access_point in aps:
        access_point.close()
+
+
def get_info(aps: list[AccessPoint]) -> list[str]:
    """Gets identifying information for a list of access points.

    Args:
        aps: A list of AccessPoints.

    Returns:
        The ssh hostname of each access point, in the same order as `aps`.
    """
    hostnames = []
    for access_point in aps:
        hostnames.append(access_point.ssh_settings.hostname)
    return hostnames
+
+
class AccessPoint:
    """An access point controller.

    Attributes:
        ssh: The ssh connection to this ap.
        ssh_settings: The ssh settings being used by the ssh connection.
        dhcp_settings: The dhcp server settings being used.
    """

    def __init__(self, config: ControllerConfig) -> None:
        """
        Args:
            config: configs for the access point from config file.
        """
        c = MapValidator(config)
        self.ssh_settings = settings.from_config(c.get(dict, "ssh_config"))
        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[Access Point|{self.ssh_settings.hostname}]",
            },
        )
        self.device_pdu_config = c.get(dict, "PduDevice", None)
        self.identifier = self.ssh_settings.hostname

        # Per-band DHCP subnets, overridable via the "ap_subnet" config key.
        subnet = MapValidator(c.get(dict, "ap_subnet", {}))
        self._AP_2G_SUBNET_STR = subnet.get(str, "2g", _AP_2GHZ_SUBNET_STR_DEFAULT)
        self._AP_5G_SUBNET_STR = subnet.get(str, "5g", _AP_5GHZ_SUBNET_STR_DEFAULT)

        self._AP_2G_SUBNET = Subnet(ipaddress.IPv4Network(self._AP_2G_SUBNET_STR))
        self._AP_5G_SUBNET = Subnet(ipaddress.IPv4Network(self._AP_5G_SUBNET_STR))

        self.ssh = connection.SshConnection(self.ssh_settings)

        # TODO(http://b/278758876): Replace self.ssh with self.ssh_provider
        self.ssh_provider = SSHProvider(
            SSHConfig(
                self.ssh_settings.username,
                self.ssh_settings.hostname,
                self.ssh_settings.identity_file,
                port=self.ssh_settings.port,
                ssh_binary=self.ssh_settings.executable,
                connect_timeout=90,
            )
        )

        # Singleton utilities for running various commands.
        self._ip_cmd = command.require(ip.LinuxIpCommand(self.ssh))
        self._route_cmd = command.require(route.LinuxRouteCommand(self.ssh))
        self._journalctl_cmd = command.require(
            journalctl.LinuxJournalctlCommand(self.ssh)
        )

        # A map from network interface name to _ApInstance objects representing
        # the hostapd instance running against the interface.
        self._aps: dict[str, _ApInstance] = dict()
        self._dhcp: DhcpServer | None = None
        self._dhcp_bss: dict[str, Subnet] = dict()
        self._radvd: Radvd | None = None
        self.bridge = BridgeInterface(self)
        self.iwconfig = ApIwconfig(self)

        # Check to see if wan_interface is specified in acts_config for tests
        # isolated from the internet and set this override.
        self.interfaces = ApInterfaces(self, c.get(str, "wan_interface", None))

        # Get needed interface names and initialize the unnecessary ones.
        self.wan = self.interfaces.get_wan_interface()
        self.wlan = self.interfaces.get_wlan_interface()
        # NOTE(review): assumes get_wlan_interface() returns (2g, 5g) in that
        # order — confirm against ApInterfaces.
        self.wlan_2g = self.wlan[0]
        self.wlan_5g = self.wlan[1]
        self.lan = self.interfaces.get_lan_interface()
        self._initial_ap()
        self.setup_bridge = False

        # Access points are not given internet access, so their system time needs to be
        # manually set to be accurate.
        self._sync_time()

    def _initial_ap(self) -> None:
        """Initial AP interfaces.

        Bring down hostapd if instance is running, bring down all bridge
        interfaces.
        """
        # This is necessary for Gale/Whirlwind flashed with dev channel image
        # Unused interfaces such as existing hostapd daemon, guest, mesh
        # interfaces need to be brought down as part of the AP initialization
        # process, otherwise test would fail.
        try:
            self.ssh.run("stop wpasupplicant")
        except job.Error:
            self.log.info("No wpasupplicant running")
        try:
            self.ssh.run("stop hostapd")
        except job.Error:
            self.log.info("No hostapd running")
        # Bring down all wireless interfaces
        for iface in self.wlan:
            WLAN_DOWN = f"ip link set {iface} down"
            self.ssh.run(WLAN_DOWN)
        # Bring down all bridge interfaces
        bridge_interfaces = self.interfaces.get_bridge_interface()
        for iface in bridge_interfaces:
            BRIDGE_DOWN = f"ip link set {iface} down"
            BRIDGE_DEL = f"brctl delbr {iface}"
            self.ssh.run(BRIDGE_DOWN)
            self.ssh.run(BRIDGE_DEL)

    def _sync_time(self) -> None:
        """Synchronize the system time.

        Allows for better synchronization between antlion host logs and AP logs.
        Useful for when the device does not have internet connection.
        """
        now = datetime.datetime.now().astimezone().isoformat()
        self.ssh.run(f'date -s "{now}"')

    def start_ap(
        self,
        hostapd_config: HostapdConfig,
        radvd_config: RadvdConfig | None = None,
        setup_bridge: bool = False,
        is_nat_enabled: bool = True,
        additional_parameters: dict[str, Any] | None = None,
    ) -> list[str]:
        """Starts as an ap using a set of configurations.

        This will start an ap on this host. To start an ap the controller
        selects a network interface to use based on the configs given. It then
        will start up hostapd on that interface. Next a subnet is created for
        the network interface and dhcp server is refreshed to give out ips
        for that subnet for any device that connects through that interface.

        Args:
            hostapd_config: The configurations to use when starting up the ap.
            radvd_config: The IPv6 configuration to use when starting up the ap.
            setup_bridge: Whether to bridge the LAN interface WLAN interface.
                Only one WLAN interface can be bridged with the LAN interface
                and none of the guest networks can be bridged.
            is_nat_enabled: If True, start NAT on the AP to allow the DUT to be
                able to access the internet if the WAN port is connected to the
                internet.
            additional_parameters: Parameters that can be sent directly into the
                hostapd config file.  This can be used for debugging and or
                adding one off parameters into the config.

        Returns:
            An identifier for each ssid being started. These identifiers can be
            used later by this controller to control the ap.

        Raises:
            Error: When the ap can't be brought up.
        """
        if additional_parameters is None:
            additional_parameters = {}

        # Frequencies below 5000 MHz belong to the 2.4 GHz band.
        if hostapd_config.frequency < 5000:
            interface = self.wlan_2g
            subnet = self._AP_2G_SUBNET
        else:
            interface = self.wlan_5g
            subnet = self._AP_5G_SUBNET

        # radvd requires the interface to have a IPv6 link-local address.
        if radvd_config:
            self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.disable_ipv6=0")
            self.ssh.run(f"sysctl -w net.ipv6.conf.{interface}.forwarding=1")

        # In order to handle dhcp servers on any interface, the initiation of
        # the dhcp server must be done after the wlan interfaces are figured
        # out as opposed to being in __init__
        self._dhcp = DhcpServer(self.ssh, interface=interface)

        # For multi bssid configurations the mac address
        # of the wireless interface needs to have enough space to mask out
        # up to 8 different mac addresses. So in for one interface the range is
        # hex 0-7 and for the other the range is hex 8-f.
        interface_mac_orig = None
        cmd = f"ip link show {interface}|grep ether|awk -F' ' '{{print $2}}'"
        interface_mac_orig = self.ssh.run(cmd)
        # `interface` is always wlan_5g or wlan_2g (chosen above), so exactly
        # one of these branches runs and `last_octet` is always bound before
        # the bss_lookup loop below reads it.
        if interface == self.wlan_5g:
            hostapd_config.bssid = f"{interface_mac_orig.stdout[:-1]}0"
            last_octet = 1
        if interface == self.wlan_2g:
            hostapd_config.bssid = f"{interface_mac_orig.stdout[:-1]}8"
            last_octet = 9
        if interface in self._aps:
            raise ValueError(
                "No WiFi interface available for AP on "
                f"channel {hostapd_config.channel}"
            )

        apd = Hostapd(self.ssh, interface)
        new_instance = _ApInstance(hostapd=apd, subnet=subnet)
        self._aps[interface] = new_instance

        # Turn off the DHCP server, we're going to change its settings.
        self.stop_dhcp()
        # Clear all routes to prevent old routes from interfering.
        self._route_cmd.clear_routes(net_interface=interface)
        # Add IPv6 link-local route so packets destined to the AP will be
        # processed by the AP. This is necessary if an iperf server is running
        # on the AP, but not for traffic handled by the Linux networking stack
        # such as ping.
        if radvd_config:
            self._route_cmd.add_route(interface, ipaddress.IPv6Interface("fe80::/64"))

        self._dhcp_bss = dict()
        if hostapd_config.bss_lookup:
            # The self._dhcp_bss dictionary is created to hold the key/value
            # pair of the interface name and the ip scope that will be
            # used for the particular interface.  The a, b, c, d
            # variables below are the octets for the ip address.  The
            # third octet is then incremented for each interface that
            # is requested.  This part is designed to bring up the
            # hostapd interfaces and not the DHCP servers for each
            # interface.
            counter = 1
            for iface in hostapd_config.bss_lookup:
                if interface_mac_orig:
                    hostapd_config.bss_lookup[iface].bssid = (
                        interface_mac_orig.stdout[:-1] + hex(last_octet)[-1:]
                    )
                self._route_cmd.clear_routes(net_interface=str(iface))
                if interface is self.wlan_2g:
                    starting_ip_range = self._AP_2G_SUBNET_STR
                else:
                    starting_ip_range = self._AP_5G_SUBNET_STR
                a, b, c, d = starting_ip_range.split(".")
                self._dhcp_bss[iface] = Subnet(
                    ipaddress.IPv4Network(f"{a}.{b}.{int(c) + counter}.{d}")
                )
                counter = counter + 1
                last_octet = last_octet + 1

        apd.start(hostapd_config, additional_parameters=additional_parameters)

        # The DHCP server requires interfaces to have ips and routes before
        # the server will come up.
        interface_ip = ipaddress.IPv4Interface(
            f"{subnet.router}/{subnet.network.prefixlen}"
        )
        if setup_bridge is True:
            bridge_interface_name = "eth_test"
            interfaces = [interface]
            if self.lan:
                interfaces.append(self.lan)
            self.create_bridge(bridge_interface_name, interfaces)
            self._ip_cmd.set_ipv4_address(bridge_interface_name, interface_ip)
        else:
            self._ip_cmd.set_ipv4_address(interface, interface_ip)
        if hostapd_config.bss_lookup:
            # This loop goes through each interface that was setup for
            # hostapd and assigns the DHCP scopes that were defined but
            # not used during the hostapd loop above.
            # NOTE(review): the loop variable `subnet` shadows the outer
            # `subnet` chosen at the top of this method. Safe today because the
            # outer value is not read after this point, but fragile if this
            # method is extended.
            for iface, subnet in self._dhcp_bss.items():
                bss_interface_ip = ipaddress.IPv4Interface(
                    f"{subnet.router}/{subnet.network.prefixlen}"
                )
                self._ip_cmd.set_ipv4_address(iface, bss_interface_ip)

        # Restart the DHCP server with our updated list of subnets.
        configured_subnets = self.get_configured_subnets()
        dhcp_conf = DhcpConfig(subnets=configured_subnets)
        self.start_dhcp(dhcp_conf=dhcp_conf)
        # Forwarding is enabled in both cases; NAT only when requested.
        if is_nat_enabled:
            self.start_nat()
            self.enable_forwarding()
        else:
            self.stop_nat()
            self.enable_forwarding()
        if radvd_config:
            radvd_interface = bridge_interface_name if setup_bridge else interface
            self._radvd = Radvd(self.ssh, radvd_interface)
            self._radvd.start(radvd_config)
        else:
            self._radvd = None

        bss_interfaces = [bss for bss in hostapd_config.bss_lookup]
        bss_interfaces.append(interface)

        return bss_interfaces

    def get_configured_subnets(self) -> list[Subnet]:
        """Get the list of configured subnets on the access point.

        This allows consumers of the access point objects create custom DHCP
        configs with the correct subnets.

        Returns: a list of Subnet objects
        """
        configured_subnets = [x.subnet for x in self._aps.values()]
        for k, v in self._dhcp_bss.items():
            configured_subnets.append(v)
        return configured_subnets

    def start_dhcp(self, dhcp_conf: DhcpConfig) -> None:
        """Start a DHCP server for the specified subnets.

        This allows consumers of the access point objects to control DHCP.

        Args:
            dhcp_conf: A DhcpConfig object.

        Raises:
            Error: Raised when a dhcp server error is found.
        """
        # No-op until start_ap() has created the DhcpServer.
        if self._dhcp is not None:
            self._dhcp.start(config=dhcp_conf)

    def stop_dhcp(self) -> None:
        """Stop DHCP for this AP object.

        This allows consumers of the access point objects to control DHCP.
        """
        if self._dhcp is not None:
            self._dhcp.stop()

    def get_systemd_journal(self) -> str:
        """Get systemd journal logs from this current boot."""
        return self._journalctl_cmd.logs()

    def get_dhcp_logs(self) -> str | None:
        """Get DHCP logs for this AP object.

        This allows consumers of the access point objects to validate DHCP
        behavior.

        Returns:
            A string of the dhcp server logs, or None if a DHCP server has not
            been started.
        """
        if self._dhcp is not None:
            return self._dhcp.get_logs()
        return None

    def get_hostapd_logs(self) -> dict[str, str]:
        """Get hostapd logs for all interfaces on AP object.

        This allows consumers of the access point objects to validate hostapd
        behavior.

        Returns: A dict with {interface: log} from hostapd instances.
        """
        hostapd_logs: dict[str, str] = dict()
        for iface, ap in self._aps.items():
            hostapd_logs[iface] = ap.hostapd.pull_logs()
        return hostapd_logs

    def get_radvd_logs(self) -> str | None:
        """Get radvd logs for this AP object.

        This allows consumers of the access point objects to validate radvd
        behavior.

        Returns:
            A string of the radvd logs, or None if a radvd server has not been
            started.
        """
        if self._radvd:
            return self._radvd.pull_logs()
        return None

    def enable_forwarding(self) -> None:
        """Enable IPv4 and IPv6 forwarding on the AP.

        When forwarding is enabled, the access point is able to route IP packets
        between devices in the same subnet.
        """
        self.ssh.run("echo 1 > /proc/sys/net/ipv4/ip_forward")
        self.ssh.run("echo 1 > /proc/sys/net/ipv6/conf/all/forwarding")

    def start_nat(self) -> None:
        """Start NAT on the AP.

        This allows consumers of the access point objects to enable NAT
        on the AP.

        Note that this is currently a global setting, since we don't
        have per-interface masquerade rules.
        """
        # The following three commands are needed to enable NAT between
        # the WAN and LAN/WLAN ports.  This means anyone connecting to the
        # WLAN/LAN ports will be able to access the internet if the WAN port
        # is connected to the internet.
        self.ssh.run("iptables -t nat -F")
        self.ssh.run(f"iptables -t nat -A POSTROUTING -o {self.wan} -j MASQUERADE")

    def stop_nat(self) -> None:
        """Stop NAT on the AP.

        This allows consumers of the access point objects to disable NAT on the
        AP.

        Note that this is currently a global setting, since we don't have
        per-interface masquerade rules.
        """
        self.ssh.run("iptables -t nat -F")

    def create_bridge(self, bridge_name: str, interfaces: list[str]) -> None:
        """Create the specified bridge and bridge the specified interfaces.

        Args:
            bridge_name: The name of the bridge to create.
            interfaces: A list of interfaces to add to the bridge.
        """

        # Create the bridge interface
        self.ssh.run(f"brctl addbr {bridge_name}")

        for interface in interfaces:
            self.ssh.run(f"brctl addif {bridge_name} {interface}")

        self.ssh.run(f"ip link set {bridge_name} up")

    def remove_bridge(self, bridge_name: str) -> None:
        """Removes the specified bridge

        Args:
            bridge_name: The name of the bridge to remove.
        """
        # Check if the bridge exists.
        #
        # Cases where it may not are if we failed to initialize properly
        #
        # Or if we're doing 2.4Ghz and 5Ghz SSIDs and we've already torn
        # down the bridge once, but we got called for each band.
        result = self.ssh.run(f"brctl show {bridge_name}", ignore_status=True)

        # If the bridge exists, we'll get an exit_status of 0, indicating
        # success, so we can continue and remove the bridge.
        if result.returncode == 0:
            self.ssh.run(f"ip link set {bridge_name} down")
            self.ssh.run(f"brctl delbr {bridge_name}")

    def get_bssid_from_ssid(self, ssid: str, band: str) -> str | None:
        """Gets the BSSID from a provided SSID

        Args:
            ssid: An SSID string.
            band: 2G or 5G Wifi band.
        Returns: The BSSID if on the AP or None if SSID could not be found.
        """
        if band == hostapd_constants.BAND_2G:
            interfaces = [self.wlan_2g, ssid]
        else:
            interfaces = [self.wlan_5g, ssid]

        # Get the interface name associated with the given ssid.
        for interface in interfaces:
            iw_output = self.ssh.run(
                f"iw dev {interface} info|grep ssid|awk -F' ' '{{print $2}}'"
            )
            if "command failed: No such device" in iw_output.stderr:
                continue
            else:
                # If the configured ssid is equal to the given ssid, we found
                # the right interface.
                if iw_output.stdout == ssid:
                    iw_output = self.ssh.run(
                        f"iw dev {interface} info|grep addr|awk -F' ' '{{print $2}}'"
                    )
                    return iw_output.stdout
        return None

    def stop_ap(self, identifier: str) -> None:
        """Stops a running ap on this controller.

        Args:
            identifier: The identity of the ap that should be taken down.
        """

        instance = self._aps.get(identifier)
        if instance is None:
            raise ValueError(f"Invalid identifier {identifier} given")

        if self._radvd:
            self._radvd.stop()
        try:
            self.stop_dhcp()
        except NoInterfaceError:
            pass
        self.stop_nat()
        instance.hostapd.stop()
        self._ip_cmd.clear_ipv4_addresses(identifier)

        del self._aps[identifier]
        # Tear down any bridge interfaces associated with this AP as well.
        bridge_interfaces = self.interfaces.get_bridge_interface()
        for iface in bridge_interfaces:
            BRIDGE_DOWN = f"ip link set {iface} down"
            BRIDGE_DEL = f"brctl delbr {iface}"
            self.ssh.run(BRIDGE_DOWN)
            self.ssh.run(BRIDGE_DEL)

    def stop_all_aps(self) -> None:
        """Stops all running aps on this device."""

        # Iterate over a copy of the keys since stop_ap mutates self._aps.
        for ap in list(self._aps.keys()):
            self.stop_ap(ap)

    def close(self) -> None:
        """Called to take down the entire access point.

        When called will stop all aps running on this host, shutdown the dhcp
        server, and stop the ssh connection.
        """

        if self._aps:
            self.stop_all_aps()
        self.ssh.close()

    def generate_bridge_configs(self, channel: int) -> tuple[str, str | None, str]:
        """Generate a list of configs for a bridge between LAN and WLAN.

        Args:
            channel: the channel WLAN interface is brought up on
        Returns:
            configs: tuple containing iface_wlan, iface_lan and bridge_ip
        """

        # Channels above 14 are in the 5 GHz band.
        if channel < 15:
            iface_wlan = self.wlan_2g
            subnet_str = self._AP_2G_SUBNET_STR
        else:
            iface_wlan = self.wlan_5g
            subnet_str = self._AP_5G_SUBNET_STR

        iface_lan = self.lan

        # NOTE(review): strip("/24") removes any of the characters '/', '2',
        # '4' from both ends, not the literal suffix "/24". It happens to work
        # for the default subnet strings, but would over-strip a subnet whose
        # address ends in 2 or 4 — consider removesuffix("/24") instead.
        a, b, c, _ = subnet_str.strip("/24").split(".")
        bridge_ip = f"{a}.{b}.{c}.{BRIDGE_IP_LAST}"

        return (iface_wlan, iface_lan, bridge_ip)

    def ping(
        self,
        dest_ip: str,
        count: int = 3,
        interval: int = 1000,
        timeout: int = 1000,
        size: int = 56,
        additional_ping_params: str = "",
    ) -> utils.PingResult:
        """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)"""
        return utils.ping(
            self.ssh,
            dest_ip,
            count=count,
            interval=interval,
            timeout=timeout,
            size=size,
            additional_ping_params=additional_ping_params,
        )

    def hard_power_cycle(
        self,
        pdus: list[PduDevice],
    ) -> None:
        """Kills, then restores power to AccessPoint, verifying it goes down and
        comes back online cleanly.

        Args:
            pdus: PDUs in the testbed
        Raises:
            Error, if no PduDevice is provided in AccessPoint config.
            ConnectionError, if AccessPoint fails to go offline or come back.
        """
        if not self.device_pdu_config:
            raise Error("No PduDevice provided in AccessPoint config.")

        self._journalctl_cmd.save_and_reset()

        self.log.info("Power cycling")
        ap_pdu, ap_pdu_port = get_pdu_port_for_device(self.device_pdu_config, pdus)

        self.log.info("Killing power")
        ap_pdu.off(ap_pdu_port)

        self.log.info("Verifying AccessPoint is unreachable.")
        self.ssh_provider.wait_until_unreachable()
        self.log.info("AccessPoint is unreachable as expected.")

        # All hostapd instances died with the power; forget them.
        self._aps.clear()

        self.log.info("Restoring power")
        ap_pdu.on(ap_pdu_port)

        self.log.info("Waiting for AccessPoint to become available via SSH.")
        self.ssh_provider.wait_until_reachable()
        self.log.info("AccessPoint responded to SSH.")

        # Allow 5 seconds for OS to finish getting set up
        time.sleep(5)
        self._initial_ap()
        self.log.info("Power cycled successfully")

    def channel_switch(self, identifier: str, channel_num: int) -> None:
        """Switch to a different channel on the given AP."""
        instance = self._aps.get(identifier)
        if instance is None:
            raise ValueError(f"Invalid identifier {identifier} given")
        self.log.info(f"channel switch to channel {channel_num}")
        instance.hostapd.channel_switch(channel_num)

    def get_current_channel(self, identifier: str) -> int:
        """Find the current channel on the given AP."""
        instance = self._aps.get(identifier)
        if instance is None:
            raise ValueError(f"Invalid identifier {identifier} given")
        return instance.hostapd.get_current_channel()

    def get_stas(self, identifier: str) -> set[str]:
        """Return MAC addresses of all associated STAs on the given AP."""
        instance = self._aps.get(identifier)
        if instance is None:
            raise ValueError(f"Invalid identifier {identifier} given")
        return instance.hostapd.get_stas()

    def sta_authenticated(self, identifier: str, sta_mac: str) -> bool:
        """Is STA authenticated?"""
        instance = self._aps.get(identifier)
        if instance is None:
            raise ValueError(f"Invalid identifier {identifier} given")
        return instance.hostapd.sta_authenticated(sta_mac)

    def sta_associated(self, identifier: str, sta_mac: str) -> bool:
        """Is STA associated?"""
        instance = self._aps.get(identifier)
        if instance is None:
            raise ValueError(f"Invalid identifier {identifier} given")
        return instance.hostapd.sta_associated(sta_mac)

    def sta_authorized(self, identifier: str, sta_mac: str) -> bool:
        """Is STA authorized (802.1X controlled port open)?"""
        instance = self._aps.get(identifier)
        if instance is None:
            raise ValueError(f"Invalid identifier {identifier} given")
        return instance.hostapd.sta_authorized(sta_mac)

    def get_sta_extended_capabilities(
        self, identifier: str, sta_mac: str
    ) -> ExtendedCapabilities:
        """Get extended capabilities for the given STA, as seen by the AP."""
        instance = self._aps.get(identifier)
        if instance is None:
            raise ValueError(f"Invalid identifier {identifier} given")
        return instance.hostapd.get_sta_extended_capabilities(sta_mac)

    def send_bss_transition_management_req(
        self, identifier: str, sta_mac: str, request: BssTransitionManagementRequest
    ) -> None:
        """Send a BSS Transition Management request to an associated STA."""
        instance = self._aps.get(identifier)
        if instance is None:
            raise ValueError(f"Invalid identifier {identifier} given")
        instance.hostapd.send_bss_transition_management_req(sta_mac, request)
+
+
def setup_ap(
    access_point: AccessPoint,
    profile_name: str,
    channel: int,
    ssid: str,
    mode: str | None = None,
    preamble: bool | None = None,
    beacon_interval: int | None = None,
    dtim_period: int | None = None,
    frag_threshold: int | None = None,
    rts_threshold: int | None = None,
    force_wmm: bool | None = None,
    hidden: bool | None = False,
    security: Security | None = None,
    pmf_support: int | None = None,
    additional_ap_parameters: dict[str, Any] | None = None,
    n_capabilities: list[Any] | None = None,
    ac_capabilities: list[Any] | None = None,
    vht_bandwidth: int | None = None,
    wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
    setup_bridge: bool = False,
    is_ipv6_enabled: bool = False,
    is_nat_enabled: bool = True,
) -> list[str]:
    """Creates a hostapd profile and runs it on an ap. This is a convenience
    function that allows us to start an ap with a single function, without first
    creating a hostapd config.

    Args:
        access_point: An ACTS access_point controller
        profile_name: The profile name of one of the hostapd ap presets.
        channel: What channel to set the AP to.
        ssid: The SSID to broadcast.
        mode: The 802.11 mode, forwarded to create_ap_preset.
        preamble: Whether to set short or long preamble
        beacon_interval: The beacon interval
        dtim_period: Length of dtim period
        frag_threshold: Fragmentation threshold
        rts_threshold: RTS threshold
        force_wmm: Enable WMM or not
        hidden: Advertise the SSID or not
        security: What security to enable.
        pmf_support: Whether pmf is not disabled, enabled, or required
        additional_ap_parameters: Additional parameters to send the AP.
        n_capabilities: 802.11n capabilities, forwarded to create_ap_preset.
        ac_capabilities: 802.11ac capabilities, forwarded to create_ap_preset.
        vht_bandwidth: VHT channel width, forwarded to create_ap_preset.
        wnm_features: WNM features to enable on the AP.
        setup_bridge: Whether to bridge the LAN interface WLAN interface.
            Only one WLAN interface can be bridged with the LAN interface
            and none of the guest networks can be bridged.
        is_ipv6_enabled: If True, start a IPv6 router advertisement daemon
        is_nat_enabled: If True, start NAT on the AP to allow the DUT to be able
            to access the internet if the WAN port is connected to the internet.

    Returns:
        An identifier for each ssid being started. These identifiers can be
        used later by this controller to control the ap.

    Raises:
        Error: When the ap can't be brought up.
    """
    # Avoid a shared mutable default by creating the dict per call.
    if additional_ap_parameters is None:
        additional_ap_parameters = {}

    ap = create_ap_preset(
        profile_name=profile_name,
        iface_wlan_2g=access_point.wlan_2g,
        iface_wlan_5g=access_point.wlan_5g,
        channel=channel,
        ssid=ssid,
        mode=mode,
        short_preamble=preamble,
        beacon_interval=beacon_interval,
        dtim_period=dtim_period,
        frag_threshold=frag_threshold,
        rts_threshold=rts_threshold,
        force_wmm=force_wmm,
        hidden=hidden,
        bss_settings=[],
        security=security,
        pmf_support=pmf_support,
        n_capabilities=n_capabilities,
        ac_capabilities=ac_capabilities,
        vht_bandwidth=vht_bandwidth,
        wnm_features=wnm_features,
    )
    return access_point.start_ap(
        hostapd_config=ap,
        radvd_config=RadvdConfig() if is_ipv6_enabled else None,
        setup_bridge=setup_bridge,
        is_nat_enabled=is_nat_enabled,
        additional_parameters=additional_ap_parameters,
    )
diff --git a/packages/antlion/controllers/adb.py b/packages/antlion/controllers/adb.py
new file mode 100644
index 0000000..61597ff
--- /dev/null
+++ b/packages/antlion/controllers/adb.py
@@ -0,0 +1,294 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import re
+import shlex
+import shutil
+
+from antlion.controllers.adb_lib.error import AdbCommandError, AdbError
+from antlion.libs.proc import job
+
+DEFAULT_ADB_TIMEOUT = 60
+DEFAULT_ADB_PULL_TIMEOUT = 180
+
+ADB_REGEX = re.compile("adb:")
+# Uses a regex to be backwards compatible with previous versions of ADB
+# (N and above add the serial to the error msg).
+DEVICE_NOT_FOUND_REGEX = re.compile("error: device (?:'.*?' )?not found")
+DEVICE_OFFLINE_REGEX = re.compile("error: device offline")
+# Raised when adb forward commands fail to forward a port.
+CANNOT_BIND_LISTENER_REGEX = re.compile("error: cannot bind listener:")
+# Expected output is "Android Debug Bridge version 1.0.XX".
+ADB_VERSION_REGEX = re.compile("Android Debug Bridge version 1.0.(\d+)")
+GREP_REGEX = re.compile("grep(\s+)")
+
+ROOT_USER_ID = "0"
+SHELL_USER_ID = "2000"
+
+
def parsing_parcel_output(output):
    """Parse adb output that is in Parcel format.

    The raw output looks like:
      Result: Parcel(
        0x00000000: 00000000 00000014 00390038 00340031 '........8.9.1.4.'
        0x00000010: 00300038 00300030 00300030 00340032 '8.0.0.0.0.0.2.4.'
        0x00000030: 00000000                            '....            ')

    Everything between single quotes is collected, then the padding dots and
    whitespace are stripped, leaving only the payload characters.
    """
    quoted_chunks = re.findall(r"'(.*)'", output)
    combined = "".join(quoted_chunks)
    return re.sub(r"[.\s]", "", combined)
+
+
class AdbProxy(object):
    """Proxy class for ADB.

    For syntactic reasons, the '-' in adb commands need to be replaced with
    '_'. Can directly execute adb commands on an object:
    >> adb = AdbProxy(<serial>)
    >> adb.start_server()
    >> adb.devices() # will return the console output of "adb devices".
    """

    def __init__(self, serial="", ssh_connection=None):
        """Construct an instance of AdbProxy.

        Args:
            serial: str serial number of Android device from `adb devices`
            ssh_connection: SshConnection instance if the Android device is
                            connected to a remote host that we can reach via SSH.

        Raises:
            FileNotFoundError: If no adb binary can be found on the PATH.
        """
        self.serial = serial
        self._server_local_port = None
        adb_path = shutil.which("adb")
        if adb_path is None:
            # shutil.which returns None when the binary is absent; fail with a
            # clear error instead of letting shlex.quote raise a TypeError.
            raise FileNotFoundError("Unable to find 'adb' in PATH")
        adb_cmd = [shlex.quote(adb_path)]
        if serial:
            adb_cmd.append(f"-s {serial}")
        if ssh_connection is not None:
            # Kill all existing adb processes on the remote host (if any).
            # Note that if there are none, then pkill exits with non-zero status.
            ssh_connection.run("pkill adb", ignore_status=True)
            # Copy over the adb binary to a temp dir on the remote host.
            temp_dir = ssh_connection.run("mktemp -d").stdout.strip()
            ssh_connection.send_file(adb_path, temp_dir)
            # Start up a new adb server running as root from the copied binary.
            remote_adb_cmd = "%s/adb %s root" % (
                temp_dir,
                "-s %s" % serial if serial else "",
            )
            ssh_connection.run(remote_adb_cmd)
            # Proxy a local port to the adb server port on the remote host.
            self._server_local_port = ssh_connection.create_ssh_tunnel(5037)

        if self._server_local_port:
            # Use the attribute rather than the branch-local name so the
            # condition and the value it guards always agree.
            adb_cmd.append(f"-P {self._server_local_port}")
        self.adb_str = " ".join(adb_cmd)
        self._ssh_connection = ssh_connection

    def get_user_id(self):
        """Returns the adb user. Either 2000 (shell) or 0 (root)."""
        return self.shell("id -u")

    def is_root(self, user_id=None):
        """Checks if the user is root.

        Args:
            user_id: if supplied, the id to check against.
        Returns:
            True if the user is root. False otherwise.
        """
        if not user_id:
            user_id = self.get_user_id()
        return user_id == ROOT_USER_ID

    def ensure_root(self):
        """Ensures the user is root after making this call.

        Note that this will still fail if the device is a user build, as root
        is not accessible from a user build.

        Returns:
            False if the device is a user build. True otherwise.
        """
        self.ensure_user(ROOT_USER_ID)
        return self.is_root()

    def ensure_user(self, user_id=SHELL_USER_ID):
        """Ensures the user is set to the given user.

        Args:
            user_id: The id of the user.

        Returns:
            True if the device is now running as user_id. False otherwise.
        """
        if self.is_root(user_id):
            self.root()
        else:
            self.unroot()
        self.wait_for_device()
        return self.get_user_id() == user_id

    def _exec_cmd(self, cmd, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):
        """Executes adb commands in a new shell.

        This is specific to executing adb commands.

        Args:
            cmd: A string or list that is the adb command to execute.
            ignore_status: If True, return output even on non-zero exit.
            timeout: Seconds to wait before killing the command.

        Returns:
            The stdout of the adb command.

        Raises:
            AdbError for errors in ADB operations.
            AdbCommandError for errors from commands executed through ADB.
        """
        if isinstance(cmd, list):
            cmd = " ".join(cmd)
        result = job.run(cmd, ignore_status=True, timeout_sec=timeout)
        ret, out, err = result.exit_status, result.stdout, result.stderr

        if any(
            pattern.match(err)
            for pattern in [
                ADB_REGEX,
                DEVICE_OFFLINE_REGEX,
                DEVICE_NOT_FOUND_REGEX,
                CANNOT_BIND_LISTENER_REGEX,
            ]
        ):
            raise AdbError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)
        if "Result: Parcel" in out:
            return parsing_parcel_output(out)
        if ignore_status or (ret == 1 and GREP_REGEX.search(cmd)):
            # grep exiting 1 just means "no match found", not a failure.
            return out or err
        if ret != 0:
            raise AdbCommandError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)
        return out

    def _exec_adb_cmd(self, name, arg_str, **kwargs):
        return self._exec_cmd(f"{self.adb_str} {name} {arg_str}", **kwargs)

    def _exec_cmd_nb(self, cmd, **kwargs):
        """Executes adb commands in a new shell, non blocking.

        Args:
            cmd: A string that is the adb command to execute.
        """
        return job.run_async(cmd, **kwargs)

    def _exec_adb_cmd_nb(self, name, arg_str, **kwargs):
        return self._exec_cmd_nb(f"{self.adb_str} {name} {arg_str}", **kwargs)

    def tcp_forward(self, host_port, device_port):
        """Starts tcp forwarding from localhost to this android device.

        Args:
            host_port: Port number to use on localhost
            device_port: Port number to use on the android device.

        Returns:
            Forwarded port on host as int or command output string on error
        """
        if self._ssh_connection:
            # We have to hop through a remote host first.
            #  1) Find some free port on the remote host's localhost
            #  2) Setup forwarding between that remote port and the requested
            #     device port
            remote_port = self._ssh_connection.find_free_port()
            host_port = self._ssh_connection.create_ssh_tunnel(
                remote_port, local_port=host_port
            )
        output = self.forward(f"tcp:{host_port} tcp:{device_port}", ignore_status=True)
        # If hinted_port is 0, the output will be the selected port.
        # Otherwise, there will be no output upon successfully
        # forwarding the hinted port.
        if not output:
            return host_port
        try:
            output_int = int(output)
        except ValueError:
            return output
        return output_int

    def remove_tcp_forward(self, host_port):
        """Stop tcp forwarding a port from localhost to this android device.

        Args:
            host_port: Port number to use on localhost
        """
        if self._ssh_connection:
            remote_port = self._ssh_connection.close_ssh_tunnel(host_port)
            if remote_port is None:
                logging.warning(
                    "Cannot close unknown forwarded tcp port: %d", host_port
                )
                return
            # The actual port we need to disable via adb is on the remote host.
            host_port = remote_port
        self.forward(f"--remove tcp:{host_port}")

    def getprop(self, prop_name):
        """Get a property of the device.

        This is a convenience wrapper for "adb shell getprop xxx".

        Args:
            prop_name: A string that is the name of the property to get.

        Returns:
            A string that is the value of the property, or None if the property
            doesn't exist.
        """
        return self.shell(f"getprop {prop_name}")

    # TODO: This should be abstracted out into an object like the other shell
    # command.
    def shell(self, command, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):
        return self._exec_adb_cmd(
            "shell", shlex.quote(command), ignore_status=ignore_status, timeout=timeout
        )

    def shell_nb(self, command):
        return self._exec_adb_cmd_nb("shell", shlex.quote(command))

    def __getattr__(self, name):
        # Turns unknown attribute access (e.g. adb.wait_for_device()) into an
        # adb subcommand with '_' mapped back to '-'.
        def adb_call(*args, **kwargs):
            clean_name = name.replace("_", "-")
            if clean_name in ["pull", "push", "remount"] and "timeout" not in kwargs:
                # File transfers get a longer default timeout.
                kwargs["timeout"] = DEFAULT_ADB_PULL_TIMEOUT
            arg_str = " ".join(str(elem) for elem in args)
            return self._exec_adb_cmd(clean_name, arg_str, **kwargs)

        return adb_call

    def get_version_number(self):
        """Returns the version number of ADB as an int (XX in 1.0.XX).

        Raises:
            AdbError if the version number is not found/parsable.
        """
        version_output = self.version()
        match = re.search(ADB_VERSION_REGEX, version_output)

        if not match:
            # Lazy %-args: formatting only happens if the record is emitted.
            logging.error(
                "Unable to capture ADB version from adb version output: %s",
                version_output,
            )
            raise AdbError("adb version", version_output, "", "")
        return int(match.group(1))
diff --git a/src/antlion/controllers/adb_lib/__init__.py b/packages/antlion/controllers/adb_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/adb_lib/__init__.py
rename to packages/antlion/controllers/adb_lib/__init__.py
diff --git a/packages/antlion/controllers/adb_lib/error.py b/packages/antlion/controllers/adb_lib/error.py
new file mode 100644
index 0000000..9599214
--- /dev/null
+++ b/packages/antlion/controllers/adb_lib/error.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion import error
+
+
class AdbError(error.ActsError):
    """Raised when there is an error in adb operations.

    Attributes:
        cmd: The adb command that was executed.
        stdout: Captured standard output of the command.
        stderr: Captured standard error of the command.
        ret_code: Exit status of the command (may be a string in some
            call sites, e.g. when adb output itself could not be parsed).
    """

    def __init__(self, cmd, stdout, stderr, ret_code):
        super().__init__()
        self.cmd = cmd
        self.stdout = stdout
        self.stderr = stderr
        self.ret_code = ret_code

    def __str__(self):
        # Use %s for ret_code: callers such as AdbProxy.get_version_number
        # construct this error with ret_code="" and %d would raise a
        # TypeError while formatting the message.
        return "Error executing adb cmd '%s'. ret: %s, stdout: %s, stderr: %s" % (
            self.cmd,
            self.ret_code,
            self.stdout,
            self.stderr,
        )
+
+
# Distinguishes a failure of the command run on the device (raised by
# AdbProxy._exec_cmd on a non-zero exit status) from a failure of adb itself.
class AdbCommandError(AdbError):
    """Raised when there is an error in the command being run through ADB."""
diff --git a/packages/antlion/controllers/android_device.py b/packages/antlion/controllers/android_device.py
new file mode 100755
index 0000000..e32f3bf
--- /dev/null
+++ b/packages/antlion/controllers/android_device.py
@@ -0,0 +1,1806 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import logging
+import math
+import os
+import re
+import shutil
+import socket
+import time
+from datetime import datetime
+
+from antlion import context
+from antlion import logger as acts_logger
+from antlion import utils
+from antlion.controllers import adb, fastboot
+from antlion.controllers.adb_lib.error import AdbError
+from antlion.controllers.android_lib import errors
+from antlion.controllers.android_lib import events as android_events
+from antlion.controllers.android_lib import logcat, services
+from antlion.controllers.sl4a_lib import sl4a_manager
+from antlion.controllers.utils_lib.ssh import connection, settings
+from antlion.event import event_bus
+from antlion.libs.proc import job
+from antlion.runner import Runner
+
+MOBLY_CONTROLLER_CONFIG_NAME = "AndroidDevice"
+ACTS_CONTROLLER_REFERENCE_NAME = "android_devices"
+
+ANDROID_DEVICE_PICK_ALL_TOKEN = "*"
+# Key name for SL4A extra params in config file
+ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY = "sl4a_client_port"
+ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY = "sl4a_forwarded_port"
+ANDROID_DEVICE_SL4A_SERVER_PORT_KEY = "sl4a_server_port"
+# Key name for adb logcat extra params in config file.
+ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY = "adb_logcat_param"
+ANDROID_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
+ANDROID_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
+CRASH_REPORT_PATHS = (
+    "/data/tombstones/",
+    "/data/vendor/ramdump/",
+    "/data/ramdump/",
+    "/data/vendor/ssrdump",
+    "/data/vendor/ramdump/bluetooth",
+    "/data/vendor/log/cbd",
+)
+CRASH_REPORT_SKIPS = (
+    "RAMDUMP_RESERVED",
+    "RAMDUMP_STATUS",
+    "RAMDUMP_OUTPUT",
+    "bluetooth",
+)
+ALWAYS_ON_LOG_PATH = "/data/vendor/radio/logs/always-on"
+DEFAULT_QXDM_LOG_PATH = "/data/vendor/radio/diag_logs"
+DEFAULT_SDM_LOG_PATH = "/data/vendor/slog/"
+DEFAULT_SCREENSHOT_PATH = "/sdcard/Pictures/screencap"
+BUG_REPORT_TIMEOUT = 1800
+PULL_TIMEOUT = 300
+PORT_RETRY_COUNT = 3
+ADB_ROOT_RETRY_COUNT = 2
+ADB_ROOT_RETRY_INTERVAL = 10
+IPERF_TIMEOUT = 60
+SL4A_APK_NAME = "com.googlecode.android_scripting"
+WAIT_FOR_DEVICE_TIMEOUT = 180
+ENCRYPTION_WINDOW = "CryptKeeper"
+DEFAULT_DEVICE_PASSWORD = "1111"
+RELEASE_ID_REGEXES = [re.compile(r"\w+\.\d+\.\d+"), re.compile(r"N\w+")]
+
+
def create(configs):
    """Creates AndroidDevice controller objects.

    Args:
        configs: A list of dicts, each representing a configuration for an
                 Android device.

    Returns:
        A list of AndroidDevice objects.

    Raises:
        errors.AndroidDeviceConfigError: If configs is empty or malformed.
        errors.AndroidDeviceError: If a configured device is not attached.
    """
    if not configs:
        raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_EMPTY_CONFIG_MSG)
    if configs == ANDROID_DEVICE_PICK_ALL_TOKEN:
        ads = get_all_instances()
    elif not isinstance(configs, list):
        raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
    elif isinstance(configs[0], str):
        # A list of serial numbers.
        ads = get_instances(configs)
    else:
        # A list of config dicts.
        ads = get_instances_with_configs(configs)

    primary = ads[0]
    primary.log.info(f'The primary device under test is "{primary.serial}".')

    for ad in ads:
        if ad.is_connected():
            continue
        raise errors.AndroidDeviceError(
            "Android device %s is specified in config but is not attached."
            % ad.serial,
            serial=ad.serial,
        )
    _start_services_on_ads(ads)
    for ad in ads:
        if ad.droid:
            utils.set_location_service(ad, False)
            utils.sync_device_time(ad)
    return ads
+
+
def destroy(ads):
    """Cleans up AndroidDevice objects.

    Cleanup failures are logged per-device and do not stop the teardown of
    the remaining devices.

    Args:
        ads: A list of AndroidDevice objects.
    """
    for ad in ads:
        try:
            ad.clean_up()
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not silently swallowed during teardown.
            ad.log.exception("Failed to clean up properly.")
+
+
def get_info(ads):
    """Get information on a list of AndroidDevice objects.

    Args:
        ads: A list of AndroidDevice objects.

    Returns:
        A list of dict, each representing info for an AndroidDevice object:
        its serial, model, and build info.
    """
    return [
        {"serial": ad.serial, "model": ad.model, **ad.build_info} for ad in ads
    ]
+
+
+def _start_services_on_ads(ads):
+    """Starts long running services on multiple AndroidDevice objects.
+
+    If any one AndroidDevice object fails to start services, cleans up all
+    existing AndroidDevice objects and their services.
+
+    Args:
+        ads: A list of AndroidDevice objects whose services to start.
+    """
+    running_ads = []
+    for ad in ads:
+        running_ads.append(ad)
+        try:
+            ad.start_services()
+        except:
+            ad.log.exception("Failed to start some services, abort!")
+            destroy(running_ads)
+            raise
+
+
+def _parse_device_list(device_list_str, key):
+    """Parses a byte string representing a list of devices. The string is
+    generated by calling either adb or fastboot.
+
+    Args:
+        device_list_str: Output of adb or fastboot.
+        key: The token that signifies a device in device_list_str.
+
+    Returns:
+        A list of android device serial numbers.
+    """
+    return re.findall(r"(\S+)\t%s" % key, device_list_str)
+
+
def list_adb_devices():
    """List all android devices connected to the computer that are detected by
    adb.

    Returns:
        A list of android device serials. Empty if there's none.
    """
    return _parse_device_list(adb.AdbProxy().devices(), "device")
+
+
def list_fastboot_devices():
    """List all android devices connected to the computer that are in
    fastboot mode. These are detected by fastboot.

    Returns:
        A list of android device serials. Empty if there's none.
    """
    return _parse_device_list(fastboot.FastbootProxy().devices(), "fastboot")
+
+
def get_instances(serials):
    """Create AndroidDevice instances from a list of serials.

    Args:
        serials: A list of android device serials.

    Returns:
        A list of AndroidDevice objects.
    """
    return [AndroidDevice(serial) for serial in serials]
+
+
def _pop_int(config, key, default):
    """Pops config[key] and converts it to an int, or returns default.

    Args:
        config: dict, a device config; key is removed from it when present.
        key: string, the config key holding a numeric value.
        default: the value returned when key is absent from config.

    Raises:
        errors.AndroidDeviceConfigError: If the value is not a valid number.
    """
    if key not in config:
        return default
    try:
        return int(config.pop(key))
    except ValueError as e:
        raise errors.AndroidDeviceConfigError(
            "'%s' is not a valid number for config %s" % (key, config)
        ) from e


def get_instances_with_configs(configs):
    """Create AndroidDevice instances from a list of json configs.

    Each config should have the required key-value pair "serial".

    Args:
        configs: A list of dicts each representing the configuration of one
            android device.

    Returns:
        A list of AndroidDevice objects.

    Raises:
        errors.AndroidDeviceConfigError: If a config lacks "serial" or has a
            malformed SL4A port value.
    """
    results = []
    for c in configs:
        try:
            serial = c.pop("serial")
        except KeyError:
            raise errors.AndroidDeviceConfigError(
                f"Required value 'serial' is missing in AndroidDevice config {c}."
            )
        # The three SL4A port settings are optional integers; parse them with
        # a shared helper rather than three copy-pasted try/except blocks.
        client_port = _pop_int(c, ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY, 0)
        server_port = _pop_int(c, ANDROID_DEVICE_SL4A_SERVER_PORT_KEY, None)
        forwarded_port = _pop_int(c, ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY, 0)
        ssh_config = c.pop("ssh_config", None)
        ssh_connection = None
        if ssh_config is not None:
            ssh_settings = settings.from_config(ssh_config)
            ssh_connection = connection.SshConnection(ssh_settings)
        ad = AndroidDevice(
            serial,
            ssh_connection=ssh_connection,
            client_port=client_port,
            forwarded_port=forwarded_port,
            server_port=server_port,
        )
        # Whatever remains in the config dict becomes user-supplied attributes.
        ad.load_config(c)
        results.append(ad)
    return results
+
+
def get_all_instances(include_fastboot=False):
    """Create AndroidDevice instances for all attached android devices.

    Args:
        include_fastboot: Whether to include devices in bootloader mode or not.

    Returns:
        A list of AndroidDevice objects each representing an android device
        attached to the computer.
    """
    serials = list_adb_devices()
    if include_fastboot:
        serials = serials + list_fastboot_devices()
    return get_instances(serials)
+
+
def filter_devices(ads, func):
    """Finds the AndroidDevice instances from a list that match certain
    conditions.

    Args:
        ads: A list of AndroidDevice instances.
        func: A function that takes an AndroidDevice object and returns True
            if the device satisfies the filter condition.

    Returns:
        A list of AndroidDevice instances that satisfy the filter condition.
    """
    return [ad for ad in ads if func(ad)]
+
+
def get_device(ads, **kwargs):
    """Finds a unique AndroidDevice instance from a list that has specific
    attributes of certain values.

    Example:
        get_device(android_devices, label="foo", phone_number="1234567890")
        get_device(android_devices, model="angler")

    Args:
        ads: A list of AndroidDevice instances.
        kwargs: keyword arguments used to filter AndroidDevice instances.

    Returns:
        The target AndroidDevice instance.

    Raises:
        ValueError: If none or more than one device is matched.
    """

    def _matches(ad):
        # Every requested attribute must exist and hold the expected value.
        for attr, expected in kwargs.items():
            if not hasattr(ad, attr) or getattr(ad, attr) != expected:
                return False
        return True

    matched = [ad for ad in ads if _matches(ad)]
    if not matched:
        raise ValueError(
            f"Could not find a target device that matches condition: {kwargs}."
        )
    if len(matched) > 1:
        serials = [ad.serial for ad in matched]
        raise ValueError(f"More than one device matched: {serials}")
    return matched[0]
+
+
def take_bug_reports(ads, test_name, begin_time):
    """Takes bug reports on a list of android devices.

    If you want to take a bug report, call this function with a list of
    android_device objects in on_fail. Bug reports will be taken on all the
    devices in the list concurrently. A bug report takes a relatively long
    time, so use this cautiously.

    Args:
        ads: A list of AndroidDevice instances.
        test_name: Name of the test case that triggered this bug report.
        begin_time: Logline format timestamp taken when the test started.
    """

    def _take_br(test_name, begin_time, ad):
        ad.take_bug_report(test_name, begin_time)

    utils.concurrent_exec(_take_br, [(test_name, begin_time, ad) for ad in ads])
+
+
+class AndroidDevice:
+    """Class representing an android device.
+
+    Each object of this class represents one Android device in ACTS, including
+    handles to adb, fastboot, and sl4a clients. In addition to direct adb
+    commands, this object also uses adb port forwarding to talk to the Android
+    device.
+
+    Attributes:
+        serial: A string that's the serial number of the Android device.
+        log_path: A string that is the path where all logs collected on this
+                  android device should be stored.
+        log: A logger adapted from root logger with added token specific to an
+             AndroidDevice instance.
+        adb_logcat_process: A process that collects the adb logcat.
+        adb: An AdbProxy object used for interacting with the device via adb.
+        fastboot: A FastbootProxy object used for interacting with the device
+                  via fastboot.
+        client_port: Preferred client port number on the PC host side for SL4A
+        forwarded_port: Preferred server port number forwarded from Android
+                        to the host PC via adb for SL4A connections
+        server_port: Preferred server port used by SL4A on Android device
+
+    """
+
    def __init__(
        self,
        serial: str = "",
        ssh_connection: Runner | None = None,
        client_port: int = 0,
        forwarded_port: int = 0,
        server_port: int | None = None,
    ):
        """Initialize an AndroidDevice and its adb/fastboot proxies.

        Args:
            serial: Serial number of the device as reported by `adb devices`.
            ssh_connection: Runner for a remote host the device is attached
                to, or None when the device is attached locally.
            client_port: Preferred client port number on the host for SL4A.
            forwarded_port: Preferred host port forwarded to the device for
                SL4A connections.
            server_port: Preferred SL4A server port on the device, or None.
        """
        self.serial = serial
        # logging.log_path only exists when this is used in an ACTS test run.
        log_path_base = getattr(logging, "log_path", "/tmp/logs")
        self.log_dir = f"AndroidDevice{serial}"
        self.log_path = os.path.join(log_path_base, self.log_dir)
        self.client_port = client_port
        self.forwarded_port = forwarded_port
        self.server_port = server_port
        # Logger adapter constructed with this device's serial so log lines
        # can be attributed to the device.
        self.log = AndroidDeviceLoggerAdapter(logging.getLogger(), {"serial": serial})
        self._event_dispatchers = {}
        self._services = []
        self.register_service(services.AdbLogcatService(self))
        self.register_service(services.Sl4aService(self))
        self.adb_logcat_process = None
        self.adb = adb.AdbProxy(serial, ssh_connection=ssh_connection)
        self.fastboot = fastboot.FastbootProxy(serial, ssh_connection=ssh_connection)
        # Restarting adb as root is skipped while the device is in
        # bootloader/fastboot mode.
        if not self.is_bootloader:
            self.root_adb()
        self._ssh_connection = ssh_connection
        self.skip_sl4a = False
        self.crash_report = None
        # Per-category counters; unseen keys default to 0.
        self.data_accounting = collections.defaultdict(int)
        self._sl4a_manager = sl4a_manager.create_sl4a_manager(self.adb)
        self.last_logcat_timestamp = None
        # Device info cache.
        self._user_added_device_info = {}
        self._sdk_api_level = None
+
+    def clean_up(self):
+        """Cleans up the AndroidDevice object and releases any resources it
+        claimed.
+        """
+        self.stop_services()
+        for service in self._services:
+            service.unregister()
+        self._services.clear()
+        if self._ssh_connection:
+            self._ssh_connection.close()
+
    def recreate_services(self, serial):
        """Clean up the AndroidDevice object and re-create adb/sl4a services.

        Unregister the existing services and re-create adb and sl4a services,
        call this method when the connection break after certain API call
        (e.g., enable USB tethering by #startTethering)

        Args:
            serial: the serial number of the AndroidDevice
        """
        # NOTE(review): `serial` is not referenced by this body — confirm
        # whether it is needed or kept only for interface compatibility.
        # Clean the old services
        for service in self._services:
            service.unregister()
        self._services.clear()
        if self._ssh_connection:
            self._ssh_connection.close()
        self._sl4a_manager.stop_service()

        # Wait for old services to stop
        time.sleep(5)

        # Re-create the new adb and sl4a services
        self.register_service(services.AdbLogcatService(self))
        self.register_service(services.Sl4aService(self))
        self.adb.wait_for_device()
        self.terminate_all_sessions()
        self.start_services()
+
    def register_service(self, service):
        """Registers the service on the device.

        Args:
            service: A service object; its register() hook is invoked
                immediately and the service is tracked for later cleanup.
        """
        service.register()
        self._services.append(service)
+
+    # TODO(angli): This function shall be refactored to accommodate all services
+    # and not have hard coded switch for SL4A when b/29157104 is done.
+    def start_services(self, skip_setup_wizard=True):
+        """Starts long running services on the android device.
+
+        1. Start adb logcat capture.
+        2. Start SL4A if not skipped.
+
+        Args:
+            skip_setup_wizard: Whether or not to skip the setup wizard.
+        """
+        if skip_setup_wizard:
+            self.exit_setup_wizard()
+
+        event_bus.post(android_events.AndroidStartServicesEvent(self))
+
+    def stop_services(self):
+        """Stops long running services on the android device.
+
+        Stop adb logcat and terminate sl4a sessions if exist.
+        """
+        event_bus.post(
+            android_events.AndroidStopServicesEvent(self), ignore_errors=True
+        )
+
+    def is_connected(self):
+        out = self.adb.devices()
+        devices = _parse_device_list(out, "device")
+        return self.serial in devices
+
+    @property
+    def build_info(self):
+        """Get the build info of this Android device, including build id and
+        build type.
+
+        This is not available if the device is in bootloader mode.
+
+        Returns:
+            A dict with the build info of this Android device, or None if the
+            device is in bootloader mode.
+        """
+        if self.is_bootloader:
+            self.log.error("Device is in fastboot mode, could not get build " "info.")
+            return
+
+        build_id = self.adb.getprop("ro.build.id")
+        incremental_build_id = self.adb.getprop("ro.build.version.incremental")
+        valid_build_id = False
+        for regex in RELEASE_ID_REGEXES:
+            if re.match(regex, build_id):
+                valid_build_id = True
+                break
+        if not valid_build_id:
+            build_id = incremental_build_id
+
+        info = {
+            "build_id": build_id,
+            "incremental_build_id": incremental_build_id,
+            "build_type": self.adb.getprop("ro.build.type"),
+        }
+        return info
+
+    @property
+    def device_info(self):
+        """Information to be pulled into controller info.
+
+        The latest serial, model, and build_info are included. Additional info
+        can be added via `add_device_info`.
+        """
+        info = {
+            "serial": self.serial,
+            "model": self.model,
+            "build_info": self.build_info,
+            "user_added_info": self._user_added_device_info,
+            "flavor": self.flavor,
+        }
+        return info
+
+    def add_device_info(self, name, info):
+        """Add custom device info to the user_added_info section.
+
+        Adding the same info name the second time will override existing info.
+
+        Args:
+          name: string, name of this info.
+          info: serializable, content of the info.
+        """
+        self._user_added_device_info.update({name: info})
+
+    def sdk_api_level(self):
+        if self._sdk_api_level is not None:
+            return self._sdk_api_level
+        if self.is_bootloader:
+            self.log.error("Device is in fastboot mode. Cannot get build info.")
+            return
+        self._sdk_api_level = int(self.adb.shell("getprop ro.build.version.sdk"))
+        return self._sdk_api_level
+
+    @property
+    def is_bootloader(self):
+        """True if the device is in bootloader mode."""
+        return self.serial in list_fastboot_devices()
+
+    @property
+    def is_adb_root(self):
+        """True if adb is running as root for this device."""
+        try:
+            return "0" == self.adb.shell("id -u")
+        except AdbError:
+            # Wait a bit and retry to work around adb flakiness for this cmd.
+            time.sleep(0.2)
+            return "0" == self.adb.shell("id -u")
+
+    @property
+    def model(self):
+        """The Android code name for the device."""
+        # If device is in bootloader mode, get mode name from fastboot.
+        if self.is_bootloader:
+            out = self.fastboot.getvar("product").strip()
+            # "out" is never empty because of the "total time" message fastboot
+            # writes to stderr.
+            lines = out.split("\n", 1)
+            if lines:
+                tokens = lines[0].split(" ")
+                if len(tokens) > 1:
+                    return tokens[1].lower()
+            return None
+        model = self.adb.getprop("ro.build.product").lower()
+        if model == "sprout":
+            return model
+        else:
+            return self.adb.getprop("ro.product.name").lower()
+
+    @property
+    def flavor(self):
+        """Returns the specific flavor of Android build the device is using."""
+        return self.adb.getprop("ro.build.flavor").lower()
+
+    @property
+    def droid(self):
+        """Returns the RPC Service of the first Sl4aSession created."""
+        if len(self._sl4a_manager.sessions) > 0:
+            session_id = sorted(self._sl4a_manager.sessions.keys())[0]
+            return self._sl4a_manager.sessions[session_id].rpc_client
+        else:
+            return None
+
+    @property
+    def ed(self):
+        """Returns the event dispatcher of the first Sl4aSession created."""
+        if len(self._sl4a_manager.sessions) > 0:
+            session_id = sorted(self._sl4a_manager.sessions.keys())[0]
+            return self._sl4a_manager.sessions[session_id].get_event_dispatcher()
+        else:
+            return None
+
    @property
    def sl4a_sessions(self):
        """Returns a list of the current session ids.

        NOTE(review): despite the name and the original docstring suggesting a
        mapping, `list(dict)` yields only the keys — callers receive the
        session ids, not the session objects.
        """
        return list(self._sl4a_manager.sessions)
+
+    @property
+    def is_adb_logcat_on(self):
+        """Whether there is an ongoing adb logcat collection."""
+        if self.adb_logcat_process:
+            if self.adb_logcat_process.is_running():
+                return True
+            else:
+                # if skip_sl4a is true, there is no sl4a session
+                # if logcat died due to device reboot and sl4a session has
+                # not restarted there is no droid.
+                if self.droid:
+                    self.droid.logI("Logcat died")
+                self.log.info("Logcat to %s died", self.log_path)
+                return False
+        return False
+
+    @property
+    def device_log_path(self):
+        """Returns the directory for all Android device logs for the current
+        test context and serial.
+        """
+        return context.get_current_context().get_full_output_path(self.serial)
+
+    def update_sdk_api_level(self):
+        self._sdk_api_level = None
+        self.sdk_api_level()
+
+    def load_config(self, config):
+        """Add attributes to the AndroidDevice object based on json config.
+
+        Args:
+            config: A dictionary representing the configs.
+
+        Raises:
+            AndroidDeviceError is raised if the config is trying to overwrite
+            an existing attribute.
+        """
+        for k, v in config.items():
+            # skip_sl4a value can be reset from config file
+            if hasattr(self, k) and k != "skip_sl4a":
+                raise errors.AndroidDeviceError(
+                    f"Attempting to set existing attribute {k} on {self.serial}",
+                    serial=self.serial,
+                )
+            setattr(self, k, v)
+
+    def root_adb(self):
+        """Change adb to root mode for this device if allowed.
+
+        If executed on a production build, adb will not be switched to root
+        mode per security restrictions.
+        """
+        if self.is_adb_root:
+            return
+
+        for attempt in range(ADB_ROOT_RETRY_COUNT):
+            try:
+                self.log.debug(f"Enabling ADB root mode: attempt {attempt}.")
+                self.adb.root()
+            except AdbError:
+                if attempt == ADB_ROOT_RETRY_COUNT:
+                    raise
+                time.sleep(ADB_ROOT_RETRY_INTERVAL)
+        self.adb.wait_for_device()
+
+    def get_droid(self, handle_event=True):
+        """Create an sl4a connection to the device.
+
+        Return the connection handler 'droid'. By default, another connection
+        on the same session is made for EventDispatcher, and the dispatcher is
+        returned to the caller as well.
+        If sl4a server is not started on the device, try to start it.
+
+        Args:
+            handle_event: True if this droid session will need to handle
+                events.
+
+        Returns:
+            droid: Android object used to communicate with sl4a on the android
+                device.
+            ed: An optional EventDispatcher to organize events for this droid.
+
+        Examples:
+            Don't need event handling:
+            >>> ad = AndroidDevice()
+            >>> droid = ad.get_droid(False)
+
+            Need event handling:
+            >>> ad = AndroidDevice()
+            >>> droid, ed = ad.get_droid()
+        """
+        self.log.debug(
+            "Creating RPC client_port={}, forwarded_port={}, server_port={}".format(
+                self.client_port, self.forwarded_port, self.server_port
+            )
+        )
+        session = self._sl4a_manager.create_session(
+            client_port=self.client_port,
+            forwarded_port=self.forwarded_port,
+            server_port=self.server_port,
+        )
+        droid = session.rpc_client
+        if handle_event:
+            ed = session.get_event_dispatcher()
+            return droid, ed
+        return droid
+
+    def get_package_pid(self, package_name):
+        """Gets the pid for a given package. Returns None if not running.
+        Args:
+            package_name: The name of the package.
+        Returns:
+            The first pid found under a given package name. None if no process
+            was found running the package.
+        Raises:
+            AndroidDeviceError if the output of the phone's process list was
+            in an unexpected format.
+        """
+        for cmd in ("ps -A", "ps"):
+            try:
+                out = self.adb.shell(
+                    f'{cmd} | grep "S {package_name}"', ignore_status=True
+                )
+                if package_name not in out:
+                    continue
+                try:
+                    pid = int(out.split()[1])
+                    self.log.info("apk %s has pid %s.", package_name, pid)
+                    return pid
+                except (IndexError, ValueError) as e:
+                    # Possible ValueError from string to int cast.
+                    # Possible IndexError from split.
+                    self.log.warning(
+                        'Command "%s" returned output line: ' '"%s".\nError: %s',
+                        cmd,
+                        out,
+                        e,
+                    )
+            except Exception as e:
+                self.log.warning(
+                    'Device fails to check if %s running with "%s"\n' "Exception %s",
+                    package_name,
+                    cmd,
+                    e,
+                )
+        self.log.debug("apk %s is not running", package_name)
+        return None
+
+    def get_dispatcher(self, droid):
+        """Return an EventDispatcher for an sl4a session
+
+        Args:
+            droid: Session to create EventDispatcher for.
+
+        Returns:
+            ed: An EventDispatcher for specified session.
+        """
+        return self._sl4a_manager.sessions[droid.uid].get_event_dispatcher()
+
+    def _is_timestamp_in_range(self, target, log_begin_time, log_end_time):
+        low = acts_logger.logline_timestamp_comparator(log_begin_time, target) <= 0
+        high = acts_logger.logline_timestamp_comparator(log_end_time, target) >= 0
+        return low and high
+
+    def cat_adb_log(self, tag, begin_time, end_time=None, dest_path="AdbLogExcerpts"):
+        """Takes an excerpt of the adb logcat log from a certain time point to
+        current time.
+
+        Args:
+            tag: An identifier of the time period, usually the name of a test.
+            begin_time: Epoch time of the beginning of the time period.
+            end_time: Epoch time of the ending of the time period, default None
+            dest_path: Destination path of the excerpt file.
+        """
+        log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time)
+        if end_time is None:
+            log_end_time = acts_logger.get_log_line_timestamp()
+        else:
+            log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
+        self.log.debug("Extracting adb log from logcat.")
+        logcat_path = os.path.join(
+            self.device_log_path, f"adblog_{self.serial}_debug.txt"
+        )
+        if not os.path.exists(logcat_path):
+            self.log.warning(f"Logcat file {logcat_path} does not exist.")
+            return
+        adb_excerpt_dir = os.path.join(self.log_path, dest_path)
+        os.makedirs(adb_excerpt_dir, exist_ok=True)
+        out_name = "%s,%s.txt" % (
+            acts_logger.normalize_log_line_timestamp(log_begin_time),
+            self.serial,
+        )
+        tag_len = utils.MAX_FILENAME_LEN - len(out_name)
+        out_name = f"{tag[:tag_len]},{out_name}"
+        adb_excerpt_path = os.path.join(adb_excerpt_dir, out_name)
+        with open(adb_excerpt_path, "w", encoding="utf-8") as out:
+            in_file = logcat_path
+            with open(in_file, "r", encoding="utf-8", errors="replace") as f:
+                while True:
+                    line = None
+                    try:
+                        line = f.readline()
+                        if not line:
+                            break
+                    except:
+                        continue
+                    line_time = line[: acts_logger.log_line_timestamp_len]
+                    if not acts_logger.is_valid_logline_timestamp(line_time):
+                        continue
+                    if self._is_timestamp_in_range(
+                        line_time, log_begin_time, log_end_time
+                    ):
+                        if not line.endswith("\n"):
+                            line += "\n"
+                        out.write(line)
+        return adb_excerpt_path
+
+    def search_logcat(
+        self, matching_string, begin_time=None, end_time=None, logcat_path=None
+    ):
+        """Search logcat message with given string.
+
+        Args:
+            matching_string: matching_string to search.
+            begin_time: only the lines with time stamps later than begin_time
+                will be searched.
+            end_time: only the lines with time stamps earlier than end_time
+                will be searched.
+            logcat_path: the path of a specific file in which the search should
+                be performed. If None the path will be the default device log
+                path.
+
+        Returns:
+            A list of dictionaries with full log message, time stamp string,
+            time object and message ID. For example:
+            [{"log_message": "05-03 17:39:29.898   968  1001 D"
+                              "ActivityManager: Sending BOOT_COMPLETE user #0",
+              "time_stamp": "2017-05-03 17:39:29.898",
+              "datetime_obj": datetime object,
+              "message_id": None}]
+
+            [{"log_message": "08-12 14:26:42.611043  2360  2510 D RILJ    : "
+                             "[0853]< DEACTIVATE_DATA_CALL  [PHONE0]",
+              "time_stamp": "2020-08-12 14:26:42.611043",
+              "datetime_obj": datetime object},
+              "message_id": "0853"}]
+        """
+        if not logcat_path:
+            logcat_path = os.path.join(
+                self.device_log_path, f"adblog_{self.serial}_debug.txt"
+            )
+        if not os.path.exists(logcat_path):
+            self.log.warning(f"Logcat file {logcat_path} does not exist.")
+            return
+        output = job.run(f"grep '{matching_string}' {logcat_path}", ignore_status=True)
+        if not output.stdout or output.exit_status != 0:
+            return []
+        if begin_time:
+            if not isinstance(begin_time, datetime):
+                log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time)
+                begin_time = datetime.strptime(log_begin_time, "%Y-%m-%d %H:%M:%S.%f")
+        if end_time:
+            if not isinstance(end_time, datetime):
+                log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
+                end_time = datetime.strptime(log_end_time, "%Y-%m-%d %H:%M:%S.%f")
+        result = []
+        logs = re.findall(r"(\S+\s\S+)(.*)", output.stdout)
+        for log in logs:
+            time_stamp = log[0]
+            time_obj = datetime.strptime(time_stamp, "%Y-%m-%d %H:%M:%S.%f")
+
+            if begin_time and time_obj < begin_time:
+                continue
+
+            if end_time and time_obj > end_time:
+                continue
+
+            res = re.findall(r".*\[(\d+)\]", log[1])
+            try:
+                message_id = res[0]
+            except:
+                message_id = None
+
+            result.append(
+                {
+                    "log_message": "".join(log),
+                    "time_stamp": time_stamp,
+                    "datetime_obj": time_obj,
+                    "message_id": message_id,
+                }
+            )
+        return result
+
+    def start_adb_logcat(self):
+        """Starts a standing adb logcat collection in separate subprocesses and
+        save the logcat in a file.
+        """
+        if self.is_adb_logcat_on:
+            self.log.warning(
+                "Android device %s already has a running adb logcat thread. "
+                % self.serial
+            )
+            return
+        # Disable adb log spam filter. Have to stop and clear settings first
+        # because 'start' doesn't support --clear option before Android N.
+        self.adb.shell("logpersist.stop --clear", ignore_status=True)
+        self.adb.shell("logpersist.start", ignore_status=True)
+        if hasattr(self, "adb_logcat_param"):
+            extra_params = self.adb_logcat_param
+        else:
+            extra_params = "-b all"
+
+        self.adb_logcat_process = logcat.create_logcat_keepalive_process(
+            self.serial, self.log_dir, extra_params
+        )
+        self.adb_logcat_process.start()
+
+    def stop_adb_logcat(self):
+        """Stops the adb logcat collection subprocess."""
+        if not self.is_adb_logcat_on:
+            self.log.warning(
+                f"Android device {self.serial} does not have an ongoing adb logcat "
+            )
+            return
+        # Set the last timestamp to the current timestamp. This may cause
+        # a race condition that allows the same line to be logged twice,
+        # but it does not pose a problem for our logging purposes.
+        self.adb_logcat_process.stop()
+        self.adb_logcat_process = None
+
+    def get_apk_uid(self, apk_name):
+        """Get the uid of the given apk.
+
+        Args:
+        apk_name: Name of the package, e.g., com.android.phone.
+
+        Returns:
+        Linux UID for the apk.
+        """
+        output = self.adb.shell(
+            f"dumpsys package {apk_name} | grep userId=", ignore_status=True
+        )
+        result = re.search(r"userId=(\d+)", output)
+        if result:
+            return result.group(1)
+        else:
+            None
+
+    def get_apk_version(self, package_name):
+        """Get the version of the given apk.
+
+        Args:
+            package_name: Name of the package, e.g., com.android.phone.
+
+        Returns:
+            Version of the given apk.
+        """
+        try:
+            output = self.adb.shell(
+                f"dumpsys package {package_name} | grep versionName"
+            )
+            pattern = re.compile(r"versionName=(.+)", re.I)
+            result = pattern.findall(output)
+            if result:
+                return result[0]
+        except Exception as e:
+            self.log.warning(
+                "Fail to get the version of package %s: %s", package_name, e
+            )
+        self.log.debug("apk %s is not found", package_name)
+        return None
+
+    def is_apk_installed(self, package_name):
+        """Check if the given apk is already installed.
+
+        Args:
+        package_name: Name of the package, e.g., com.android.phone.
+
+        Returns:
+        True if package is installed. False otherwise.
+        """
+
+        try:
+            return bool(
+                self.adb.shell(
+                    f'(pm list packages | grep -w "package:{package_name}") || true'
+                )
+            )
+
+        except Exception as err:
+            self.log.error(
+                "Could not determine if %s is installed. " "Received error:\n%s",
+                package_name,
+                err,
+            )
+            return False
+
+    def is_sl4a_installed(self):
+        return self.is_apk_installed(SL4A_APK_NAME)
+
+    def is_apk_running(self, package_name):
+        """Check if the given apk is running.
+
+        Args:
+            package_name: Name of the package, e.g., com.android.phone.
+
+        Returns:
+        True if package is installed. False otherwise.
+        """
+        for cmd in ("ps -A", "ps"):
+            try:
+                out = self.adb.shell(
+                    f'{cmd} | grep "S {package_name}"', ignore_status=True
+                )
+                if package_name in out:
+                    self.log.info("apk %s is running", package_name)
+                    return True
+            except Exception as e:
+                self.log.warning(
+                    "Device fails to check is %s running by %s " "Exception %s",
+                    package_name,
+                    cmd,
+                    e,
+                )
+                continue
+        self.log.debug("apk %s is not running", package_name)
+        return False
+
+    def is_sl4a_running(self):
+        return self.is_apk_running(SL4A_APK_NAME)
+
+    def force_stop_apk(self, package_name):
+        """Force stop the given apk.
+
+        Args:
+        package_name: Name of the package, e.g., com.android.phone.
+
+        Returns:
+        True if package is installed. False otherwise.
+        """
+        try:
+            self.adb.shell(f"am force-stop {package_name}", ignore_status=True)
+        except Exception as e:
+            self.log.warning("Fail to stop package %s: %s", package_name, e)
+
+    def take_bug_report(self, test_name=None, begin_time=None):
+        """Takes a bug report on the device and stores it in a file.
+
+        Args:
+            test_name: Name of the test case that triggered this bug report.
+            begin_time: Epoch time when the test started. If none is specified,
+                the current time will be used.
+        """
+        self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT)
+        new_br = True
+        try:
+            stdout = self.adb.shell("bugreportz -v")
+            # This check is necessary for builds before N, where adb shell's ret
+            # code and stderr are not propagated properly.
+            if "not found" in stdout:
+                new_br = False
+        except AdbError:
+            new_br = False
+        br_path = self.device_log_path
+        os.makedirs(br_path, exist_ok=True)
+        epoch = begin_time if begin_time else utils.get_current_epoch_time()
+        time_stamp = acts_logger.normalize_log_line_timestamp(
+            acts_logger.epoch_to_log_line_timestamp(epoch)
+        )
+        out_name = f"AndroidDevice{self.serial}_{time_stamp}"
+        out_name = f"{out_name}.zip" if new_br else f"{out_name}.txt"
+        full_out_path = os.path.join(br_path, out_name)
+        # in case device restarted, wait for adb interface to return
+        self.wait_for_boot_completion()
+        if test_name:
+            self.log.info("Taking bugreport for %s.", test_name)
+        else:
+            self.log.info("Taking bugreport.")
+        if new_br:
+            out = self.adb.shell("bugreportz", timeout=BUG_REPORT_TIMEOUT)
+            if not out.startswith("OK"):
+                raise errors.AndroidDeviceError(
+                    f"Failed to take bugreport on {self.serial}: {out}",
+                    serial=self.serial,
+                )
+            br_out_path = out.split(":")[1].strip().split()[0]
+            self.adb.pull(f"{br_out_path} {full_out_path}")
+        else:
+            self.adb.bugreport(f" > {full_out_path}", timeout=BUG_REPORT_TIMEOUT)
+        if test_name:
+            self.log.info("Bugreport for %s taken at %s.", test_name, full_out_path)
+        else:
+            self.log.info("Bugreport taken at %s.", test_name, full_out_path)
+        self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT)
+
+    def get_file_names(
+        self, directory, begin_time=None, skip_files=[], match_string=None
+    ):
+        """Get files names with provided directory."""
+        cmd = f"find {directory} -type f"
+        if begin_time:
+            current_time = utils.get_current_epoch_time()
+            seconds = int(math.ceil((current_time - begin_time) / 1000.0))
+            cmd = f"{cmd} -mtime -{seconds}s"
+        if match_string:
+            cmd = f"{cmd} -iname {match_string}"
+        for skip_file in skip_files:
+            cmd = f"{cmd} ! -iname {skip_file}"
+        out = self.adb.shell(cmd, ignore_status=True)
+        if (
+            not out
+            or "No such" in out
+            or "Permission denied" in out
+            or "Not a directory" in out
+        ):
+            return []
+        files = out.split("\n")
+        self.log.debug("Find files in directory %s: %s", directory, files)
+        return files
+
+    @property
+    def external_storage_path(self):
+        """
+        The $EXTERNAL_STORAGE path on the device. Most commonly set to '/sdcard'
+        """
+        return self.adb.shell("echo $EXTERNAL_STORAGE")
+
+    def file_exists(self, file_path):
+        """Returns whether a file exists on a device.
+
+        Args:
+            file_path: The path of the file to check for.
+        """
+        cmd = f"(test -f {file_path} && echo yes) || echo no"
+        result = self.adb.shell(cmd)
+        if result == "yes":
+            return True
+        elif result == "no":
+            return False
+        raise ValueError(
+            "Couldn't determine if %s exists. "
+            "Expected yes/no, got %s" % (file_path, result[cmd])
+        )
+
+    def pull_files(self, device_paths, host_path=None):
+        """Pull files from devices.
+
+        Args:
+            device_paths: List of paths on the device to pull from.
+            host_path: Destination path
+        """
+        if isinstance(device_paths, str):
+            device_paths = [device_paths]
+        if not host_path:
+            host_path = self.log_path
+        for device_path in device_paths:
+            self.log.info(f"Pull from device: {device_path} -> {host_path}")
+            self.adb.pull(f"{device_path} {host_path}", timeout=PULL_TIMEOUT)
+
+    def check_crash_report(
+        self, test_name=None, begin_time=None, log_crash_report=False
+    ):
+        """check crash report on the device."""
+        crash_reports = []
+        for crash_path in CRASH_REPORT_PATHS:
+            try:
+                cmd = f"cd {crash_path}"
+                self.adb.shell(cmd)
+            except Exception as e:
+                self.log.debug("received exception %s", e)
+                continue
+            crashes = self.get_file_names(
+                crash_path, skip_files=CRASH_REPORT_SKIPS, begin_time=begin_time
+            )
+            if crash_path == "/data/tombstones/" and crashes:
+                tombstones = crashes[:]
+                for tombstone in tombstones:
+                    if self.adb.shell(
+                        f'cat {tombstone} | grep "crash_dump failed to dump process"'
+                    ):
+                        crashes.remove(tombstone)
+            if crashes:
+                crash_reports.extend(crashes)
+        if crash_reports and log_crash_report:
+            crash_log_path = os.path.join(
+                self.device_log_path, f"Crashes_{self.serial}"
+            )
+            os.makedirs(crash_log_path, exist_ok=True)
+            self.pull_files(crash_reports, crash_log_path)
+        return crash_reports
+
+    def get_qxdm_logs(self, test_name="", begin_time=None):
+        """Get qxdm logs."""
+        # Sleep 10 seconds for the buffered log to be written in qxdm log file
+        time.sleep(10)
+        log_path = getattr(self, "qxdm_log_path", DEFAULT_QXDM_LOG_PATH)
+        qxdm_logs = self.get_file_names(
+            log_path, begin_time=begin_time, match_string="*.qmdl"
+        )
+        if qxdm_logs:
+            qxdm_log_path = os.path.join(self.device_log_path, f"QXDM_{self.serial}")
+            os.makedirs(qxdm_log_path, exist_ok=True)
+
+            self.log.info("Pull QXDM Log %s to %s", qxdm_logs, qxdm_log_path)
+            self.pull_files(qxdm_logs, qxdm_log_path)
+
+            self.adb.pull(
+                f"/firmware/image/qdsp6m.qdb {qxdm_log_path}",
+                timeout=PULL_TIMEOUT,
+                ignore_status=True,
+            )
+            # Zip Folder
+            utils.zip_directory(f"{qxdm_log_path}.zip", qxdm_log_path)
+            shutil.rmtree(qxdm_log_path)
+        else:
+            self.log.error(f"Didn't find QXDM logs in {log_path}.")
+        if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
+            omadm_log_path = os.path.join(self.device_log_path, f"OMADM_{self.serial}")
+            os.makedirs(omadm_log_path, exist_ok=True)
+            self.log.info("Pull OMADM Log")
+            self.adb.pull(
+                f"/data/data/com.android.omadm.service/files/dm/log/ {omadm_log_path}",
+                timeout=PULL_TIMEOUT,
+                ignore_status=True,
+            )
+
+    def get_sdm_logs(self, test_name="", begin_time=None):
+        """Get sdm logs."""
+        # Sleep 10 seconds for the buffered log to be written in sdm log file
+        time.sleep(10)
+        log_paths = [
+            ALWAYS_ON_LOG_PATH,
+            getattr(self, "sdm_log_path", DEFAULT_SDM_LOG_PATH),
+        ]
+        sdm_logs = []
+        for path in log_paths:
+            sdm_logs += self.get_file_names(
+                path, begin_time=begin_time, match_string="*.sdm*"
+            )
+        if sdm_logs:
+            sdm_log_path = os.path.join(self.device_log_path, f"SDM_{self.serial}")
+            os.makedirs(sdm_log_path, exist_ok=True)
+            self.log.info("Pull SDM Log %s to %s", sdm_logs, sdm_log_path)
+            self.pull_files(sdm_logs, sdm_log_path)
+        else:
+            self.log.error(f"Didn't find SDM logs in {log_paths}.")
+        if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
+            omadm_log_path = os.path.join(self.device_log_path, f"OMADM_{self.serial}")
+            os.makedirs(omadm_log_path, exist_ok=True)
+            self.log.info("Pull OMADM Log")
+            self.adb.pull(
+                f"/data/data/com.android.omadm.service/files/dm/log/ {omadm_log_path}",
+                timeout=PULL_TIMEOUT,
+                ignore_status=True,
+            )
+
+    def start_new_session(self, max_connections=None, server_port=None):
+        """Start a new session in sl4a.
+
+        Also caches the droid in a dict with its uid being the key.
+
+        Returns:
+            An Android object used to communicate with sl4a on the android
+                device.
+
+        Raises:
+            Sl4aException: Something is wrong with sl4a and it returned an
+            existing uid to a new session.
+        """
+        session = self._sl4a_manager.create_session(
+            max_connections=max_connections, server_port=server_port
+        )
+
+        self._sl4a_manager.sessions[session.uid] = session
+        return session.rpc_client
+
+    def terminate_all_sessions(self):
+        """Terminate all sl4a sessions on the AndroidDevice instance.
+
+        Terminate all sessions and clear caches.
+        """
+        self._sl4a_manager.terminate_all_sessions()
+
    def run_iperf_client_nb(
        self, server_host, extra_args="", timeout=IPERF_TIMEOUT, log_file_path=None
    ):
        """Start iperf client on the device asynchronously (fire-and-forget).

        Args:
            server_host: Address of the iperf server.
            extra_args: A string representing extra arguments for iperf client,
                e.g. "-i 1 -t 30".
            timeout: Unused by this non-blocking variant; kept for signature
                symmetry with run_iperf_client.
            log_file_path: The complete file path to log the results.
        """
        cmd = f"iperf3 -c {server_host} {extra_args}"
        # NOTE(review): the trailing '&' is appended only when a log file is
        # requested; without one the command relies solely on shell_nb for
        # non-blocking behavior — confirm that is intentional.
        if log_file_path:
            cmd += f" --logfile {log_file_path} &"
        self.adb.shell_nb(cmd)
+
+    def run_iperf_client(self, server_host, extra_args="", timeout=IPERF_TIMEOUT):
+        """Start iperf client on the device.
+
+        Return status as true if iperf client start successfully.
+        And data flow information as results.
+
+        Args:
+            server_host: Address of the iperf server.
+            extra_args: A string representing extra arguments for iperf client,
+                e.g. "-i 1 -t 30".
+
+        Returns:
+            status: true if iperf client start successfully.
+            results: results have data flow information
+        """
+        out = self.adb.shell(f"iperf3 -c {server_host} {extra_args}", timeout=timeout)
+        clean_out = out.split("\n")
+        if "error" in clean_out[0].lower():
+            return False, clean_out
+        return True, clean_out
+
+    def run_iperf_server(self, extra_args=""):
+        """Start iperf server on the device
+
+        Return status as true if iperf server started successfully.
+
+        Args:
+            extra_args: A string representing extra arguments for iperf server.
+
+        Returns:
+            status: true if iperf server started successfully.
+            results: results have output of command
+        """
+        out = self.adb.shell(f"iperf3 -s {extra_args}")
+        clean_out = out.split("\n")
+        if "error" in clean_out[0].lower():
+            return False, clean_out
+        return True, clean_out
+
+    def wait_for_boot_completion(self, timeout=900.0):
+        """Waits for Android framework to broadcast ACTION_BOOT_COMPLETED.
+
+        Args:
+            timeout: Seconds to wait for the device to boot. Default value is
+            15 minutes.
+
+        Raises:
+            errors.AndroidDeviceError: if the device does not finish booting
+                within the timeout.
+        """
+        timeout_start = time.time()
+
+        self.log.debug("ADB waiting for device")
+        self.adb.wait_for_device(timeout=timeout)
+        self.log.debug("Waiting for  sys.boot_completed")
+        # Poll sys.boot_completed; adb can transiently fail while the device
+        # is still coming up, so those errors are swallowed and retried.
+        while time.time() < timeout_start + timeout:
+            try:
+                completed = self.adb.getprop("sys.boot_completed")
+                if completed == "1":
+                    self.log.debug("Device has rebooted")
+                    return
+            except AdbError:
+                # adb shell calls may fail during certain period of booting
+                # process, which is normal. Ignoring these errors.
+                pass
+            time.sleep(5)
+        raise errors.AndroidDeviceError(
+            f"Device {self.serial} booting process timed out.", serial=self.serial
+        )
+
+    def reboot(
+        self, stop_at_lock_screen=False, timeout=180, wait_after_reboot_complete=1
+    ):
+        """Reboots the device.
+
+        Terminate all sl4a sessions, reboot the device, wait for device to
+        complete booting, and restart an sl4a session if restart_sl4a is True.
+
+        Args:
+            stop_at_lock_screen: whether to unlock after reboot. Set to False
+                if want to bring the device to reboot up to password locking
+                phase. Sl4a checking need the device unlocked after rebooting.
+            timeout: time in seconds to wait for the device to complete
+                rebooting.
+            wait_after_reboot_complete: time in seconds to wait after the boot
+                completion.
+        """
+        if self.is_bootloader:
+            # In fastboot there are no services to stop; reboot directly.
+            self.fastboot.reboot()
+            return
+        self.stop_services()
+        self.log.info("Rebooting")
+        self.adb.reboot()
+
+        timeout_start = time.time()
+        # b/111791239: Newer versions of android sometimes return early after
+        # `adb reboot` is called. This means subsequent calls may make it to
+        # the device before the reboot goes through, return false positives for
+        # getprops such as sys.boot_completed.
+        while time.time() < timeout_start + timeout:
+            try:
+                self.adb.get_state()
+                time.sleep(0.1)
+            except AdbError:
+                # get_state will raise an error if the device is not found. We
+                # want the device to be missing to prove the device has kicked
+                # off the reboot.
+                break
+        self.wait_for_boot_completion(timeout=(timeout - time.time() + timeout_start))
+
+        self.log.debug("Wait for a while after boot completion.")
+        time.sleep(wait_after_reboot_complete)
+        self.root_adb()
+        # Temporarily force skip_sl4a when stopping at the lock screen, then
+        # restore the caller's original setting afterwards.
+        skip_sl4a = self.skip_sl4a
+        self.skip_sl4a = self.skip_sl4a or stop_at_lock_screen
+        self.start_services()
+        self.skip_sl4a = skip_sl4a
+
+    def restart_runtime(self):
+        """Restarts android runtime.
+
+        Terminate all sl4a sessions, restarts runtime, wait for framework
+        complete restart, and restart an sl4a session if restart_sl4a is True.
+        Unlike reboot(), this restarts only the Android framework, not the
+        kernel.
+        """
+        self.stop_services()
+        self.log.info("Restarting android runtime")
+        self.adb.shell("stop")
+        # Reset the boot completed flag before we restart the framework
+        # to correctly detect when the framework has fully come up.
+        self.adb.shell("setprop sys.boot_completed 0")
+        self.adb.shell("start")
+        self.wait_for_boot_completion()
+        self.root_adb()
+
+        self.start_services()
+
+    def get_ipv4_address(self, interface="wlan0", timeout=5):
+        for timer in range(0, timeout):
+            try:
+                ip_string = self.adb.shell(f"ifconfig {interface}|grep inet")
+                break
+            except adb.AdbError as e:
+                if timer + 1 == timeout:
+                    self.log.warning(f"Unable to find IP address for {interface}.")
+                    return None
+                else:
+                    time.sleep(1)
+        result = re.search("addr:(.*) Bcast", ip_string)
+        if result != None:
+            ip_address = result.group(1)
+            try:
+                socket.inet_aton(ip_address)
+                return ip_address
+            except socket.error:
+                return None
+        else:
+            return None
+
+    def get_ipv4_gateway(self, timeout=5):
+        for timer in range(0, timeout):
+            try:
+                gateway_string = self.adb.shell("dumpsys wifi | grep mDhcpResults")
+                break
+            except adb.AdbError as e:
+                if timer + 1 == timeout:
+                    self.log.warning("Unable to find gateway")
+                    return None
+                else:
+                    time.sleep(1)
+        result = re.search("Gateway (.*) DNS servers", gateway_string)
+        if result != None:
+            ipv4_gateway = result.group(1)
+            try:
+                socket.inet_aton(ipv4_gateway)
+                return ipv4_gateway
+            except socket.error:
+                return None
+        else:
+            return None
+
+    def send_keycode(self, keycode):
+        """Send a key event (e.g. "WAKEUP", "MENU") via `adb shell input`."""
+        self.adb.shell(f"input keyevent KEYCODE_{keycode}")
+
+    def get_my_current_focus_window(self):
+        """Get the current focus window on screen.
+
+        Returns:
+            The focused window name, or "" if it cannot be determined.
+        """
+        output = self.adb.shell(
+            "dumpsys window displays | grep -E mCurrentFocus | grep -v null",
+            ignore_status=True,
+        )
+        if not output or "not found" in output or "Can't find" in output:
+            result = ""
+        else:
+            # The mCurrentFocus line ends with "... <window-name>}"; take the
+            # last token and strip the trailing brace.
+            result = output.split(" ")[-1].strip("}")
+        self.log.debug("Current focus window is %s", result)
+        return result
+
+    def get_my_current_focus_app(self):
+        """Get the current focus application.
+
+        Returns:
+            The focused app name, or "" if neither dumpsys variant yields one.
+        """
+        # Different Android versions expose mFocusedApp under different
+        # dumpsys subcommands; try both in order.
+        dumpsys_cmd = [
+            "dumpsys window | grep -E mFocusedApp",
+            "dumpsys window displays | grep -E mFocusedApp",
+        ]
+        for cmd in dumpsys_cmd:
+            output = self.adb.shell(cmd, ignore_status=True)
+            if (
+                not output
+                or "not found" in output
+                or "Can't find" in output
+                or ("mFocusedApp=null" in output)
+            ):
+                result = ""
+            else:
+                # Second-to-last token of the mFocusedApp line is the app name;
+                # stop at the first command that produced a usable value.
+                result = output.split(" ")[-2]
+                break
+        self.log.debug("Current focus app is %s", result)
+        return result
+
+    def is_window_ready(self, window_name=None):
+        current_window = self.get_my_current_focus_window()
+        if window_name:
+            return window_name in current_window
+        return current_window and ENCRYPTION_WINDOW not in current_window
+
+    def wait_for_window_ready(
+        self, window_name=None, check_interval=5, check_duration=60
+    ):
+        elapsed_time = 0
+        while elapsed_time < check_duration:
+            if self.is_window_ready(window_name=window_name):
+                return True
+            time.sleep(check_interval)
+            elapsed_time += check_interval
+        self.log.info("Current focus window is %s", self.get_my_current_focus_window())
+        return False
+
+    def is_user_setup_complete(self):
+        """Return True if the secure setting user_setup_complete is set."""
+        return "1" in self.adb.shell("settings get secure user_setup_complete")
+
+    def is_screen_awake(self):
+        """Return True if the device screen is awake (not in sleep mode)."""
+        return "Awake" in self.adb.shell("dumpsys power | grep mWakefulness=")
+
+    def is_screen_emergency_dialer(self):
+        """Check if device screen is in emergency dialer mode"""
+        return "EmergencyDialer" in self.get_my_current_focus_window()
+
+    def is_screen_in_call_activity(self):
+        """Check if device screen is in in-call activity notification"""
+        return "InCallActivity" in self.get_my_current_focus_window()
+
+    def is_setupwizard_on(self):
+        """Check if the setup wizard is the currently focused app."""
+        return "setupwizard" in self.get_my_current_focus_app()
+
+    def is_screen_lock_enabled(self):
+        """Check if screen lock is enabled"""
+        # The window policy dump reports keyguard state as "showing=true/false".
+        cmd = "dumpsys window policy | grep showing="
+        out = self.adb.shell(cmd, ignore_status=True)
+        return "true" in out
+
+    def is_waiting_for_unlock_pin(self):
+        """Check if device is waiting for unlock pin to boot up"""
+        current_window = self.get_my_current_focus_window()
+        current_app = self.get_my_current_focus_app()
+        if ENCRYPTION_WINDOW in current_window:
+            self.log.info("Device is in CrpytKeeper window")
+            return True
+        if "StatusBar" in current_window and (
+            (not current_app) or "FallbackHome" in current_app
+        ):
+            self.log.info("Device is locked")
+            return True
+        return False
+
+    def ensure_screen_on(self):
+        """Ensure device screen is powered on.
+
+        Returns:
+            True when the screen is on and (if locked) successfully unlocked;
+            False when the unlock attempts failed.
+        """
+        if self.is_screen_lock_enabled():
+            # Try unlocking twice: first a plain unlock, then with the default
+            # password if the device is still waiting for a PIN.
+            for _ in range(2):
+                self.unlock_screen()
+                time.sleep(1)
+                if self.is_waiting_for_unlock_pin():
+                    self.unlock_screen(password=DEFAULT_DEVICE_PASSWORD)
+                    time.sleep(1)
+                if (
+                    not self.is_waiting_for_unlock_pin()
+                    and self.wait_for_window_ready()
+                ):
+                    return True
+            return False
+        else:
+            self.wakeup_screen()
+            return True
+
+    def wakeup_screen(self):
+        """Wake the screen with a WAKEUP key event if it is asleep."""
+        if not self.is_screen_awake():
+            self.log.info("Screen is not awake, wake it up")
+            self.send_keycode("WAKEUP")
+
+    def go_to_sleep(self):
+        """Put the screen to sleep with a SLEEP key event if it is awake."""
+        if self.is_screen_awake():
+            self.send_keycode("SLEEP")
+
+    def send_keycode_number_pad(self, number):
+        """Send a numeric keypad key event for a single digit."""
+        self.send_keycode(f"NUMPAD_{number}")
+
+    def unlock_screen(self, password=None):
+        """Unlock the screen, optionally entering a numeric password.
+
+        Args:
+            password: digits to type on the number pad; None for swipe/menu
+                unlock only.
+        """
+        self.log.info("Unlocking with %s", password or "swipe up")
+        # Bring device to SLEEP so that unlock process can start fresh
+        self.send_keycode("SLEEP")
+        time.sleep(1)
+        self.send_keycode("WAKEUP")
+        if ENCRYPTION_WINDOW not in self.get_my_current_focus_app():
+            self.send_keycode("MENU")
+        if password:
+            # Clear any stale input, type the digits, confirm, then back out.
+            self.send_keycode("DEL")
+            for number in password:
+                self.send_keycode_number_pad(number)
+            self.send_keycode("ENTER")
+            self.send_keycode("BACK")
+
+    def screenshot(self, name=""):
+        """Take a screenshot on the device.
+
+        Args:
+            name: additional information of screenshot on the file name.
+        """
+        if name:
+            file_name = f"{DEFAULT_SCREENSHOT_PATH}_{name}"
+        file_name = f"{file_name}_{utils.get_current_epoch_time()}.png"
+        self.ensure_screen_on()
+        self.log.info("Log screenshot to %s", file_name)
+        try:
+            self.adb.shell(f"screencap -p {file_name}")
+        except:
+            self.log.error("Fail to log screenshot to %s", file_name)
+
+    def exit_setup_wizard(self):
+        # Handling Android TV's setupwizard is ignored for now.
+        if "feature:android.hardware.type.television" in self.adb.shell(
+            "pm list features"
+        ):
+            return
+        if not self.is_user_setup_complete() or self.is_setupwizard_on():
+            # b/116709539 need this to prevent reboot after skip setup wizard
+            self.adb.shell(
+                "am start -a com.android.setupwizard.EXIT", ignore_status=True
+            )
+            self.adb.shell(
+                f"pm disable {self.get_setupwizard_package_name()}",
+                ignore_status=True,
+            )
+        # Wait up to 5 seconds for user_setup_complete to be updated
+        end_time = time.time() + 5
+        while time.time() < end_time:
+            if self.is_user_setup_complete() or not self.is_setupwizard_on():
+                return
+
+        # If fail to exit setup wizard, set local.prop and reboot
+        if not self.is_user_setup_complete() and self.is_setupwizard_on():
+            self.adb.shell("echo ro.test_harness=1 > /data/local.prop")
+            self.adb.shell("chmod 644 /data/local.prop")
+            self.reboot(stop_at_lock_screen=True)
+
+    def get_setupwizard_package_name(self):
+        """Finds setupwizard package/.activity
+
+        Bypass setupwizard or setupwraith depending on device.
+
+         Returns:
+            packageName/.ActivityName
+        """
+        packages_to_skip = "'setupwizard|setupwraith'"
+        android_package_name = "com.google.android"
+        package = self.adb.shell(
+            "pm list packages -f | grep -E {} | grep {}".format(
+                packages_to_skip, android_package_name
+            )
+        )
+        wizard_package = package.split("=")[1]
+        activity = package.split("=")[0].split("/")[-2]
+        self.log.info(f"{wizard_package}/.{activity}Activity")
+        return f"{wizard_package}/.{activity}Activity"
+
+    def push_system_file(self, src_file_path, dst_file_path, push_timeout=300):
+        """Pushes a file onto the read-only file system.
+
+        For speed, the device is left in root mode after this call, and leaves
+        verity disabled. To re-enable verity, call ensure_verity_enabled().
+
+        Args:
+            src_file_path: The path to the system app to install.
+            dst_file_path: The destination of the file.
+            push_timeout: How long to wait for the push to finish.
+        Returns:
+            Whether or not the install was successful.
+        """
+        self.adb.ensure_root()
+        try:
+            # Verity must be off and the partition remounted read-write before
+            # anything can be pushed onto the system image.
+            self.ensure_verity_disabled()
+            self.adb.remount()
+            out = self.adb.push(
+                f"{src_file_path} {dst_file_path}", timeout=push_timeout
+            )
+            if "error" in out:
+                self.log.error(
+                    "Unable to push system file %s to %s due to %s",
+                    src_file_path,
+                    dst_file_path,
+                    out,
+                )
+                return False
+            return True
+        except Exception as e:
+            # Deliberately broad: push failures are reported via the return
+            # value rather than raised to the caller.
+            self.log.error(
+                "Unable to push system file %s to %s due to %s",
+                src_file_path,
+                dst_file_path,
+                e,
+            )
+            return False
+
+    def ensure_verity_enabled(self):
+        """Ensures that verity is enabled.
+
+        If verity is not enabled, this call will reboot the phone. Note that
+        this only works on debuggable builds.
+        """
+        user = self.adb.get_user_id()
+        # The below properties will only exist if verity has been enabled.
+        system_verity = self.adb.getprop("partition.system.verified")
+        vendor_verity = self.adb.getprop("partition.vendor.verified")
+        if not system_verity or not vendor_verity:
+            self.adb.ensure_root()
+            self.adb.enable_verity()
+            self.reboot()
+            # Restore the adb user that was active before the reboot.
+            self.adb.ensure_user(user)
+
+    def ensure_verity_disabled(self):
+        """Ensures that verity is disabled.
+
+        If verity is enabled, this call will reboot the phone.
+        """
+        user = self.adb.get_user_id()
+        # The below properties will only exist if verity has been enabled.
+        system_verity = self.adb.getprop("partition.system.verified")
+        vendor_verity = self.adb.getprop("partition.vendor.verified")
+        if system_verity or vendor_verity:
+            self.adb.ensure_root()
+            self.adb.disable_verity()
+            self.reboot()
+            # Restore the adb user that was active before the reboot.
+            self.adb.ensure_user(user)
+
+
+class AndroidDeviceLoggerAdapter(logging.LoggerAdapter):
+    def process(self, msg, kwargs):
+        msg = f"[AndroidDevice|{self.extra['serial']}] {msg}"
+        return (msg, kwargs)
diff --git a/src/antlion/controllers/android_lib/__init__.py b/packages/antlion/controllers/android_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/android_lib/__init__.py
rename to packages/antlion/controllers/android_lib/__init__.py
diff --git a/src/antlion/controllers/android_lib/errors.py b/packages/antlion/controllers/android_lib/errors.py
similarity index 100%
rename from src/antlion/controllers/android_lib/errors.py
rename to packages/antlion/controllers/android_lib/errors.py
diff --git a/src/antlion/controllers/android_lib/events.py b/packages/antlion/controllers/android_lib/events.py
similarity index 100%
rename from src/antlion/controllers/android_lib/events.py
rename to packages/antlion/controllers/android_lib/events.py
diff --git a/packages/antlion/controllers/android_lib/logcat.py b/packages/antlion/controllers/android_lib/logcat.py
new file mode 100644
index 0000000..4aab7d0
--- /dev/null
+++ b/packages/antlion/controllers/android_lib/logcat.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import re
+
+from antlion.libs.logging import log_stream
+from antlion.libs.logging.log_stream import LogStyles
+from antlion.libs.proc.process import Process
+
+TIMESTAMP_REGEX = r"((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)"
+
+
+class TimestampTracker(object):
+    """Stores the last timestamp outputted by the Logcat process."""
+
+    def __init__(self):
+        self._last_timestamp = None
+
+    @property
+    def last_timestamp(self):
+        return self._last_timestamp
+
+    def read_output(self, message):
+        """Reads the message and parses all timestamps from it."""
+        all_timestamps = re.findall(TIMESTAMP_REGEX, message)
+        if len(all_timestamps) > 0:
+            self._last_timestamp = all_timestamps[0]
+
+
+def _get_log_level(message):
+    """Returns the log level for the given message."""
+    if message.startswith("-") or len(message) < 37:
+        return logging.ERROR
+    else:
+        log_level = message[36]
+        if log_level in ("V", "D"):
+            return logging.DEBUG
+        elif log_level == "I":
+            return logging.INFO
+        elif log_level == "W":
+            return logging.WARNING
+        elif log_level == "E":
+            return logging.ERROR
+    return logging.NOTSET
+
+
+def _log_line_func(log, timestamp_tracker):
+    """Returns a lambda that logs a message to the given logger."""
+
+    def log_line(message):
+        timestamp_tracker.read_output(message)
+        log.log(_get_log_level(message), message)
+
+    return log_line
+
+
+def _on_retry(serial, extra_params, timestamp_tracker):
+    def on_retry(_):
+        begin_at = '"%s"' % (timestamp_tracker.last_timestamp or 1)
+        additional_params = extra_params or ""
+
+        return f"adb -s {serial} logcat -T {begin_at} -v year {additional_params}"
+
+    return on_retry
+
+
+def create_logcat_keepalive_process(serial, logcat_dir, extra_params=""):
+    """Creates a Logcat Process that automatically attempts to reconnect.
+
+    Args:
+        serial: The serial of the device to read the logcat of.
+        logcat_dir: The directory used for logcat file output.
+        extra_params: Any additional params to be added to the logcat cmdline.
+
+    Returns:
+        An antlion.libs.proc.process.Process object.
+    """
+    logger = log_stream.create_logger(
+        f"adblog_{serial}",
+        log_name=serial,
+        subcontext=logcat_dir,
+        log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG),
+    )
+    # "-T 1" starts from the epoch on first launch; on reconnect _on_retry
+    # rebuilds the command to resume from the last seen timestamp.
+    process = Process(f"adb -s {serial} logcat -T 1 -v year {extra_params}")
+    timestamp_tracker = TimestampTracker()
+    process.set_on_output_callback(_log_line_func(logger, timestamp_tracker))
+    process.set_on_terminate_callback(
+        _on_retry(serial, extra_params, timestamp_tracker)
+    )
+    return process
diff --git a/packages/antlion/controllers/android_lib/services.py b/packages/antlion/controllers/android_lib/services.py
new file mode 100644
index 0000000..098f524
--- /dev/null
+++ b/packages/antlion/controllers/android_lib/services.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion.controllers.android_lib import errors
+from antlion.controllers.android_lib import events as android_events
+from antlion.event import event_bus
+
+
+class AndroidService(object):
+    """The base class for Android long-running services.
+
+    The _start method is registered to an AndroidStartServicesEvent, and
+    the _stop method is registered to an AndroidStopServicesEvent.
+
+    Attributes:
+        ad: The AndroidDevice instance associated with the service.
+        serial: The serial of the device.
+        _registration_ids: List of registration IDs for the event subscriptions.
+    """
+
+    def __init__(self, ad):
+        self.ad = ad
+        self._registration_ids = []
+
+    @property
+    def serial(self):
+        return self.ad.serial
+
+    def register(self):
+        """Registers the _start and _stop methods to their corresponding
+        events.
+        """
+
+        # Only react to events raised for this service's own device.
+        def check_serial(event):
+            return self.serial == event.ad.serial
+
+        self._registration_ids = [
+            event_bus.register(
+                android_events.AndroidStartServicesEvent,
+                self._start,
+                filter_fn=check_serial,
+            ),
+            event_bus.register(
+                android_events.AndroidStopServicesEvent,
+                self._stop,
+                filter_fn=check_serial,
+            ),
+        ]
+
+    def unregister(self):
+        """Unregisters all subscriptions in this service."""
+        event_bus.unregister_all(from_list=self._registration_ids)
+        self._registration_ids.clear()
+
+    def _start(self, start_event):
+        """Start the service. Called upon an AndroidStartServicesEvent.
+
+        Subclasses must override this method.
+
+        Args:
+            start_event: The AndroidStartServicesEvent instance.
+        """
+        raise NotImplementedError
+
+    def _stop(self, stop_event):
+        """Stop the service. Called upon an AndroidStopServicesEvent.
+
+        Subclasses must override this method.
+
+        Args:
+            stop_event: The AndroidStopServicesEvent instance.
+        """
+        raise NotImplementedError
+
+
+class AdbLogcatService(AndroidService):
+    """Service for adb logcat."""
+
+    def _start(self, _):
+        """Start logcat collection on the device."""
+        self.ad.start_adb_logcat()
+
+    def _stop(self, _):
+        """Stop logcat collection on the device."""
+        self.ad.stop_adb_logcat()
+
+
+class Sl4aService(AndroidService):
+    """Service for SL4A."""
+
+    def _start(self, start_event):
+        """Validate preconditions and bring up an SL4A session.
+
+        Raises:
+            errors.AndroidDeviceError: if sl4a.apk is missing or the screen
+                cannot be brought up/unlocked.
+        """
+        if self.ad.skip_sl4a:
+            return
+
+        if not self.ad.is_sl4a_installed():
+            self.ad.log.error("sl4a.apk is not installed")
+            raise errors.AndroidDeviceError(
+                "The required sl4a.apk is not installed", serial=self.serial
+            )
+        if not self.ad.ensure_screen_on():
+            self.ad.log.error("User window cannot come up")
+            raise errors.AndroidDeviceError(
+                "User window cannot come up", serial=self.serial
+            )
+
+        droid, ed = self.ad.get_droid()
+        ed.start()
+
+    def _stop(self, _):
+        """Tear down all sessions and stop the SL4A service."""
+        self.ad.terminate_all_sessions()
+        self.ad._sl4a_manager.stop_service()
diff --git a/src/antlion/controllers/ap_lib/__init__.py b/packages/antlion/controllers/ap_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/ap_lib/__init__.py
rename to packages/antlion/controllers/ap_lib/__init__.py
diff --git a/packages/antlion/controllers/ap_lib/ap_get_interface.py b/packages/antlion/controllers/ap_lib/ap_get_interface.py
new file mode 100644
index 0000000..7836644
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/ap_get_interface.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from typing import TYPE_CHECKING
+
+from antlion.libs.proc import job
+
+if TYPE_CHECKING:
+    from antlion.controllers.access_point import AccessPoint
+
+# Shell commands used on the AP to enumerate network interfaces and bridges.
+GET_ALL_INTERFACE = "ls /sys/class/net"
+GET_VIRTUAL_INTERFACE = "ls /sys/devices/virtual/net"
+BRCTL_SHOW = "brctl show"
+
+
+class ApInterfacesError(Exception):
+    """Raised when AP network-interface discovery or queries fail."""
+
+
+class ApInterfaces(object):
+    """Class to get network interface information for the device."""
+
+    def __init__(
+        self, ap: "AccessPoint", wan_interface_override: str | None = None
+    ) -> None:
+        """Initialize the ApInterface class.
+
+        Args:
+            ap: the ap object within ACTS
+            wan_interface_override: wan interface to use if specified by config
+        """
+        self.ssh = ap.ssh
+        self.wan_interface_override = wan_interface_override
+
+    def get_all_interface(self) -> list[str]:
+        """Get all network interfaces on the device.
+
+        Returns:
+            interfaces_all: list of all the network interfaces on device
+        """
+        output = self.ssh.run(GET_ALL_INTERFACE)
+        interfaces_all = output.stdout.split("\n")
+
+        return interfaces_all
+
+    def get_virtual_interface(self) -> list[str]:
+        """Get all virtual interfaces on the device.
+
+        Returns:
+            interfaces_virtual: list of all the virtual interfaces on device
+        """
+        output = self.ssh.run(GET_VIRTUAL_INTERFACE)
+        interfaces_virtual = output.stdout.split("\n")
+
+        return interfaces_virtual
+
+    def get_physical_interface(self) -> list[str]:
+        """Get all the physical interfaces of the device.
+
+        Get all physical interfaces such as eth ports and wlan ports
+
+        Returns:
+            interfaces_phy: list of all the physical interfaces
+        """
+        interfaces_all = self.get_all_interface()
+        interfaces_virtual = self.get_virtual_interface()
+        # Physical = all minus virtual; set difference does not preserve order.
+        interfaces_phy = list(set(interfaces_all) - set(interfaces_virtual))
+
+        return interfaces_phy
+
+    def get_bridge_interface(self) -> list[str]:
+        """Get all the bridge interfaces of the device.
+
+        Returns:
+            interfaces_bridge: the list of bridge interface names; may be
+                empty if no bridges are configured
+
+        Raises:
+            ApInterfacesError: failing to run brctl
+        """
+        try:
+            output = self.ssh.run(BRCTL_SHOW)
+        except job.Error as e:
+            raise ApInterfacesError(f'failed to execute "{BRCTL_SHOW}"') from e
+
+        lines = output.stdout.split("\n")
+        interfaces_bridge = []
+        for line in lines:
+            interfaces_bridge.append(line.split("\t")[0])
+        # Drop the "bridge name ..." header row, then filter blank entries.
+        interfaces_bridge.pop(0)
+        return [x for x in interfaces_bridge if x != ""]
+
+    def get_wlan_interface(self) -> tuple[str, str]:
+        """Get all WLAN interfaces and specify 2.4 GHz and 5 GHz interfaces.
+
+        Returns:
+            (wlan_2g, wlan_5g): the 2.4 GHz and 5 GHz interface names
+        Raises:
+            ApInterfacesError: Missing at least one WLAN interface
+        """
+        wlan_2g = None
+        wlan_5g = None
+        interfaces_phy = self.get_physical_interface()
+        for iface in interfaces_phy:
+            output = self.ssh.run(f"iwlist {iface} freq")
+            # Channel 06 implies 2.4 GHz support; channel 36 implies 5 GHz.
+            if "Channel 06" in output.stdout and "Channel 36" not in output.stdout:
+                wlan_2g = iface
+            elif "Channel 36" in output.stdout and "Channel 06" not in output.stdout:
+                wlan_5g = iface
+
+        if wlan_2g is None or wlan_5g is None:
+            raise ApInterfacesError("Missing at least one WLAN interface")
+
+        return (wlan_2g, wlan_5g)
+
+    def get_wan_interface(self) -> str:
+        """Get the WAN interface which has internet connectivity. If a wan
+        interface is already specified return that instead.
+
+        Returns:
+            wan: the only one WAN interface
+        Raises:
+            ApInterfacesError: no running WAN can be found
+        """
+        if self.wan_interface_override:
+            return self.wan_interface_override
+
+        wan = None
+        interfaces_phy = self.get_physical_interface()
+        interfaces_wlan = self.get_wlan_interface()
+        # Candidate WAN ports are the physical non-wlan (ethernet) interfaces.
+        interfaces_eth = list(set(interfaces_phy) - set(interfaces_wlan))
+        for iface in interfaces_eth:
+            network_status = self.check_ping(iface)
+            if network_status == 1:
+                wan = iface
+                break
+        if wan:
+            return wan
+
+        # Dump interface state to the log to aid debugging before failing.
+        output = self.ssh.run("ifconfig")
+        interfaces_all = output.stdout.split("\n")
+        logging.info(f"IFCONFIG output = {interfaces_all}")
+
+        raise ApInterfacesError("No WAN interface available")
+
+    def get_lan_interface(self) -> str | None:
+        """Get the LAN interface connecting to local devices.
+
+        Returns:
+            lan: the only one running LAN interface of the devices
+            None, if nothing was found.
+        """
+        lan = None
+        interfaces_phy = self.get_physical_interface()
+        interfaces_wlan = self.get_wlan_interface()
+        interfaces_eth = list(set(interfaces_phy) - set(interfaces_wlan))
+        interface_wan = self.get_wan_interface()
+        interfaces_eth.remove(interface_wan)
+        # The LAN port is the remaining ethernet interface that is RUNNING.
+        for iface in interfaces_eth:
+            output = self.ssh.run(f"ifconfig {iface}")
+            if "RUNNING" in output.stdout:
+                lan = iface
+                break
+        return lan
+
+    def check_ping(self, iface: str) -> int:
+        """Check the ping status on specific interface to determine the WAN.
+
+        Args:
+            iface: the specific interface to check
+        Returns:
+            network_status: 1 if ping over the interface succeeded, else 0
+        """
+        try:
+            self.ssh.run(f"ping -c 3 -I {iface} 8.8.8.8")
+            return 1
+        except job.Error:
+            return 0
diff --git a/packages/antlion/controllers/ap_lib/ap_iwconfig.py b/packages/antlion/controllers/ap_lib/ap_iwconfig.py
new file mode 100644
index 0000000..01346b9
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/ap_iwconfig.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import TYPE_CHECKING
+
+from antlion.runner import CompletedProcess
+
+if TYPE_CHECKING:
+    from antlion.controllers.access_point import AccessPoint
+
+
+class ApIwconfigError(Exception):
+    """Error related to configuring the wireless interface via iwconfig."""
+
+
+class ApIwconfig(object):
+    """Class to configure wireless interface via iwconfig"""
+
+    PROGRAM_FILE = "/usr/local/sbin/iwconfig"
+
+    def __init__(self, ap: "AccessPoint") -> None:
+        """Initialize the ApIwconfig class.
+
+        Args:
+            ap: the ap object within ACTS
+        """
+        self.ssh = ap.ssh
+
+    def ap_iwconfig(
+        self, interface: str, arguments: str | None = None
+    ) -> CompletedProcess:
+        """Configure the wireless interface using iwconfig.
+
+        Returns:
+            output: the output of the command, if any
+        """
+        return self.ssh.run(f"{self.PROGRAM_FILE} {interface} {arguments}")
diff --git a/packages/antlion/controllers/ap_lib/bridge_interface.py b/packages/antlion/controllers/ap_lib/bridge_interface.py
new file mode 100644
index 0000000..b060267
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/bridge_interface.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+
+from antlion.libs.proc import job
+
+_BRCTL = "brctl"
+BRIDGE_NAME = "br-lan"
+CREATE_BRIDGE = f"{_BRCTL} addbr {BRIDGE_NAME}"
+DELETE_BRIDGE = f"{_BRCTL} delbr {BRIDGE_NAME}"
+BRING_DOWN_BRIDGE = f"ifconfig {BRIDGE_NAME} down"
+
+
+class BridgeInterfaceConfigs(object):
+    """Configs needed for creating bridge interface between LAN and WLAN."""
+
+    def __init__(self, iface_wlan: str, iface_lan: str, bridge_ip: str) -> None:
+        """Set bridge interface configs based on the channel info.
+
+        Args:
+            iface_wlan: the wlan interface as part of the bridge
+            iface_lan: the ethernet LAN interface as part of the bridge
+            bridge_ip: the ip address assigned to the bridge interface
+        """
+        # These values are interpolated verbatim into shell commands
+        # (iw/brctl/ifconfig) by BridgeInterface, so plain strings are expected.
+        self.iface_wlan = iface_wlan
+        self.iface_lan = iface_lan
+        self.bridge_ip = bridge_ip
+
+
+class BridgeInterface(object):
+    """Class object for bridge interface betwen WLAN and LAN"""
+
+    def __init__(self, ap):
+        """Initialize the BridgeInterface class.
+
+        Bridge interface will be added between ethernet LAN port and WLAN port.
+        Args:
+            ap: AP object within ACTS
+        """
+        self.ssh = ap.ssh
+
+    def startup(self, brconfigs):
+        """Start up the bridge interface.
+
+        Args:
+            brconfigs: the bridge interface config, type BridgeInterfaceConfigs
+        """
+
+        logging.info("Create bridge interface between LAN and WLAN")
+        # Create the bridge
+        try:
+            self.ssh.run(CREATE_BRIDGE)
+        except job.Error:
+            logging.warning(
+                f"Bridge interface {BRIDGE_NAME} already exists, no action needed"
+            )
+
+        # Enable 4addr mode on for the wlan interface
+        ENABLE_4ADDR = f"iw dev {brconfigs.iface_wlan} set 4addr on"
+        try:
+            self.ssh.run(ENABLE_4ADDR)
+        except job.Error:
+            logging.warning(f"4addr is already enabled on {brconfigs.iface_wlan}")
+
+        # Add both LAN and WLAN interfaces to the bridge interface
+        for interface in [brconfigs.iface_lan, brconfigs.iface_wlan]:
+            ADD_INTERFACE = f"{_BRCTL} addif {BRIDGE_NAME} {interface}"
+            try:
+                self.ssh.run(ADD_INTERFACE)
+            except job.Error:
+                logging.warning(f"{interface} has already been added to {BRIDGE_NAME}")
+        time.sleep(5)
+
+        # Set IP address on the bridge interface to bring it up
+        SET_BRIDGE_IP = f"ifconfig {BRIDGE_NAME} {brconfigs.bridge_ip}"
+        self.ssh.run(SET_BRIDGE_IP)
+        time.sleep(2)
+
+        # Bridge interface is up
+        logging.info("Bridge interface is up and running")
+
+    def teardown(self, brconfigs):
+        """Tear down the bridge interface.
+
+        Args:
+            brconfigs: the bridge interface config, type BridgeInterfaceConfigs
+        """
+        logging.info("Bringing down the bridge interface")
+        # Delete the bridge interface
+        self.ssh.run(BRING_DOWN_BRIDGE)
+        time.sleep(1)
+        self.ssh.run(DELETE_BRIDGE)
+
+        # Bring down wlan interface and disable 4addr mode
+        BRING_DOWN_WLAN = f"ifconfig {brconfigs.iface_wlan} down"
+        self.ssh.run(BRING_DOWN_WLAN)
+        time.sleep(2)
+        DISABLE_4ADDR = f"iw dev {brconfigs.iface_wlan} set 4addr off"
+        self.ssh.run(DISABLE_4ADDR)
+        time.sleep(1)
+        logging.info("Bridge interface is down")
diff --git a/packages/antlion/controllers/ap_lib/dhcp_config.py b/packages/antlion/controllers/ap_lib/dhcp_config.py
new file mode 100644
index 0000000..5fa8cf0
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/dhcp_config.py
@@ -0,0 +1,205 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+from ipaddress import IPv4Address, IPv4Network
+
+_ROUTER_DNS = "8.8.8.8, 4.4.4.4"
+
+
+class Subnet(object):
+    """Configs for a subnet  on the dhcp server.
+
+    Attributes:
+        network: ipaddress.IPv4Network, the network that this subnet is in.
+        start: ipaddress.IPv4Address, the start ip address.
+        end: ipaddress.IPv4Address, the end ip address.
+        router: The router to give to all hosts in this subnet.
+        lease_time: The lease time of all hosts in this subnet.
+        additional_parameters: A dictionary corresponding to DHCP parameters.
+        additional_options: A dictionary corresponding to DHCP options.
+    """
+
+    def __init__(
+        self,
+        subnet: IPv4Network,
+        start: IPv4Address | None = None,
+        end: IPv4Address | None = None,
+        router: IPv4Address | None = None,
+        lease_time: int | None = None,
+        additional_parameters: dict[str, str] = {},
+        additional_options: dict[str, int | str] = {},
+    ):
+        """
+        Args:
+            subnet: ipaddress.IPv4Network, The address space of the subnetwork
+                    served by the DHCP server.
+            start: ipaddress.IPv4Address, The start of the address range to
+                   give hosts in this subnet. If not given, the second ip in
+                   the network is used, under the assumption that the first
+                   address is the router.
+            end: ipaddress.IPv4Address, The end of the address range to give
+                 hosts. If not given then the address prior to the broadcast
+                 address (i.e. the second to last ip in the network) is used.
+            router: ipaddress.IPv4Address, The router hosts should use in this
+                    subnet. If not given the first ip in the network is used.
+            lease_time: int, The amount of lease time in seconds
+                        hosts in this subnet have.
+            additional_parameters: A dictionary corresponding to DHCP parameters.
+            additional_options: A dictionary corresponding to DHCP options.
+        """
+        self.network = subnet
+
+        if start:
+            self.start = start
+        else:
+            self.start = self.network[2]
+
+        if not self.start in self.network:
+            raise ValueError("The start range is not in the subnet.")
+        if self.start.is_reserved:
+            raise ValueError("The start of the range cannot be reserved.")
+
+        if end:
+            self.end = end
+        else:
+            self.end = self.network[-2]
+
+        if not self.end in self.network:
+            raise ValueError("The end range is not in the subnet.")
+        if self.end.is_reserved:
+            raise ValueError("The end of the range cannot be reserved.")
+        if self.end < self.start:
+            raise ValueError("The end must be an address larger than the start.")
+
+        if router:
+            if router >= self.start and router <= self.end:
+                raise ValueError("Router must not be in pool range.")
+            if not router in self.network:
+                raise ValueError("Router must be in the given subnet.")
+
+            self.router = router
+        else:
+            # TODO: Use some more clever logic so that we don't have to search
+            # every host potentially.
+            # This is especially important if we support IPv6 networks in this
+            # configuration. The improved logic that we can use is:
+            #    a) erroring out if start and end encompass the whole network, and
+            #    b) picking any address before self.start or after self.end.
+            for host in self.network.hosts():
+                if host < self.start or host > self.end:
+                    self.router = host
+                    break
+
+            if not hasattr(self, "router"):
+                raise ValueError("No useable host found.")
+
+        self.lease_time = lease_time
+        self.additional_parameters = additional_parameters
+        self.additional_options = additional_options
+        if "domain-name-servers" not in self.additional_options:
+            self.additional_options["domain-name-servers"] = _ROUTER_DNS
+
+
+class StaticMapping(object):
+    """Represents a static dhcp host.
+
+    Attributes:
+        identifier: The id of the host (usually the mac address,
+                    e.g. 00:11:22:33:44:55).
+        address: ipaddress.IPv4Address, The ipv4 address to give the host.
+        lease_time: How long to give a lease to this host.
+    """
+
+    def __init__(self, identifier: str, address, lease_time: int | None = None):
+        # The identifier is written verbatim into the dhcpd config as the
+        # host's "hardware ethernet" value.
+        self.identifier = identifier
+        self.ipv4_address = address
+        self.lease_time = lease_time
+
+
+class DhcpConfig(object):
+    """The configs for a dhcp server.
+
+    Attributes:
+        subnets: A list of all subnets for the dhcp server to create.
+        static_mappings: A list of static host addresses.
+        default_lease_time: The default time for a lease.
+        max_lease_time: The max time to allow a lease.
+    """
+
+    def __init__(
+        self,
+        subnets=None,
+        static_mappings=None,
+        default_lease_time=600,
+        max_lease_time=7200,
+    ):
+        self.subnets = copy.deepcopy(subnets) if subnets else []
+        self.static_mappings = copy.deepcopy(static_mappings) if static_mappings else []
+        self.default_lease_time = default_lease_time
+        self.max_lease_time = max_lease_time
+
+    def render_config_file(self):
+        """Renders the config parameters into a format compatible with
+        the ISC DHCP server (dhcpd).
+        """
+        lines = []
+
+        if self.default_lease_time:
+            lines.append(f"default-lease-time {self.default_lease_time};")
+        if self.max_lease_time:
+            lines.append(f"max-lease-time {self.max_lease_time};")
+
+        for subnet in self.subnets:
+            address = subnet.network.network_address
+            mask = subnet.network.netmask
+            router = subnet.router
+            start = subnet.start
+            end = subnet.end
+            lease_time = subnet.lease_time
+            additional_parameters = subnet.additional_parameters
+            additional_options = subnet.additional_options
+
+            lines.append("subnet %s netmask %s {" % (address, mask))
+            lines.append("\tpool {")
+            lines.append(f"\t\toption subnet-mask {mask};")
+            lines.append(f"\t\toption routers {router};")
+            lines.append(f"\t\trange {start} {end};")
+            if lease_time:
+                lines.append(f"\t\tdefault-lease-time {lease_time};")
+                lines.append(f"\t\tmax-lease-time {lease_time};")
+            for param, value in additional_parameters.items():
+                lines.append(f"\t\t{param} {value};")
+            for option, value in additional_options.items():
+                lines.append(f"\t\toption {option} {value};")
+            lines.append("\t}")
+            lines.append("}")
+
+        for mapping in self.static_mappings:
+            identifier = mapping.identifier
+            fixed_address = mapping.ipv4_address
+            host_fake_name = f"host{identifier.replace(':', '')}"
+            lease_time = mapping.lease_time
+
+            lines.append("host %s {" % host_fake_name)
+            lines.append(f"\thardware ethernet {identifier};")
+            lines.append(f"\tfixed-address {fixed_address};")
+            if lease_time:
+                lines.append(f"\tdefault-lease-time {lease_time};")
+                lines.append(f"\tmax-lease-time {lease_time};")
+            lines.append("}")
+
+        config_str = "\n".join(lines)
+
+        return config_str
diff --git a/packages/antlion/controllers/ap_lib/dhcp_server.py b/packages/antlion/controllers/ap_lib/dhcp_server.py
new file mode 100644
index 0000000..5bdabbc
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/dhcp_server.py
@@ -0,0 +1,208 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+
+from mobly import logger
+from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
+
+from antlion.controllers.ap_lib.dhcp_config import DhcpConfig
+from antlion.controllers.utils_lib.commands import shell
+
+
+class Error(Exception):
+    """An error caused by the dhcp server."""
+
+
+class NoInterfaceError(Exception):
+    """Raised when the dhcp server has no interfaces on any configured subnet."""
+
+
+class DhcpServer(object):
+    """Manages the dhcp server program.
+
+    Only one of these can run in an environment at a time.
+
+    Attributes:
+        config: The dhcp server configuration that is being used.
+    """
+
+    PROGRAM_FILE = "dhcpd"
+
+    def __init__(self, runner, interface, working_dir="/tmp"):
+        """
+        Args:
+            runner: Object that has a run_async and run methods for running
+                    shell commands.
+            interface: string, The name of the interface to use.
+            working_dir: The directory to work out of.
+        """
+        self._log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[DHCP Server|{interface}]",
+            },
+        )
+
+        self._runner = runner
+        self._working_dir = working_dir
+        self._shell = shell.ShellCommand(runner, working_dir)
+        self._stdio_log_file = f"dhcpd_{interface}.log"
+        self._config_file = f"dhcpd_{interface}.conf"
+        self._lease_file = f"dhcpd_{interface}.leases"
+        self._pid_file = f"dhcpd_{interface}.pid"
+        self._identifier = f"{self.PROGRAM_FILE}.*{self._config_file}"
+
+    # There is a slight timing issue where if the proc filesystem in Linux
+    # doesn't get updated in time as when this is called, the NoInterfaceError
+    # will happening.  By adding this retry, the error appears to have gone away
+    # but will still show a warning if the problem occurs.  The error seems to
+    # happen more with bridge interfaces than standard interfaces.
+    @retry(
+        retry=retry_if_exception_type(NoInterfaceError),
+        stop=stop_after_attempt(3),
+        wait=wait_fixed(1),
+    )
+    def start(self, config: DhcpConfig, timeout_sec: int = 60) -> None:
+        """Starts the dhcp server.
+
+        Starts the dhcp server daemon and runs it in the background.
+
+        Args:
+            config: Configs to start the dhcp server with.
+
+        Raises:
+            Error: Raised when a dhcp server error is found.
+        """
+        if self.is_alive():
+            self.stop()
+
+        self._write_configs(config)
+        self._shell.delete_file(self._stdio_log_file)
+        self._shell.delete_file(self._pid_file)
+        self._shell.touch_file(self._lease_file)
+
+        dhcpd_command = (
+            f"{self.PROGRAM_FILE} "
+            f'-cf "{self._config_file}" '
+            f"-lf {self._lease_file} "
+            f'-pf "{self._pid_file}" '
+            "-f -d"
+        )
+
+        base_command = f'cd "{self._working_dir}"; {dhcpd_command}'
+        job_str = f'{base_command} > "{self._stdio_log_file}" 2>&1'
+        self._runner.run_async(job_str)
+
+        try:
+            self._wait_for_process(timeout=timeout_sec)
+            self._wait_for_server(timeout=timeout_sec)
+        except:
+            self._log.warn("Failed to start DHCP server.")
+            self._log.info(f"DHCP configuration:\n{config.render_config_file()}\n")
+            self._log.info(f"DHCP logs:\n{self.get_logs()}\n")
+            self.stop()
+            raise
+
+    def stop(self):
+        """Kills the daemon if it is running."""
+        if self.is_alive():
+            self._shell.kill(self._identifier)
+
+    def is_alive(self):
+        """
+        Returns:
+            True if the daemon is running.
+        """
+        return self._shell.is_alive(self._identifier)
+
+    def get_logs(self) -> str:
+        """Pulls the log files from where dhcp server is running.
+
+        Returns:
+            A string of the dhcp server logs.
+        """
+        return self._shell.read_file(self._stdio_log_file)
+
+    def _wait_for_process(self, timeout=60):
+        """Waits for the process to come up.
+
+        Waits until the dhcp server process is found running, or there is
+        a timeout. If the program never comes up then the log file
+        will be scanned for errors.
+
+        Raises: See _scan_for_errors
+        """
+        start_time = time.time()
+        while time.time() - start_time < timeout and not self.is_alive():
+            self._scan_for_errors(False)
+            time.sleep(0.1)
+
+        self._scan_for_errors(True)
+
+    def _wait_for_server(self, timeout=60):
+        """Waits for dhcp server to report that the server is up.
+
+        Waits until dhcp server says the server has been brought up or an
+        error occurs.
+
+        Raises: see _scan_for_errors
+        """
+        start_time = time.time()
+        while time.time() - start_time < timeout:
+            success = self._shell.search_file(
+                "Wrote [0-9]* leases to leases file", self._stdio_log_file
+            )
+            if success:
+                return
+
+            self._scan_for_errors(True)
+
+    def _scan_for_errors(self, should_be_up):
+        """Scans the dhcp server log for any errors.
+
+        Args:
+            should_be_up: If true then dhcp server is expected to be alive.
+                          If it is found not alive while this is true an error
+                          is thrown.
+
+        Raises:
+            Error: Raised when a dhcp server error is found.
+        """
+        # If this is checked last we can run into a race condition where while
+        # scanning the log the process has not died, but after scanning it
+        # has. If this were checked last in that condition then the wrong
+        # error will be thrown. To prevent this we gather the alive state first
+        # so that if it is dead it will definitely give the right error before
+        # just giving a generic one.
+        is_dead = not self.is_alive()
+
+        no_interface = self._shell.search_file(
+            "Not configured to listen on any interfaces", self._stdio_log_file
+        )
+        if no_interface:
+            raise NoInterfaceError(
+                "Dhcp does not contain a subnet for any of the networks the"
+                " current interfaces are on."
+            )
+
+        if should_be_up and is_dead:
+            raise Error("Dhcp server failed to start.", self)
+
+    def _write_configs(self, config):
+        """Writes the configs to the dhcp server config file."""
+        self._shell.delete_file(self._config_file)
+        config_str = config.render_config_file()
+        self._shell.write_file(self._config_file, config_str)
diff --git a/packages/antlion/controllers/ap_lib/extended_capabilities.py b/packages/antlion/controllers/ap_lib/extended_capabilities.py
new file mode 100644
index 0000000..4570409
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/extended_capabilities.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import IntEnum, unique
+
+
+@unique
+class ExtendedCapability(IntEnum):
+    """All extended capabilities present in IEEE 802.11-2020 Table 9-153.
+
+    Each name has a value corresponding to that extended capability's bit offset
+    in the specification's extended capabilities field.
+
+    Note that most extended capabilities are represented by a single bit, which
+    indicates whether the extended capability is advertised by the STA; but
+    some are represented by multiple bits. In the enum, each extended capability
+    has the value of its offset; comments indicate capabilities that use
+    multiple bits.
+    """
+
+    TWENTY_FORTY_BSS_COEXISTENCE_MANAGEMENT_SUPPORT = 0
+    GLK = 1
+    EXTENDED_CHANNEL_SWITCHING = 2
+    GLK_GCR = 3
+    PSMP_CAPABILITY = 4
+    # 5 reserved
+    S_PSMP_SUPPORT = 6
+    EVENT = 7
+    DIAGNOSTICS = 8
+    MULTICAST_DIAGNOSTICS = 9
+    LOCATION_TRACKING = 10
+    FMS = 11
+    PROXY_ARP_SERVICE = 12
+    COLLOCATED_INTERFERENCE_REPORTING = 13
+    CIVIC_LOCATION = 14
+    GEOSPATIAL_LOCATION = 15
+    TFS = 16
+    WNM_SLEEP_MODE = 17
+    TIM_BROADCAST = 18
+    BSS_TRANSITION = 19
+    QOS_TRAFFIC_CAPABILITY = 20
+    AC_STATION_COUNT = 21
+    MULTIPLE_BSSID = 22
+    TIMING_MEASUREMENT = 23
+    CHANNEL_USAGE = 24
+    SSID_LIST = 25
+    DMS = 26
+    UTC_TSF_OFFSET = 27
+    TPU_BUFFER_STA_SUPPORT = 28
+    TDLS_PEER_PSM_SUPPORT = 29
+    TDLS_CHANNEL_SWITCHING = 30
+    INTERWORKING = 31
+    QOS_MAP = 32
+    EBR = 33
+    SSPN_INTERFACE = 34
+    # 35 reserved
+    MSGCF_CAPABILITY = 36
+    TDLS_SUPPORT = 37
+    TDLS_PROHIBITED = 38
+    TDLS_CHANNEL_SWITCHING_PROHIBITED = 39
+    REJECT_UNADMITTED_FRAME = 40
+    SERVICE_INTERVAL_GRANULARITY = 41
+    # Bits 41-43 contain SERVICE_INTERVAL_GRANULARITY value (multi-bit field)
+    IDENTIFIER_LOCATION = 44
+    U_APSD_COEXISTENCE = 45
+    WNM_NOTIFICATION = 46
+    QAB_CAPABILITY = 47
+    UTF_8_SSID = 48
+    QMF_ACTIVATED = 49
+    QMF_RECONFIGURATION_ACTIVATED = 50
+    ROBUST_AV_STREAMING = 51
+    ADVANCED_GCR = 52
+    MESH_GCR = 53
+    SCS = 54
+    QLOAD_REPORT = 55
+    ALTERNATE_EDCA = 56
+    UNPROTECTED_TXOP_NEGOTIATION = 57
+    PROTECTED_TXOP_NEGOTIATION = 58
+    # 59 reserved
+    PROTECTED_QLOAD_REPORT = 60
+    TDLS_WIDER_BANDWIDTH = 61
+    OPERATING_MODE_NOTIFICATION = 62
+    MAX_NUMBER_OF_MSDUS_IN_A_MSDU = 63
+    # Bits 63-64 contain MAX_NUMBER_OF_MSDUS_IN_A_MSDU value (multi-bit field)
+    CHANNEL_SCHEDULE_MANAGEMENT = 65
+    GEODATABASE_INBAND_ENABLING_SIGNAL = 66
+    NETWORK_CHANNEL_CONTROL = 67
+    WHITE_SPACE_MAP = 68
+    CHANNEL_AVAILABILITY_QUERY = 69
+    FINE_TIMING_MEASUREMENT_RESPONDER = 70
+    FINE_TIMING_MEASUREMENT_INITIATOR = 71
+    FILS_CAPABILITY = 72
+    EXTENDED_SPECTRUM_MANAGEMENT_CAPABLE = 73
+    FUTURE_CHANNEL_GUIDANCE = 74
+    PAD = 75
+    # 76-79 reserved
+    COMPLETE_LIST_OF_NON_TX_BSSID_PROFILES = 80
+    SAE_PASSWORD_IDENTIFIERS_IN_USE = 81
+    SAE_PASSWORD_IDENTIFIERS_USED_EXCLUSIVELY = 82
+    # 83 reserved
+    BEACON_PROTECTION_ENABLED = 84
+    MIRRORED_SCS = 85
+    # 86 reserved
+    LOCAL_MAC_ADDRESS_POLICY = 87
+    # 88-n reserved
+
+
+def _offsets(ext_cap_offset: ExtendedCapability) -> tuple[int, int]:
+    """For given capability, return the byte and bit offsets within the field.
+
+    802.11 divides the extended capability field into bytes, as does the
+    ExtendedCapabilities class below. This function returns the index of the
+    byte that contains the given extended capability, as well as the bit offset
+    inside that byte (all offsets zero-indexed). For example,
+    MULTICAST_DIAGNOSTICS is bit 9, which is within byte 1 at bit offset 1.
+    """
+    byte_offset = ext_cap_offset // 8
+    bit_offset = ext_cap_offset % 8
+    return byte_offset, bit_offset
+
+
+class ExtendedCapabilities:
+    """Extended capability parsing and representation.
+
+    See IEEE 802.11-2020 9.4.2.26.
+    """
+
+    def __init__(self, ext_cap: bytearray = bytearray()):
+        """Represent the given extended capabilities field.
+
+        Args:
+            ext_cap: IEEE 802.11-2020 9.4.2.26 extended capabilities field.
+            Default is an empty field, meaning no extended capabilities are
+            advertised.
+        """
+        self._ext_cap = ext_cap
+
+    def _capability_advertised(self, ext_cap: ExtendedCapability) -> bool:
+        """Whether an extended capability is advertised.
+
+        Args:
+            ext_cap: an extended capability.
+        Returns:
+            True if the bit is present and its value is 1, otherwise False.
+        Raises:
+            NotImplementedError: for extended capabilities that span more than
+            a single bit. These could be supported, but no callers need them
+            at this time.
+        """
+        if ext_cap in [
+            ExtendedCapability.SERVICE_INTERVAL_GRANULARITY,
+            ExtendedCapability.MAX_NUMBER_OF_MSDUS_IN_A_MSDU,
+        ]:
+            raise NotImplementedError(
+                f"{ext_cap.name} not implemented yet by {self.__class__}"
+            )
+        byte_offset, bit_offset = _offsets(ext_cap)
+        if len(self._ext_cap) > byte_offset:
+            # Use bit_offset to derive a mask that will check the correct bit.
+            if self._ext_cap[byte_offset] & 2**bit_offset > 0:
+                return True
+        return False
+
+    @property
+    def bss_transition(self) -> bool:
+        return self._capability_advertised(ExtendedCapability.BSS_TRANSITION)
+
+    @property
+    def proxy_arp_service(self) -> bool:
+        return self._capability_advertised(ExtendedCapability.PROXY_ARP_SERVICE)
+
+    @property
+    def utc_tsf_offset(self) -> bool:
+        return self._capability_advertised(ExtendedCapability.UTC_TSF_OFFSET)
+
+    @property
+    def wnm_sleep_mode(self) -> bool:
+        return self._capability_advertised(ExtendedCapability.WNM_SLEEP_MODE)
+
+    # Other extended capability property methods can be added as needed by callers.
diff --git a/packages/antlion/controllers/ap_lib/hostapd.py b/packages/antlion/controllers/ap_lib/hostapd.py
new file mode 100644
index 0000000..c022b1e
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd.py
@@ -0,0 +1,443 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import itertools
+import logging
+import re
+import time
+from datetime import datetime, timezone
+from subprocess import CalledProcessError
+from typing import Any, Iterable
+
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
+from antlion.controllers.ap_lib.hostapd_config import HostapdConfig
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionManagementRequest,
+)
+from antlion.controllers.utils_lib.commands import shell
+from antlion.logger import LogLevel
+from antlion.runner import Runner
+
# Absolute paths, on the AP device, of the hostapd daemon and the CLI used to
# talk to a running daemon over its control interface.
PROGRAM_FILE = "/usr/sbin/hostapd"
CLI_PROGRAM_FILE = "/usr/bin/hostapd_cli"
+
+
class Error(Exception):
    """Base exception for failures raised by the hostapd controller."""
+
+
class Hostapd(object):
    """Manages the hostapd program.

    Attributes:
        config: The hostapd configuration that is being used.
    """

    def __init__(
        self, runner: Runner, interface: str, working_dir: str = "/tmp"
    ) -> None:
        """
        Args:
            runner: Object that has run_async and run methods for executing
                    shell commands (e.g. connection.SshConnection)
            interface: The name of the interface to use (eg. wlan0).
            working_dir: The directory to work out of.
        """
        self._runner = runner
        self._interface = interface
        self._working_dir = working_dir
        # Set by start(); stays None until hostapd has been configured once.
        self.config: HostapdConfig | None = None
        self._shell = shell.ShellCommand(runner, working_dir)
        self._log_file = f"hostapd-{self._interface}.log"
        self._ctrl_file = f"hostapd-{self._interface}.ctrl"
        self._config_file = f"hostapd-{self._interface}.conf"
        # Pattern handed to the shell helper to find/kill our daemon process.
        self._identifier = f"{PROGRAM_FILE}.*{self._config_file}"

    def start(
        self,
        config: HostapdConfig,
        timeout: int = 60,
        additional_parameters: dict[str, Any] | None = None,
    ) -> None:
        """Starts hostapd

        Starts the hostapd daemon and runs it in the background.

        Args:
            config: Configs to start the hostapd with.
            timeout: Time to wait for the daemon process and interface to
                     come up.
            additional_parameters: A dictionary of parameters that can sent
                                   directly into the hostapd config file.  This
                                   can be used for debugging and or adding one
                                   off parameters into the config.

        Raises:
            Error: If the daemon or interface fail to come up. Note that the
                daemon can still start and not work. Invalid configurations
                can take a long amount of time to be produced, and because the
                daemon runs indefinitely it's impossible to wait on. If you
                need to check if configs are ok then periodic checks to
                is_running and logs should be used.
        """
        if additional_parameters is None:
            additional_parameters = {}

        if self.is_alive():
            self.stop()

        self.config = config

        self._shell.delete_file(self._ctrl_file)
        self._shell.delete_file(self._log_file)
        self._shell.delete_file(self._config_file)
        self._write_configs(additional_parameters)

        hostapd_command = f'{PROGRAM_FILE} -dd -t "{self._config_file}"'
        base_command = f'cd "{self._working_dir}"; {hostapd_command}'
        job_str = f'rfkill unblock all; {base_command} > "{self._log_file}" 2>&1'
        self._runner.run_async(job_str)

        try:
            self._wait_for_process(timeout=timeout)
            self._wait_for_interface(timeout=timeout)
        except BaseException:
            # Clean up the half-started daemon on any failure (including
            # KeyboardInterrupt), then let the original error propagate.
            self.stop()
            raise

    def stop(self) -> None:
        """Kills the daemon if it is running."""
        if self.is_alive():
            self._shell.kill(self._identifier)

    def channel_switch(self, channel_num: int) -> None:
        """Switches to the given channel.

        Args:
            channel_num: Channel number to switch to.

        Raises:
            ValueError: If channel_num has no known frequency.
            See _run_hostapd_cli_cmd for CLI command failures.
        """
        try:
            channel_freq = hostapd_constants.FREQUENCY_MAP[channel_num]
        except KeyError as e:
            raise ValueError(f"Invalid channel number {channel_num}") from e
        # Number of beacons carrying the Channel Switch Announcement before
        # the actual switch happens.
        csa_beacon_count = 10
        channel_switch_cmd = f"chan_switch {csa_beacon_count} {channel_freq}"
        self._run_hostapd_cli_cmd(channel_switch_cmd)

    def get_current_channel(self) -> int:
        """Returns the current channel number.

        Raises:
            Error: If the channel cannot be found or parsed from hostapd's
                status output.
            See _run_hostapd_cli_cmd for CLI command failures.
        """
        status_cmd = "status"
        result = self._run_hostapd_cli_cmd(status_cmd)
        match = re.search(r"^channel=(\d+)$", result, re.MULTILINE)
        if not match:
            raise Error("Current channel could not be determined")
        try:
            channel = int(match.group(1))
        except ValueError as e:
            raise Error("Internal error: current channel could not be parsed") from e
        return channel

    def get_stas(self) -> set[str]:
        """Return MAC addresses of all associated STAs."""
        list_sta_result = self._run_hostapd_cli_cmd("list_sta")
        stas = set()
        for line in list_sta_result.splitlines():
            # Each line must be a valid MAC address. Capture it.
            m = re.match(r"((?:[0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2})", line)
            if m:
                stas.add(m.group(1))
        return stas

    def _sta(self, sta_mac: str) -> str:
        """Return hostapd's detailed info about an associated STA.

        Args:
            sta_mac: MAC address of the STA in question.

        Returns:
            Results of the command.

        Raises: See _run_hostapd_cli_cmd
        """
        return self._run_hostapd_cli_cmd(f"sta {sta_mac}")

    def get_sta_extended_capabilities(self, sta_mac: str) -> ExtendedCapabilities:
        """Get extended capabilities for the given STA, as seen by the AP.

        Args:
            sta_mac: MAC address of the STA in question.
        Returns:
            Extended capabilities of the given STA.
        Raises:
            Error if extended capabilities for the STA cannot be obtained.
        """
        sta_result = self._sta(sta_mac)
        # hostapd ext_capab field is a hex encoded string representation of the
        # 802.11 extended capabilities structure, each byte represented by two
        # chars (each byte having format %02x).
        # NOTE: the character class must cover the full lowercase a-f range;
        # the previous pattern ([0-9A-Faf]) silently dropped STAs whose hex
        # string contained lowercase b-e.
        m = re.search(r"ext_capab=([0-9A-Fa-f]+)", sta_result, re.MULTILINE)
        if not m:
            raise Error("Failed to get ext_capab from STA details")
        raw_ext_capab = m.group(1)
        try:
            return ExtendedCapabilities(bytearray.fromhex(raw_ext_capab))
        except ValueError as e:
            raise Error(
                f"ext_capab contains invalid hex string repr {raw_ext_capab}"
            ) from e

    def sta_authenticated(self, sta_mac: str) -> bool:
        """Is the given STA authenticated?

        Args:
            sta_mac: MAC address of the STA in question.
        Returns:
            True if AP sees that the STA is authenticated, False otherwise.
        Raises:
            Error if authenticated status for the STA cannot be obtained.
        """
        sta_result = self._sta(sta_mac)
        m = re.search(r"flags=.*\[AUTH\]", sta_result, re.MULTILINE)
        return bool(m)

    def sta_associated(self, sta_mac: str) -> bool:
        """Is the given STA associated?

        Args:
            sta_mac: MAC address of the STA in question.
        Returns:
            True if AP sees that the STA is associated, False otherwise.
        Raises:
            Error if associated status for the STA cannot be obtained.
        """
        sta_result = self._sta(sta_mac)
        m = re.search(r"flags=.*\[ASSOC\]", sta_result, re.MULTILINE)
        return bool(m)

    def sta_authorized(self, sta_mac: str) -> bool:
        """Is the given STA authorized (802.1X controlled port open)?

        Args:
            sta_mac: MAC address of the STA in question.
        Returns:
            True if AP sees that the STA is 802.1X authorized, False otherwise.
        Raises:
            Error if authorized status for the STA cannot be obtained.
        """
        sta_result = self._sta(sta_mac)
        m = re.search(r"flags=.*\[AUTHORIZED\]", sta_result, re.MULTILINE)
        return bool(m)

    def _bss_tm_req(
        self, client_mac: str, request: BssTransitionManagementRequest
    ) -> None:
        """Send a hostapd BSS Transition Management request command to a STA.

        Args:
            client_mac: MAC address that will receive the request.
            request: BSS Transition Management request that will be sent.
        Raises: See _run_hostapd_cli_cmd
        """
        bss_tm_req_cmd = f"bss_tm_req {client_mac}"

        if request.abridged:
            bss_tm_req_cmd += " abridged=1"
        if request.bss_termination_included and request.bss_termination_duration:
            bss_tm_req_cmd += f" bss_term={request.bss_termination_duration.duration}"
        if request.disassociation_imminent:
            bss_tm_req_cmd += " disassoc_imminent=1"
        if request.disassociation_timer is not None:
            bss_tm_req_cmd += f" disassoc_timer={request.disassociation_timer}"
        if request.preferred_candidate_list_included:
            bss_tm_req_cmd += " pref=1"
        if request.session_information_url:
            bss_tm_req_cmd += f" url={request.session_information_url}"
        if request.validity_interval:
            bss_tm_req_cmd += f" valid_int={request.validity_interval}"

        # neighbor= can appear multiple times, so it requires special handling.
        if request.candidate_list is not None:
            for neighbor in request.candidate_list:
                bssid = neighbor.bssid
                bssid_info = hex(neighbor.bssid_information)
                op_class = neighbor.operating_class
                chan_num = neighbor.channel_number
                phy_type = int(neighbor.phy_type)
                bss_tm_req_cmd += (
                    f" neighbor={bssid},{bssid_info},{op_class},{chan_num},{phy_type}"
                )

        self._run_hostapd_cli_cmd(bss_tm_req_cmd)

    def send_bss_transition_management_req(
        self, sta_mac: str, request: BssTransitionManagementRequest
    ) -> None:
        """Send a BSS Transition Management request to an associated STA.

        Args:
            sta_mac: MAC address of the STA in question.
            request: BSS Transition Management request that will be sent.
        Raises: See _run_hostapd_cli_cmd
        """
        self._bss_tm_req(sta_mac, request)

    def is_alive(self) -> bool:
        """
        Returns:
            True if the daemon is running.
        """
        return self._shell.is_alive(self._identifier)

    def pull_logs(self) -> str:
        """Pulls the log files from where hostapd is running.

        Returns:
            A string of the hostapd logs.
        """
        # TODO: Auto pulling of logs when stop is called.
        with LogLevel(self._runner.log, logging.INFO):
            log = self._shell.read_file(self._log_file)

        # Convert epoch to human-readable times. hostapd -t prefixes each
        # line with "<epoch-seconds>: ".
        result: list[str] = []
        for line in log.splitlines():
            try:
                end = line.index(":")
                epoch = float(line[:end])
                timestamp = datetime.fromtimestamp(epoch, timezone.utc).strftime(
                    "%m-%d %H:%M:%S.%f"
                )
                result.append(f"{timestamp} {line[end+1:]}")
            except ValueError:  # Colon not found or float conversion failure
                result.append(line)

        return "\n".join(result)

    def _run_hostapd_cli_cmd(self, cmd: str) -> str:
        """Run the given hostapd_cli command.

        Runs the command, waits for the output (up to default timeout), and
            returns the result.

        Returns:
            Results of the ssh command.

        Raises:
            subprocess.TimeoutExpired: When the remote command took too
                long to execute.
            antlion.controllers.utils_lib.ssh.connection.Error: When the ssh
                connection failed to be created.
            subprocess.CalledProcessError: Ssh worked, but the command had an
                error executing.
        """
        hostapd_cli_job = (
            f"cd {self._working_dir}; {CLI_PROGRAM_FILE} -p {self._ctrl_file} {cmd}"
        )
        proc = self._runner.run(hostapd_cli_job)
        if proc.returncode:
            raise CalledProcessError(
                proc.returncode, hostapd_cli_job, proc.stdout, proc.stderr
            )
        return proc.stdout

    def _wait_for_process(self, timeout: int = 60) -> None:
        """Waits for the process to come up.

        Waits until the hostapd process is found running, or there is
        a timeout. If the program never comes up then the log file
        will be scanned for errors.

        Raises: See _scan_for_errors
        """
        start_time = time.time()
        while time.time() - start_time < timeout and not self.is_alive():
            self._scan_for_errors(False)
            time.sleep(0.1)

    def _wait_for_interface(self, timeout: int = 60) -> None:
        """Waits for hostapd to report that the interface is up.

        Waits until hostapd says the interface has been brought up or an
        error occurs.

        Raises: see _scan_for_errors
        """
        start_time = time.time()
        while time.time() - start_time < timeout:
            time.sleep(0.1)
            success = self._shell.search_file("Setup of interface done", self._log_file)
            if success:
                return
            self._scan_for_errors(False)

        # Timed out; do a final scan that also requires the process be alive.
        self._scan_for_errors(True)

    def _scan_for_errors(self, should_be_up: bool) -> None:
        """Scans the hostapd log for any errors.

        Args:
            should_be_up: If true then hostapd program is expected to be alive.
                          If it is found not alive while this is true an error
                          is thrown.

        Raises:
            Error: Raised when a hostapd error is found.
        """
        # Store this so that all other errors have priority.
        is_dead = not self.is_alive()

        bad_config = self._shell.search_file(
            "Interface initialization failed", self._log_file
        )
        if bad_config:
            raise Error("Interface failed to start", self)

        bad_config = self._shell.search_file(
            f"Interface {self._interface} wasn't started", self._log_file
        )
        if bad_config:
            raise Error("Interface failed to start", self)

        if should_be_up and is_dead:
            raise Error("Hostapd failed to start", self)

    def _write_configs(self, additional_parameters: dict[str, Any]) -> None:
        """Writes the configs to the hostapd config file."""
        self._shell.delete_file(self._config_file)

        interface_configs = collections.OrderedDict()
        interface_configs["interface"] = self._interface
        interface_configs["ctrl_interface"] = self._ctrl_file
        pairs: Iterable[str] = (f"{k}={v}" for k, v in interface_configs.items())

        packaged_configs = self.config.package_configs() if self.config else []
        if additional_parameters:
            packaged_configs.append(additional_parameters)
        for packaged_config in packaged_configs:
            # Skip None values so optional settings are omitted entirely.
            config_pairs = (
                f"{k}={v}" for k, v in packaged_config.items() if v is not None
            )
            pairs = itertools.chain(pairs, config_pairs)

        hostapd_conf = "\n".join(pairs)

        logging.info(f"Writing {self._config_file}")
        logging.debug("******************Start*******************")
        logging.debug(f"\n{hostapd_conf}")
        logging.debug("*******************End********************")

        self._shell.write_file(self._config_file, hostapd_conf)
diff --git a/packages/antlion/controllers/ap_lib/hostapd_ap_preset.py b/packages/antlion/controllers/ap_lib/hostapd_ap_preset.py
new file mode 100644
index 0000000..6a11120
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_ap_preset.py
@@ -0,0 +1,544 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from typing import Any, FrozenSet, TypeVar
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security
+from antlion.controllers.ap_lib.third_party_ap_profiles import (
+    actiontec,
+    asus,
+    belkin,
+    linksys,
+    netgear,
+    securifi,
+    tplink,
+)
+
+T = TypeVar("T")
+
+
+def _get_or_default(var: T | None, default_value: T) -> T:
+    """Check variable and return non-null value.
+
+    Args:
+         var: Any variable.
+         default_value: Value to return if the var is None.
+
+    Returns:
+         Variable value if not None, default value otherwise.
+    """
+    return var if var is not None else default_value
+
+
+def create_ap_preset(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    profile_name: str = "whirlwind",
+    channel: int | None = None,
+    mode: str | None = None,
+    frequency: int | None = None,
+    security: Security | None = None,
+    pmf_support: int | None = None,
+    ssid: str | None = None,
+    hidden: bool | None = None,
+    dtim_period: int | None = None,
+    frag_threshold: int | None = None,
+    rts_threshold: int | None = None,
+    force_wmm: bool | None = None,
+    beacon_interval: int | None = None,
+    short_preamble: bool | None = None,
+    n_capabilities: list[Any] | None = None,
+    ac_capabilities: list[Any] | None = None,
+    vht_bandwidth: int | None = None,
+    wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+    bss_settings: list[Any] = [],
+) -> hostapd_config.HostapdConfig:
+    """AP preset config generator.  This a wrapper for hostapd_config but
+       but supplies the default settings for the preset that is selected.
+
+        You may specify channel or frequency, but not both.  Both options
+        are checked for validity (i.e. you can't specify an invalid channel
+        or a frequency that will not be accepted).
+
+    Args:
+        profile_name: The name of the device want the preset for.
+                      Options: whirlwind
+        channel: Channel number.
+        dtim: DTIM value of the AP, default is 2.
+        frequency: Frequency of channel.
+        security: The security settings to use.
+        ssid: The name of the ssid to broadcast.
+        pmf_support: Whether pmf is disabled, enabled, or required
+        vht_bandwidth: VHT bandwidth for 11ac operation.
+        bss_settings: The settings for all bss.
+        iface_wlan_2g: the wlan 2g interface name of the AP.
+        iface_wlan_5g: the wlan 5g interface name of the AP.
+        mode: The hostapd 802.11 mode of operation.
+        ssid: The ssid for the wireless network.
+        hidden: Whether to include the ssid in the beacons.
+        dtim_period: The dtim period for the BSS
+        frag_threshold: Max size of packet before fragmenting the packet.
+        rts_threshold: Max size of packet before requiring protection for
+            rts/cts or cts to self.
+        n_capabilities: 802.11n capabilities for for BSS to advertise.
+        ac_capabilities: 802.11ac capabilities for for BSS to advertise.
+        wnm_features: WNM features to enable on the AP.
+
+    Returns: A hostapd_config object that can be used by the hostapd object.
+    """
+    if security is None:
+        security = Security()
+
+    # Verify interfaces
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+
+    if channel is not None:
+        frequency = hostapd_config.get_frequency_for_channel(channel)
+    elif frequency is not None:
+        channel = hostapd_config.get_channel_for_frequency(frequency)
+
+    if channel is None or frequency is None:
+        raise ValueError("Must specify channel or frequency")
+
+    if profile_name == "whirlwind":
+        # profile indicates phy mode is 11bgn for 2.4Ghz or 11acn for 5Ghz
+        hidden = _get_or_default(hidden, False)
+        force_wmm = _get_or_default(force_wmm, True)
+        beacon_interval = _get_or_default(beacon_interval, 100)
+        short_preamble = _get_or_default(short_preamble, True)
+        dtim_period = _get_or_default(dtim_period, 2)
+        frag_threshold = _get_or_default(frag_threshold, 2346)
+        rts_threshold = _get_or_default(rts_threshold, 2347)
+        if frequency < 5000:
+            interface = iface_wlan_2g
+            mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED)
+            n_capabilities = _get_or_default(
+                n_capabilities,
+                [
+                    hostapd_constants.N_CAPABILITY_LDPC,
+                    hostapd_constants.N_CAPABILITY_SGI20,
+                    hostapd_constants.N_CAPABILITY_SGI40,
+                    hostapd_constants.N_CAPABILITY_TX_STBC,
+                    hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+                ],
+            )
+            config = hostapd_config.HostapdConfig(
+                ssid=ssid,
+                hidden=hidden,
+                security=security,
+                pmf_support=pmf_support,
+                interface=interface,
+                mode=mode,
+                force_wmm=force_wmm,
+                beacon_interval=beacon_interval,
+                dtim_period=dtim_period,
+                short_preamble=short_preamble,
+                frequency=frequency,
+                n_capabilities=n_capabilities,
+                frag_threshold=frag_threshold,
+                rts_threshold=rts_threshold,
+                wnm_features=wnm_features,
+                bss_settings=bss_settings,
+            )
+        else:
+            interface = iface_wlan_5g
+            vht_bandwidth = _get_or_default(vht_bandwidth, 80)
+            mode = _get_or_default(mode, hostapd_constants.MODE_11AC_MIXED)
+            if hostapd_config.ht40_plus_allowed(channel):
+                extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
+            elif hostapd_config.ht40_minus_allowed(channel):
+                extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
+            # Channel 165 operates in 20MHz with n or ac modes.
+            if channel == 165:
+                mode = hostapd_constants.MODE_11N_MIXED
+                extended_channel = hostapd_constants.N_CAPABILITY_HT20
+            # Define the n capability vector for 20 MHz and higher bandwidth
+            if not vht_bandwidth:
+                n_capabilities = _get_or_default(n_capabilities, [])
+            elif vht_bandwidth >= 40:
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        extended_channel,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    ],
+                )
+            else:
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                        hostapd_constants.N_CAPABILITY_HT20,
+                    ],
+                )
+            ac_capabilities = _get_or_default(
+                ac_capabilities,
+                [
+                    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+                    hostapd_constants.AC_CAPABILITY_RXLDPC,
+                    hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+                    hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+                    hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+                    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+                    hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
+                ],
+            )
+            config = hostapd_config.HostapdConfig(
+                ssid=ssid,
+                hidden=hidden,
+                security=security,
+                pmf_support=pmf_support,
+                interface=interface,
+                mode=mode,
+                force_wmm=force_wmm,
+                vht_channel_width=vht_bandwidth,
+                beacon_interval=beacon_interval,
+                dtim_period=dtim_period,
+                short_preamble=short_preamble,
+                frequency=frequency,
+                frag_threshold=frag_threshold,
+                rts_threshold=rts_threshold,
+                wnm_features=wnm_features,
+                n_capabilities=n_capabilities,
+                ac_capabilities=ac_capabilities,
+                bss_settings=bss_settings,
+            )
+    elif profile_name == "whirlwind_11ab_legacy":
+        if frequency < 5000:
+            mode = hostapd_constants.MODE_11B
+        else:
+            mode = hostapd_constants.MODE_11A
+
+        config = create_ap_preset(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            ssid=ssid,
+            channel=channel,
+            mode=mode,
+            security=security,
+            pmf_support=pmf_support,
+            hidden=hidden,
+            force_wmm=force_wmm,
+            beacon_interval=beacon_interval,
+            short_preamble=short_preamble,
+            dtim_period=dtim_period,
+            rts_threshold=rts_threshold,
+            frag_threshold=frag_threshold,
+            n_capabilities=[],
+            ac_capabilities=[],
+            vht_bandwidth=None,
+            wnm_features=wnm_features,
+        )
+    elif profile_name == "whirlwind_11ag_legacy":
+        if frequency < 5000:
+            mode = hostapd_constants.MODE_11G
+        else:
+            mode = hostapd_constants.MODE_11A
+
+        config = create_ap_preset(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            ssid=ssid,
+            channel=channel,
+            mode=mode,
+            security=security,
+            pmf_support=pmf_support,
+            hidden=hidden,
+            force_wmm=force_wmm,
+            beacon_interval=beacon_interval,
+            short_preamble=short_preamble,
+            dtim_period=dtim_period,
+            rts_threshold=rts_threshold,
+            frag_threshold=frag_threshold,
+            n_capabilities=[],
+            ac_capabilities=[],
+            vht_bandwidth=None,
+            wnm_features=wnm_features,
+        )
+    elif profile_name == "mistral":
+        hidden = _get_or_default(hidden, False)
+        force_wmm = _get_or_default(force_wmm, True)
+        beacon_interval = _get_or_default(beacon_interval, 100)
+        short_preamble = _get_or_default(short_preamble, True)
+        dtim_period = _get_or_default(dtim_period, 2)
+        frag_threshold = None
+        rts_threshold = None
+
+        # Google IE
+        # Country Code IE ('us' lowercase)
+        vendor_elements = {
+            "vendor_elements": "dd0cf4f5e80505ff0000ffffffff" "070a75732024041e95051e00"
+        }
+        default_configs = {"bridge": "br-lan", "iapp_interface": "br-lan"}
+        additional_params = (
+            vendor_elements
+            | default_configs
+            | hostapd_constants.ENABLE_RRM_BEACON_REPORT
+            | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
+        )
+
+        if frequency < 5000:
+            interface = iface_wlan_2g
+            mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED)
+            n_capabilities = _get_or_default(
+                n_capabilities,
+                [
+                    hostapd_constants.N_CAPABILITY_LDPC,
+                    hostapd_constants.N_CAPABILITY_SGI20,
+                    hostapd_constants.N_CAPABILITY_SGI40,
+                    hostapd_constants.N_CAPABILITY_TX_STBC,
+                    hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+                ],
+            )
+
+            config = hostapd_config.HostapdConfig(
+                ssid=ssid,
+                hidden=hidden,
+                security=security,
+                pmf_support=pmf_support,
+                interface=interface,
+                mode=mode,
+                force_wmm=force_wmm,
+                beacon_interval=beacon_interval,
+                dtim_period=dtim_period,
+                short_preamble=short_preamble,
+                frequency=frequency,
+                n_capabilities=n_capabilities,
+                frag_threshold=frag_threshold,
+                rts_threshold=rts_threshold,
+                wnm_features=wnm_features,
+                bss_settings=bss_settings,
+                additional_parameters=additional_params,
+                set_ap_defaults_profile=profile_name,
+            )
+        else:
+            interface = iface_wlan_5g
+            vht_bandwidth = _get_or_default(vht_bandwidth, 80)
+            mode = _get_or_default(mode, hostapd_constants.MODE_11AC_MIXED)
+            if hostapd_config.ht40_plus_allowed(channel):
+                extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
+            elif hostapd_config.ht40_minus_allowed(channel):
+                extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
+            # Channel 165 operates in 20MHz with n or ac modes.
+            if channel == 165:
+                mode = hostapd_constants.MODE_11N_MIXED
+                extended_channel = hostapd_constants.N_CAPABILITY_HT20
+            if vht_bandwidth >= 40:
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        extended_channel,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                    ],
+                )
+            else:
+                n_capabilities = _get_or_default(
+                    n_capabilities,
+                    [
+                        hostapd_constants.N_CAPABILITY_LDPC,
+                        hostapd_constants.N_CAPABILITY_SGI20,
+                        hostapd_constants.N_CAPABILITY_SGI40,
+                        hostapd_constants.N_CAPABILITY_TX_STBC,
+                        hostapd_constants.N_CAPABILITY_RX_STBC1,
+                        hostapd_constants.N_CAPABILITY_HT20,
+                    ],
+                )
+            ac_capabilities = _get_or_default(
+                ac_capabilities,
+                [
+                    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+                    hostapd_constants.AC_CAPABILITY_RXLDPC,
+                    hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+                    hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+                    hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+                    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+                    hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
+                    hostapd_constants.AC_CAPABILITY_SU_BEAMFORMER,
+                    hostapd_constants.AC_CAPABILITY_SU_BEAMFORMEE,
+                    hostapd_constants.AC_CAPABILITY_MU_BEAMFORMER,
+                    hostapd_constants.AC_CAPABILITY_SOUNDING_DIMENSION_4,
+                    hostapd_constants.AC_CAPABILITY_BF_ANTENNA_4,
+                ],
+            )
+
+            config = hostapd_config.HostapdConfig(
+                ssid=ssid,
+                hidden=hidden,
+                security=security,
+                pmf_support=pmf_support,
+                interface=interface,
+                mode=mode,
+                force_wmm=force_wmm,
+                vht_channel_width=vht_bandwidth,
+                beacon_interval=beacon_interval,
+                dtim_period=dtim_period,
+                short_preamble=short_preamble,
+                frequency=frequency,
+                frag_threshold=frag_threshold,
+                rts_threshold=rts_threshold,
+                n_capabilities=n_capabilities,
+                ac_capabilities=ac_capabilities,
+                wnm_features=wnm_features,
+                bss_settings=bss_settings,
+                additional_parameters=additional_params,
+                set_ap_defaults_profile=profile_name,
+            )
+    elif profile_name == "actiontec_pk5000":
+        config = actiontec.actiontec_pk5000(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "actiontec_mi424wr":
+        config = actiontec.actiontec_mi424wr(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "asus_rtac66u":
+        config = asus.asus_rtac66u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtac86u":
+        config = asus.asus_rtac86u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtac5300":
+        config = asus.asus_rtac5300(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtn56u":
+        config = asus.asus_rtn56u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "asus_rtn66u":
+        config = asus.asus_rtn66u(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "belkin_f9k1001v5":
+        config = belkin.belkin_f9k1001v5(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "linksys_ea4500":
+        config = linksys.linksys_ea4500(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "linksys_ea9500":
+        config = linksys.linksys_ea9500(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "linksys_wrt1900acv2":
+        config = linksys.linksys_wrt1900acv2(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "netgear_r7000":
+        config = netgear.netgear_r7000(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "netgear_wndr3400":
+        config = netgear.netgear_wndr3400(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "securifi_almond":
+        config = securifi.securifi_almond(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    elif profile_name == "tplink_archerc5":
+        config = tplink.tplink_archerc5(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_archerc7":
+        config = tplink.tplink_archerc7(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_c1200":
+        config = tplink.tplink_c1200(
+            iface_wlan_2g=iface_wlan_2g,
+            iface_wlan_5g=iface_wlan_5g,
+            channel=channel,
+            ssid=ssid,
+            security=security,
+        )
+    elif profile_name == "tplink_tlwr940n":
+        config = tplink.tplink_tlwr940n(
+            iface_wlan_2g=iface_wlan_2g, channel=channel, ssid=ssid, security=security
+        )
+    else:
+        raise ValueError(f"Invalid ap model specified ({profile_name})")
+
+    return config
diff --git a/packages/antlion/controllers/ap_lib/hostapd_bss_settings.py b/packages/antlion/controllers/ap_lib/hostapd_bss_settings.py
new file mode 100644
index 0000000..2f4d261
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_bss_settings.py
@@ -0,0 +1,61 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+
+from antlion.controllers.ap_lib.hostapd_security import Security
+
+
class BssSettings(object):
    """Settings for a single hostapd BSS (virtual AP).

    Lets one physical device broadcast multiple networks.

    Attributes:
        name: The name that this bss will go by.
        ssid: The name of the ssid to broadcast.
        hidden: If true then the ssid will be hidden.
        security: The security settings to use.
        bssid: The bssid to use.
    """

    def __init__(
        self,
        name: str,
        ssid: str,
        security: Security,
        hidden: bool = False,
        bssid: str | None = None,
    ):
        self.name = name
        self.ssid = ssid
        self.security = security
        self.hidden = hidden
        self.bssid = bssid

    def generate_dict(self) -> dict[str, str | int]:
        """Returns: A dictionary of bss settings."""
        # Insertion order matters to hostapd: "bss" must come first.
        settings: dict[str, str | int] = collections.OrderedDict(bss=self.name)
        if self.bssid:
            settings["bssid"] = self.bssid
        if self.ssid:
            settings["ssid"] = self.ssid
            settings["ignore_broadcast_ssid"] = 1 if self.hidden else 0

        settings.update(self.security.generate_dict())
        return settings
diff --git a/packages/antlion/controllers/ap_lib/hostapd_config.py b/packages/antlion/controllers/ap_lib/hostapd_config.py
new file mode 100644
index 0000000..749e585
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_config.py
@@ -0,0 +1,710 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import logging
+from typing import Any, FrozenSet
+
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_bss_settings import BssSettings
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
def ht40_plus_allowed(channel: int):
    """Returns: True iff HT40+ is enabled for this configuration."""
    allowed = hostapd_constants.HT40_ALLOW_MAP[
        hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS
    ]
    return channel in allowed
+
+
def ht40_minus_allowed(channel: int):
    """Returns: True iff HT40- is enabled for this configuration."""
    allowed = hostapd_constants.HT40_ALLOW_MAP[
        hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS
    ]
    return channel in allowed
+
+
def get_frequency_for_channel(channel: int) -> int:
    """The frequency associated with a given channel number.

    Args:
        channel: channel number.

    Returns:
        Frequency in MHz associated with the channel.

    Raises:
        ValueError: if the channel is not in the channel map.
    """
    # CHANNEL_MAP is keyed by frequency, so do a reverse scan.
    for frequency, mapped_channel in hostapd_constants.CHANNEL_MAP.items():
        if channel == mapped_channel:
            return frequency
    raise ValueError(f"Unknown channel value: {channel!r}.")
+
+
def get_channel_for_frequency(frequency: int) -> int:
    """The channel number associated with a given frequency.

    Args:
        frequency: frequency in MHz.

    Returns:
        Channel number associated with the frequency.

    Raises:
        KeyError: if the frequency is not in the channel map.
    """
    return hostapd_constants.CHANNEL_MAP[frequency]
+
+
+class HostapdConfig(object):
+    """The root settings for the router.
+
+    All the settings for a router that are not part of an ssid.
+    """
+
    def __init__(
        self,
        interface: str | None = None,
        mode: str | None = None,
        channel: int | None = None,
        frequency: int | None = None,
        n_capabilities: list[Any] | None = None,
        beacon_interval: int | None = None,
        dtim_period: int | None = None,
        frag_threshold: int | None = None,
        rts_threshold: int | None = None,
        short_preamble: bool | None = None,
        ssid: str | None = None,
        hidden: bool = False,
        security: Security | None = None,
        bssid: str | None = None,
        force_wmm: bool | None = None,
        pmf_support: int | None = None,
        obss_interval: int | None = None,
        vht_channel_width: Any | None = None,
        vht_center_channel: int | None = None,
        ac_capabilities: list[Any] | None = None,
        beacon_footer: str = "",
        spectrum_mgmt_required: bool | None = None,
        scenario_name: str | None = None,
        min_streams: int | None = None,
        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
        bss_settings: list[Any] | None = None,
        additional_parameters: dict[str, Any] | None = None,
        set_ap_defaults_profile: str = "whirlwind",
    ) -> None:
        """Construct a HostapdConfig.

        You may specify channel or frequency, but not both.  Both options
        are checked for validity (i.e. you can't specify an invalid channel
        or a frequency that will not be accepted).

        Args:
            interface: The name of the interface to use.
            mode: MODE_11x defined above.
            channel: Channel number.
            frequency: Frequency of channel.
            n_capabilities: List of N_CAPABILITY_x defined above.
            beacon_interval: Beacon interval of AP.
            dtim_period: Include a DTIM every |dtim_period| beacons.
            frag_threshold: Maximum outgoing data frame size.
            rts_threshold: Maximum packet size without requiring explicit
                protection via rts/cts or cts to self.
            short_preamble: Whether to use a short preamble.
            ssid: string, The name of the ssid to broadcast.
            hidden: Should the ssid be hidden.
            security: The security settings to use.
            bssid: A MAC address like string for the BSSID.
            force_wmm: True if we should force WMM on, False if we should
                force it off, None if we shouldn't force anything.
            pmf_support: One of PMF_SUPPORT_* above.  Controls whether the
                client supports/must support 802.11w. If None, defaults to
                required with wpa3, else defaults to disabled.
            obss_interval: Interval in seconds that client should be
                required to do background scans for overlapping BSSes.
            vht_channel_width: Object channel width
            vht_center_channel: Center channel of segment 0.
            ac_capabilities: List of AC_CAPABILITY_x defined above.
            beacon_footer: Containing (not validated) IE data to be
                placed at the end of the beacon.
            spectrum_mgmt_required: True if we require the DUT to support
                spectrum management.
            scenario_name: To be included in file names, instead
                of the interface name.
            min_streams: Number of spatial streams required.
            wnm_features: WNM features to enable on the AP.
            bss_settings: The settings for all bss.
            additional_parameters: A dictionary of additional parameters to add
                to the hostapd config.
            set_ap_defaults_profile: profile name to load defaults from

        Raises:
            ValueError: If both channel and frequency are given (or neither),
                an n_capability is unknown, the mode does not support the
                resulting frequency, or the PMF / VHT channel width arguments
                are invalid.
        """
        # Normalize mutable/None defaults before any of them are used.
        if n_capabilities is None:
            n_capabilities = []
        if ac_capabilities is None:
            ac_capabilities = []
        if bss_settings is None:
            bss_settings = []
        if additional_parameters is None:
            additional_parameters = {}
        if security is None:
            security = Security()

        self.set_ap_defaults_profile = set_ap_defaults_profile
        self._interface = interface
        if channel is not None and frequency is not None:
            raise ValueError("Specify either frequency or channel " "but not both.")

        unknown_caps = [
            cap
            for cap in n_capabilities
            if cap not in hostapd_constants.N_CAPABILITIES_MAPPING
        ]
        if unknown_caps:
            raise ValueError(f"Unknown capabilities: {unknown_caps!r}")

        # Channel and frequency are interchangeable: the channel setter
        # stores the equivalent frequency.
        if channel:
            self.channel = channel
        elif frequency:
            self.frequency = frequency
        else:
            raise ValueError("Specify either frequency or channel.")

        self._n_capabilities = set(n_capabilities)
        if force_wmm is not None:
            self._wmm_enabled = force_wmm
        elif self._n_capabilities:
            self._wmm_enabled = True
        # NOTE(review): _wmm_enabled is never assigned when force_wmm is None
        # and n_capabilities is empty; __repr__ reads it unconditionally and
        # would raise AttributeError on that path -- confirm reachability.
        if self._n_capabilities and mode is None:
            mode = hostapd_constants.MODE_11N_PURE
        self._mode = mode

        if not self.supports_frequency(self.frequency):
            raise ValueError(
                "Configured a mode %s that does not support "
                "frequency %d" % (self._mode, self.frequency)
            )

        self._beacon_interval = beacon_interval
        self._dtim_period = dtim_period
        self._frag_threshold = frag_threshold
        self._rts_threshold = rts_threshold
        self._short_preamble = short_preamble
        self._ssid = ssid
        self._hidden = hidden
        self._security = security
        self._bssid = bssid
        # Default PMF Values
        if pmf_support is None:
            if self.security and self.security.security_mode is SecurityMode.WPA3:
                # Set PMF required for WPA3
                self._pmf_support = hostapd_constants.PMF_SUPPORT_REQUIRED
            elif self.security and self.security.security_mode.is_wpa3():
                # Default PMF to enabled for WPA3 mixed modes (can be
                # overwritten by explicitly provided value)
                self._pmf_support = hostapd_constants.PMF_SUPPORT_ENABLED
            else:
                # Default PMF to disabled for all other modes (can be
                # overwritten by explicitly provided value)
                self._pmf_support = hostapd_constants.PMF_SUPPORT_DISABLED
        elif pmf_support not in hostapd_constants.PMF_SUPPORT_VALUES:
            raise ValueError(f"Invalid value for pmf_support: {pmf_support!r}")
        elif (
            pmf_support != hostapd_constants.PMF_SUPPORT_REQUIRED
            and self.security
            and self.security.security_mode is SecurityMode.WPA3
        ):
            raise ValueError("PMF support must be required with wpa3.")
        else:
            self._pmf_support = pmf_support
        self._obss_interval = obss_interval
        if self.is_11ac:
            # hostapd's vht_oper_chwidth value 0 covers both 20 and 40 MHz.
            if str(vht_channel_width) == "40" or str(vht_channel_width) == "20":
                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_40
            elif str(vht_channel_width) == "80":
                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80
            elif str(vht_channel_width) == "160":
                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_160
            elif str(vht_channel_width) == "80+80":
                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80_80
            elif vht_channel_width is not None:
                raise ValueError("Invalid channel width")
            else:
                logging.warning(
                    "No channel bandwidth specified.  Using 80MHz for 11ac."
                )
                self._vht_oper_chwidth = 1
            if vht_center_channel is not None:
                self._vht_oper_centr_freq_seg0_idx = vht_center_channel
            elif vht_channel_width == 20 and channel is not None:
                # At 20 MHz the center channel is the primary channel itself.
                self._vht_oper_centr_freq_seg0_idx = channel
            else:
                self._vht_oper_centr_freq_seg0_idx = (
                    self._get_11ac_center_channel_from_channel(self.channel)
                )
            self._ac_capabilities = set(ac_capabilities)
        self._beacon_footer = beacon_footer
        self._spectrum_mgmt_required = spectrum_mgmt_required
        self._scenario_name = scenario_name
        self._min_streams = min_streams
        self._wnm_features = wnm_features
        self._additional_parameters = additional_parameters

        # OrderedDict keeps bss output order deterministic; duplicate names
        # are rejected up front.
        self._bss_lookup: dict[str, BssSettings] = collections.OrderedDict()
        for bss in bss_settings:
            if bss.name in self._bss_lookup:
                raise ValueError(
                    "Cannot have multiple bss settings with the same name."
                )
            self._bss_lookup[bss.name] = bss
+
+    def _get_11ac_center_channel_from_channel(self, channel: int) -> int:
+        """Returns the center channel of the selected channel band based
+        on the channel and channel bandwidth provided.
+        """
+        channel = int(channel)
+        center_channel_delta = hostapd_constants.CENTER_CHANNEL_MAP[
+            self._vht_oper_chwidth
+        ]["delta"]
+
+        for channel_map in hostapd_constants.CENTER_CHANNEL_MAP[self._vht_oper_chwidth][
+            "channels"
+        ]:
+            lower_channel_bound, upper_channel_bound = channel_map
+            if lower_channel_bound <= channel <= upper_channel_bound:
+                return lower_channel_bound + center_channel_delta
+        raise ValueError(f"Invalid channel for {self._vht_oper_chwidth}.")
+
+    @property
+    def _get_default_config(self):
+        """Returns: dict of default options for hostapd."""
+        if self.set_ap_defaults_profile == "mistral":
+            return collections.OrderedDict(
+                [
+                    ("logger_syslog", "-1"),
+                    ("logger_syslog_level", "0"),
+                    # default RTS and frag threshold to ``off''
+                    ("rts_threshold", None),
+                    ("fragm_threshold", None),
+                    ("driver", hostapd_constants.DRIVER_NAME),
+                ]
+            )
+        else:
+            return collections.OrderedDict(
+                [
+                    ("logger_syslog", "-1"),
+                    ("logger_syslog_level", "0"),
+                    # default RTS and frag threshold to ``off''
+                    ("rts_threshold", "2347"),
+                    ("fragm_threshold", "2346"),
+                    ("driver", hostapd_constants.DRIVER_NAME),
+                ]
+            )
+
+    @property
+    def _hostapd_ht_capabilities(self):
+        """Returns: string suitable for the ht_capab= line in a hostapd config."""
+        ret = []
+        for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
+            if cap in self._n_capabilities:
+                ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap])
+        return "".join(ret)
+
+    @property
+    def _hostapd_vht_capabilities(self):
+        """Returns: string suitable for the vht_capab= line in a hostapd config."""
+        ret = []
+        for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys():
+            if cap in self._ac_capabilities:
+                ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap])
+        return "".join(ret)
+
+    @property
+    def _require_ht(self):
+        """Returns: True iff clients should be required to support HT."""
+        return self._mode == hostapd_constants.MODE_11N_PURE
+
+    @property
+    def _require_vht(self):
+        """Returns: True if clients should be required to support VHT."""
+        return self._mode == hostapd_constants.MODE_11AC_PURE
+
+    @property
+    def hw_mode(self):
+        """Returns: string hardware mode understood by hostapd."""
+        if self._mode == hostapd_constants.MODE_11A:
+            return hostapd_constants.MODE_11A
+        if self._mode == hostapd_constants.MODE_11B:
+            return hostapd_constants.MODE_11B
+        if self._mode == hostapd_constants.MODE_11G:
+            return hostapd_constants.MODE_11G
+        if self.is_11n or self.is_11ac:
+            # For their own historical reasons, hostapd wants it this way.
+            if self._frequency > 5000:
+                return hostapd_constants.MODE_11A
+            return hostapd_constants.MODE_11G
+        raise ValueError("Invalid mode.")
+
+    @property
+    def is_11n(self):
+        """Returns: True if we're trying to host an 802.11n network."""
+        return self._mode in (
+            hostapd_constants.MODE_11N_MIXED,
+            hostapd_constants.MODE_11N_PURE,
+        )
+
+    @property
+    def is_11ac(self):
+        """Returns: True if we're trying to host an 802.11ac network."""
+        return self._mode in (
+            hostapd_constants.MODE_11AC_MIXED,
+            hostapd_constants.MODE_11AC_PURE,
+        )
+
+    @property
+    def channel(self):
+        """Returns: int channel number for self.frequency."""
+        return get_channel_for_frequency(self.frequency)
+
+    @channel.setter
+    def channel(self, value):
+        """Sets the channel number to configure hostapd to listen on.
+
+        Args:
+            value: int, channel number.
+
+        """
+        self.frequency = get_frequency_for_channel(value)
+
+    @property
+    def bssid(self) -> str | None:
+        return self._bssid
+
+    @bssid.setter
+    def bssid(self, value: str):
+        self._bssid = value
+
+    @property
+    def frequency(self) -> int:
+        """Returns: frequency for hostapd to listen on."""
+        return self._frequency
+
+    @frequency.setter
+    def frequency(self, value: int):
+        """Sets the frequency for hostapd to listen on.
+
+        Args:
+            value: int, frequency in MHz.
+
+        """
+        if value not in hostapd_constants.CHANNEL_MAP:
+            raise ValueError(f"Tried to set an invalid frequency: {value!r}.")
+
+        self._frequency = value
+
+    @property
+    def bss_lookup(self) -> dict[str, BssSettings]:
+        return self._bss_lookup
+
    @property
    def ssid(self) -> str | None:
        """Returns: the SSID string being broadcast, or None if unset."""
        return self._ssid

    @ssid.setter
    def ssid(self, value: str):
        """Sets the ssid for the hostapd.

        Args:
            value: the new SSID string to broadcast.
        """
        self._ssid = value
+
+    @property
+    def hidden(self):
+        """Returns: bool, True if the ssid is hidden, false otherwise."""
+        return self._hidden
+
+    @hidden.setter
+    def hidden(self, value: bool):
+        """Sets if this ssid is hidden.
+
+        Args:
+            value: If true the ssid will be hidden.
+        """
+        self.hidden = value
+
+    @property
+    def security(self) -> Security:
+        """Returns: The security type being used."""
+        return self._security
+
+    @security.setter
+    def security(self, value: Security):
+        """Sets the security options to use.
+
+        Args:
+            value: The type of security to use.
+        """
+        self._security = value
+
+    @property
+    def ht_packet_capture_mode(self) -> str | None:
+        """Get an appropriate packet capture HT parameter.
+
+        When we go to configure a raw monitor we need to configure
+        the phy to listen on the correct channel.  Part of doing
+        so is to specify the channel width for HT channels.  In the
+        case that the AP is configured to be either HT40+ or HT40-,
+        we could return the wrong parameter because we don't know which
+        configuration will be chosen by hostap.
+
+        Returns:
+            string, HT parameter for frequency configuration.
+
+        """
+        if not self.is_11n:
+            return None
+
+        if ht40_plus_allowed(self.channel):
+            return "HT40+"
+
+        if ht40_minus_allowed(self.channel):
+            return "HT40-"
+
+        return "HT20"
+
+    @property
+    def beacon_footer(self) -> str:
+        return self._beacon_footer
+
+    @beacon_footer.setter
+    def beacon_footer(self, value: str):
+        """Changes the beacon footer.
+
+        Args:
+            value: The beacon footer value.
+        """
+        self._beacon_footer = value
+
    @property
    def scenario_name(self) -> str | None:
        """Name used in file names instead of the interface name, if set."""
        return self._scenario_name
+
    @property
    def min_streams(self) -> int | None:
        """Number of spatial streams required, if configured."""
        return self._min_streams
+
    @property
    def wnm_features(self) -> FrozenSet[hostapd_constants.WnmFeature]:
        """WNM features enabled on the AP."""
        return self._wnm_features

    @wnm_features.setter
    def wnm_features(self, value: FrozenSet[hostapd_constants.WnmFeature]):
        """Replaces the set of WNM features to enable on the AP."""
        self._wnm_features = value
+
+    def __repr__(self) -> str:
+        return (
+            "%s(mode=%r, channel=%r, frequency=%r, "
+            "n_capabilities=%r, beacon_interval=%r, "
+            "dtim_period=%r, frag_threshold=%r, ssid=%r, bssid=%r, "
+            "wmm_enabled=%r, security_config=%r, "
+            "spectrum_mgmt_required=%r)"
+            % (
+                self.__class__.__name__,
+                self._mode,
+                self.channel,
+                self.frequency,
+                self._n_capabilities,
+                self._beacon_interval,
+                self._dtim_period,
+                self._frag_threshold,
+                self._ssid,
+                self._bssid,
+                self._wmm_enabled,
+                self._security,
+                self._spectrum_mgmt_required,
+            )
+        )
+
+    def supports_channel(self, value: int) -> bool:
+        """Check whether channel is supported by the current hardware mode.
+
+        @param value: channel to check.
+        @return True iff the current mode supports the band of the channel.
+
+        """
+        for freq, channel in hostapd_constants.CHANNEL_MAP.items():
+            if channel == value:
+                return self.supports_frequency(freq)
+
+        return False
+
+    def supports_frequency(self, frequency: int) -> bool:
+        """Check whether frequency is supported by the current hardware mode.
+
+        @param frequency: frequency to check.
+        @return True iff the current mode supports the band of the frequency.
+
+        """
+        if self._mode == hostapd_constants.MODE_11A and frequency < 5000:
+            return False
+
+        if (
+            self._mode in (hostapd_constants.MODE_11B, hostapd_constants.MODE_11G)
+            and frequency > 5000
+        ):
+            return False
+
+        if frequency not in hostapd_constants.CHANNEL_MAP:
+            return False
+
+        channel = hostapd_constants.CHANNEL_MAP[frequency]
+        supports_plus = (
+            channel
+            in hostapd_constants.HT40_ALLOW_MAP[
+                hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS
+            ]
+        )
+        supports_minus = (
+            channel
+            in hostapd_constants.HT40_ALLOW_MAP[
+                hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS
+            ]
+        )
+        if (
+            hostapd_constants.N_CAPABILITY_HT40_PLUS in self._n_capabilities
+            and not supports_plus
+        ):
+            return False
+
+        if (
+            hostapd_constants.N_CAPABILITY_HT40_MINUS in self._n_capabilities
+            and not supports_minus
+        ):
+            return False
+
+        return True
+
+    def add_bss(self, bss: BssSettings) -> None:
+        """Adds a new bss setting.
+
+        Args:
+            bss: The bss settings to add.
+        """
+        if bss.name in self._bss_lookup:
+            raise ValueError("A bss with the same name already exists.")
+
+        self._bss_lookup[bss.name] = bss
+
+    def remove_bss(self, bss_name: str) -> None:
+        """Removes a bss setting from the config."""
+        del self._bss_lookup[bss_name]
+
+    def package_configs(self) -> list[dict[str, str | int]]:
+        """Package the configs.
+
+        Builds one dictionary for the primary BSS, then one per additional
+        BSS, then (optionally) the free-form additional parameters.
+
+        Returns:
+            A list of dictionaries, one dictionary for each section of the
+            config.
+        """
+        # Start with the default config parameters.
+        # NOTE(review): accessed without parentheses, so _get_default_config
+        # is presumably a property returning a base config dict -- confirm.
+        conf = self._get_default_config
+
+        if self._interface:
+            conf["interface"] = self._interface
+        if self._bssid:
+            conf["bssid"] = self._bssid
+        if self._ssid:
+            conf["ssid"] = self._ssid
+            # Hidden networks suppress the SSID in beacon frames.
+            conf["ignore_broadcast_ssid"] = 1 if self._hidden else 0
+        conf["channel"] = self.channel
+        conf["hw_mode"] = self.hw_mode
+        # 802.11ac implies 802.11n, so HT options are emitted for both.
+        if self.is_11n or self.is_11ac:
+            conf["ieee80211n"] = 1
+            conf["ht_capab"] = self._hostapd_ht_capabilities
+        if self.is_11ac:
+            conf["ieee80211ac"] = 1
+            conf["vht_oper_chwidth"] = self._vht_oper_chwidth
+            conf["vht_oper_centr_freq_seg0_idx"] = self._vht_oper_centr_freq_seg0_idx
+            conf["vht_capab"] = self._hostapd_vht_capabilities
+        # Tri-state: None means "use hostapd's default", so compare against
+        # None instead of relying on truthiness.
+        if self._wmm_enabled is not None:
+            conf["wmm_enabled"] = 1 if self._wmm_enabled else 0
+        if self._require_ht:
+            conf["require_ht"] = 1
+        if self._require_vht:
+            conf["require_vht"] = 1
+        if self._beacon_interval:
+            conf["beacon_int"] = self._beacon_interval
+        if self._dtim_period:
+            conf["dtim_period"] = self._dtim_period
+        if self._frag_threshold:
+            conf["fragm_threshold"] = self._frag_threshold
+        if self._rts_threshold:
+            conf["rts_threshold"] = self._rts_threshold
+        if self._pmf_support:
+            conf["ieee80211w"] = self._pmf_support
+        if self._obss_interval:
+            conf["obss_interval"] = self._obss_interval
+        if self._short_preamble:
+            conf["preamble"] = 1
+        if self._spectrum_mgmt_required:
+            # To set spectrum_mgmt_required, we must first set
+            # local_pwr_constraint. And to set local_pwr_constraint,
+            # we must first set ieee80211d. And to set ieee80211d, ...
+            # Point being: order matters here.
+            conf["country_code"] = "US"  # Required for local_pwr_constraint
+            conf["ieee80211d"] = 1  # Required for local_pwr_constraint
+            conf["local_pwr_constraint"] = 0  # No local constraint
+            conf["spectrum_mgmt_required"] = 1  # Requires local_pwr_constraint
+
+        # Security options are applied last and may overwrite earlier keys.
+        for k, v in self._security.generate_dict().items():
+            conf[k] = v
+
+        # Map each requested 802.11v (WNM) feature onto its hostapd options.
+        for wnm_feature in self._wnm_features:
+            if wnm_feature == hostapd_constants.WnmFeature.TIME_ADVERTISEMENT:
+                conf.update(hostapd_constants.ENABLE_WNM_TIME_ADVERTISEMENT)
+            elif wnm_feature == hostapd_constants.WnmFeature.WNM_SLEEP_MODE:
+                conf.update(hostapd_constants.ENABLE_WNM_SLEEP_MODE)
+            elif wnm_feature == hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT:
+                conf.update(hostapd_constants.ENABLE_WNM_BSS_TRANSITION_MANAGEMENT)
+            elif wnm_feature == hostapd_constants.WnmFeature.PROXY_ARP:
+                conf.update(hostapd_constants.ENABLE_WNM_PROXY_ARP)
+            elif (
+                wnm_feature
+                == hostapd_constants.WnmFeature.IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
+            ):
+                conf.update(
+                    hostapd_constants.ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
+                )
+
+        all_conf = [conf]
+
+        # Each additional BSS becomes its own config section.
+        for bss in self._bss_lookup.values():
+            bss_conf = collections.OrderedDict()
+            for k, v in (bss.generate_dict()).items():
+                bss_conf[k] = v
+            all_conf.append(bss_conf)
+
+        if self._additional_parameters:
+            all_conf.append(self._additional_parameters)
+
+        return all_conf
diff --git a/packages/antlion/controllers/ap_lib/hostapd_constants.py b/packages/antlion/controllers/ap_lib/hostapd_constants.py
new file mode 100755
index 0000000..ea6fdb2
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_constants.py
@@ -0,0 +1,938 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+from enum import Enum, StrEnum, auto, unique
+from typing import TypedDict
+
+# TODO(http://b/286584981): Replace with BandType
+# Legacy band identifier strings; duplicated by the BandType enum below.
+BAND_2G = "2g"
+BAND_5G = "5g"
+
+
+@unique
+class BandType(StrEnum):
+    BAND_2G = "2g"
+    BAND_5G = "5g"
+
+    def default_channel(self) -> int:
+        match self:
+            case BandType.BAND_2G:
+                return 6
+            case BandType.BAND_5G:
+                return 36
+
+
+CHANNEL_BANDWIDTH_20MHZ = 20
+CHANNEL_BANDWIDTH_40MHZ = 40
+CHANNEL_BANDWIDTH_80MHZ = 80
+CHANNEL_BANDWIDTH_160MHZ = 160
+
+# TODO(http://b/286584981): Replace with SecurityModeInt
+WEP = 0
+WPA1 = 1
+WPA2 = 2
+WPA3 = 2  # same as wpa2 and wpa2/wpa3, distinguished by wpa_key_mgmt
+MIXED = 3  # applies to wpa/wpa2, and wpa/wpa2/wpa3, distinguished by wpa_key_mgmt
+ENT = 4  # get the correct constant
+
+MAX_WPA_PSK_LENGTH = 64
+MIN_WPA_PSK_LENGTH = 8
+MAX_WPA_PASSWORD_LENGTH = 63
+WPA_STRICT_REKEY = 1
+WPA_DEFAULT_CIPHER = "TKIP"
+WPA2_DEFAULT_CIPER = "CCMP"
+WPA_GROUP_KEY_ROTATION_TIME = 600
+WPA_STRICT_REKEY_DEFAULT = True
+
+# TODO(http://b/286584981): Replace these with SecurityMode enum
+WEP_STRING = "wep"
+WPA_STRING = "wpa"
+WPA2_STRING = "wpa2"
+WPA_MIXED_STRING = "wpa/wpa2"
+WPA3_STRING = "wpa3"
+WPA2_WPA3_MIXED_STRING = "wpa2/wpa3"
+WPA_WPA2_WPA3_MIXED_STRING = "wpa/wpa2/wpa3"
+ENT_STRING = "ent"
+
+# TODO(http://b/286584981): Replace with KeyManagement
+# hostapd wpa_key_mgmt values.
+ENT_KEY_MGMT = "WPA-EAP"
+WPA_PSK_KEY_MGMT = "WPA-PSK"
+SAE_KEY_MGMT = "SAE"
+DUAL_WPA_PSK_SAE_KEY_MGMT = "WPA-PSK SAE"
+
+# TODO(http://b/286584981): Replace with SecurityMode.security_mode_int
+# Note that wpa3 shares WPA2's int and the mixed modes share MIXED; they are
+# distinguished by wpa_key_mgmt (see the comment on the WPA3/MIXED constants).
+SECURITY_STRING_TO_SECURITY_MODE_INT = {
+    WPA_STRING: WPA1,
+    WPA2_STRING: WPA2,
+    WPA_MIXED_STRING: MIXED,
+    WPA3_STRING: WPA3,
+    WPA2_WPA3_MIXED_STRING: WPA3,
+    WPA_WPA2_WPA3_MIXED_STRING: MIXED,
+    WEP_STRING: WEP,
+    ENT_STRING: ENT,
+}
+
+# TODO(http://b/286584981): Replace with SecurityMode.key_management
+# WEP_STRING and ENT_STRING have no entry in this map.
+SECURITY_STRING_TO_WPA_KEY_MGMT = {
+    WPA_STRING: WPA_PSK_KEY_MGMT,
+    WPA2_STRING: WPA_PSK_KEY_MGMT,
+    WPA_MIXED_STRING: WPA_PSK_KEY_MGMT,
+    WPA3_STRING: SAE_KEY_MGMT,
+    WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
+    WPA_WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
+}
+
+# TODO(http://b/286584981): Replace with SecurityMode.fuchsia_security_type
+# Mixed modes map to their strongest member.
+SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY = {
+    WEP_STRING: WEP_STRING,
+    WPA_STRING: WPA_STRING,
+    WPA2_STRING: WPA2_STRING,
+    WPA_MIXED_STRING: WPA2_STRING,
+    WPA3_STRING: WPA3_STRING,
+    WPA2_WPA3_MIXED_STRING: WPA3_STRING,
+    WPA_WPA2_WPA3_MIXED_STRING: WPA3_STRING,
+}
+
+IEEE8021X = 1
+# Interface names used by test APs.
+WLAN0_STRING = "wlan0"
+WLAN1_STRING = "wlan1"
+WLAN2_STRING = "wlan2"
+WLAN3_STRING = "wlan3"
+WLAN0_GALE = "wlan-2400mhz"
+WLAN1_GALE = "wlan-5000mhz"
+WEP_DEFAULT_KEY = 0
+# Valid WEP key lengths, in hex digits and in ASCII characters respectively.
+WEP_HEX_LENGTH = [10, 26, 32, 58]
+WEP_STR_LENGTH = [5, 13, 16]
+WEP_DEFAULT_STR_LENGTH = 13
+
+# TODO(http://b/286584981): Replace with BandType.default_channel()
+AP_DEFAULT_CHANNEL_2G = 6
+AP_DEFAULT_CHANNEL_5G = 36
+
+AP_DEFAULT_MAX_SSIDS_2G = 8
+AP_DEFAULT_MAX_SSIDS_5G = 8
+AP_SSID_LENGTH_2G = 8
+AP_SSID_MIN_LENGTH_2G = 1
+AP_SSID_MAX_LENGTH_2G = 32
+AP_PASSPHRASE_LENGTH_2G = 10
+AP_SSID_LENGTH_5G = 8
+AP_SSID_MIN_LENGTH_5G = 1
+AP_SSID_MAX_LENGTH_5G = 32
+AP_PASSPHRASE_LENGTH_5G = 10
+INTERFACE_2G_LIST = [WLAN0_STRING, WLAN0_GALE]
+INTERFACE_5G_LIST = [WLAN1_STRING, WLAN1_GALE]
+HIGH_BEACON_INTERVAL = 300
+LOW_BEACON_INTERVAL = 100
+HIGH_DTIM = 3
+LOW_DTIM = 1
+
+# A mapping of frequency to channel number.  This includes some
+# frequencies used outside the US.
+CHANNEL_MAP = {
+    2412: 1,
+    2417: 2,
+    2422: 3,
+    2427: 4,
+    2432: 5,
+    2437: 6,
+    2442: 7,
+    2447: 8,
+    2452: 9,
+    2457: 10,
+    2462: 11,
+    # 12, 13 are only legitimate outside the US.
+    2467: 12,
+    2472: 13,
+    # 14 is for Japan, DSSS and CCK only.
+    2484: 14,
+    # 34 valid in Japan.
+    5170: 34,
+    # 36-116 valid in the US, except 38, 42, and 46, which have
+    # mixed international support.
+    5180: 36,
+    5190: 38,
+    5200: 40,
+    5210: 42,
+    5220: 44,
+    5230: 46,
+    5240: 48,
+    # DFS channels.
+    5260: 52,
+    5280: 56,
+    5300: 60,
+    5320: 64,
+    5500: 100,
+    5520: 104,
+    5540: 108,
+    5560: 112,
+    5580: 116,
+    # 120, 124, 128 valid in Europe/Japan.
+    5600: 120,
+    5620: 124,
+    5640: 128,
+    # 132+ valid in US.
+    5660: 132,
+    5680: 136,
+    5700: 140,
+    # 144 is supported by a subset of WiFi chips
+    # (e.g. bcm4354, but not ath9k).
+    5720: 144,
+    # End DFS channels.
+    5745: 149,
+    5755: 151,
+    5765: 153,
+    5775: 155,
+    5795: 159,
+    5785: 157,
+    5805: 161,
+    5825: 165,
+}
+# Inverse mapping (channel -> frequency); relies on CHANNEL_MAP values
+# being unique.
+FREQUENCY_MAP = {v: k for k, v in CHANNEL_MAP.items()}
+
+# Channels legal for use in the US regulatory domain.
+US_CHANNELS_2G = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+US_CHANNELS_5G = [
+    36,
+    40,
+    44,
+    48,
+    52,
+    56,
+    60,
+    64,
+    100,
+    104,
+    108,
+    112,
+    116,
+    120,
+    124,
+    128,
+    132,
+    136,
+    140,
+    144,
+    149,
+    153,
+    157,
+    161,
+    165,
+]
+
+LOWEST_5G_CHANNEL = 36
+
+# hostapd hw_mode values (n/ac modes are expressed via additional flags).
+MODE_11A = "a"
+MODE_11B = "b"
+MODE_11G = "g"
+MODE_11N_MIXED = "n-mixed"
+MODE_11N_PURE = "n-only"
+MODE_11AC_MIXED = "ac-mixed"
+MODE_11AC_PURE = "ac-only"
+
+# Unique sentinel tokens representing 802.11n (HT) capabilities; each
+# object() is only meaningful via identity and the mapping below.
+N_CAPABILITY_LDPC = object()
+N_CAPABILITY_HT20 = object()
+N_CAPABILITY_HT40_PLUS = object()
+N_CAPABILITY_HT40_MINUS = object()
+N_CAPABILITY_GREENFIELD = object()
+N_CAPABILITY_SGI20 = object()
+N_CAPABILITY_SGI40 = object()
+N_CAPABILITY_TX_STBC = object()
+N_CAPABILITY_RX_STBC1 = object()
+N_CAPABILITY_RX_STBC12 = object()
+N_CAPABILITY_RX_STBC123 = object()
+N_CAPABILITY_DSSS_CCK_40 = object()
+N_CAPABILITY_LSIG_TXOP_PROT = object()
+N_CAPABILITY_40_INTOLERANT = object()
+N_CAPABILITY_MAX_AMSDU_7935 = object()
+N_CAPABILITY_DELAY_BLOCK_ACK = object()
+N_CAPABILITY_SMPS_STATIC = object()
+N_CAPABILITY_SMPS_DYNAMIC = object()
+# Sentinel -> hostapd ht_capab token.
+N_CAPABILITIES_MAPPING = {
+    N_CAPABILITY_LDPC: "[LDPC]",
+    N_CAPABILITY_HT20: "[HT20]",
+    N_CAPABILITY_HT40_PLUS: "[HT40+]",
+    N_CAPABILITY_HT40_MINUS: "[HT40-]",
+    N_CAPABILITY_GREENFIELD: "[GF]",
+    N_CAPABILITY_SGI20: "[SHORT-GI-20]",
+    N_CAPABILITY_SGI40: "[SHORT-GI-40]",
+    N_CAPABILITY_TX_STBC: "[TX-STBC]",
+    N_CAPABILITY_RX_STBC1: "[RX-STBC1]",
+    N_CAPABILITY_RX_STBC12: "[RX-STBC12]",
+    N_CAPABILITY_RX_STBC123: "[RX-STBC123]",
+    N_CAPABILITY_DSSS_CCK_40: "[DSSS_CCK-40]",
+    N_CAPABILITY_LSIG_TXOP_PROT: "[LSIG-TXOP-PROT]",
+    N_CAPABILITY_40_INTOLERANT: "[40-INTOLERANT]",
+    N_CAPABILITY_MAX_AMSDU_7935: "[MAX-AMSDU-7935]",
+    N_CAPABILITY_DELAY_BLOCK_ACK: "[DELAYED-BA]",
+    N_CAPABILITY_SMPS_STATIC: "[SMPS-STATIC]",
+    N_CAPABILITY_SMPS_DYNAMIC: "[SMPS-DYNAMIC]",
+}
+N_CAPABILITIES_MAPPING_INVERSE = {v: k for k, v in N_CAPABILITIES_MAPPING.items()}
+# Keys into HT40_ALLOW_MAP (below), not hostapd capabilities themselves.
+N_CAPABILITY_HT40_MINUS_CHANNELS = object()
+N_CAPABILITY_HT40_PLUS_CHANNELS = object()
+# Unique sentinel tokens representing 802.11ac (VHT) capabilities; each
+# object() is only meaningful via identity and the mapping below.
+AC_CAPABILITY_VHT160 = object()
+AC_CAPABILITY_VHT160_80PLUS80 = object()
+AC_CAPABILITY_RXLDPC = object()
+AC_CAPABILITY_SHORT_GI_80 = object()
+AC_CAPABILITY_SHORT_GI_160 = object()
+AC_CAPABILITY_TX_STBC_2BY1 = object()
+AC_CAPABILITY_RX_STBC_1 = object()
+AC_CAPABILITY_RX_STBC_12 = object()
+AC_CAPABILITY_RX_STBC_123 = object()
+AC_CAPABILITY_RX_STBC_1234 = object()
+AC_CAPABILITY_SU_BEAMFORMER = object()
+AC_CAPABILITY_SU_BEAMFORMEE = object()
+AC_CAPABILITY_BF_ANTENNA_2 = object()
+AC_CAPABILITY_BF_ANTENNA_3 = object()
+AC_CAPABILITY_BF_ANTENNA_4 = object()
+AC_CAPABILITY_SOUNDING_DIMENSION_2 = object()
+AC_CAPABILITY_SOUNDING_DIMENSION_3 = object()
+AC_CAPABILITY_SOUNDING_DIMENSION_4 = object()
+AC_CAPABILITY_MU_BEAMFORMER = object()
+AC_CAPABILITY_MU_BEAMFORMEE = object()
+AC_CAPABILITY_VHT_TXOP_PS = object()
+AC_CAPABILITY_HTC_VHT = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6 = object()
+AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7 = object()
+AC_CAPABILITY_VHT_LINK_ADAPT2 = object()
+AC_CAPABILITY_VHT_LINK_ADAPT3 = object()
+AC_CAPABILITY_RX_ANTENNA_PATTERN = object()
+AC_CAPABILITY_TX_ANTENNA_PATTERN = object()
+AC_CAPABILITY_MAX_MPDU_7991 = object()
+AC_CAPABILITY_MAX_MPDU_11454 = object()
+# Sentinel -> hostapd vht_capab token.
+AC_CAPABILITIES_MAPPING = {
+    AC_CAPABILITY_VHT160: "[VHT160]",
+    AC_CAPABILITY_VHT160_80PLUS80: "[VHT160-80PLUS80]",
+    AC_CAPABILITY_RXLDPC: "[RXLDPC]",
+    AC_CAPABILITY_SHORT_GI_80: "[SHORT-GI-80]",
+    AC_CAPABILITY_SHORT_GI_160: "[SHORT-GI-160]",
+    AC_CAPABILITY_TX_STBC_2BY1: "[TX-STBC-2BY1]",
+    AC_CAPABILITY_RX_STBC_1: "[RX-STBC-1]",
+    AC_CAPABILITY_RX_STBC_12: "[RX-STBC-12]",
+    AC_CAPABILITY_RX_STBC_123: "[RX-STBC-123]",
+    AC_CAPABILITY_RX_STBC_1234: "[RX-STBC-1234]",
+    AC_CAPABILITY_SU_BEAMFORMER: "[SU-BEAMFORMER]",
+    AC_CAPABILITY_SU_BEAMFORMEE: "[SU-BEAMFORMEE]",
+    AC_CAPABILITY_BF_ANTENNA_2: "[BF-ANTENNA-2]",
+    AC_CAPABILITY_BF_ANTENNA_3: "[BF-ANTENNA-3]",
+    AC_CAPABILITY_BF_ANTENNA_4: "[BF-ANTENNA-4]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_2: "[SOUNDING-DIMENSION-2]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_3: "[SOUNDING-DIMENSION-3]",
+    AC_CAPABILITY_SOUNDING_DIMENSION_4: "[SOUNDING-DIMENSION-4]",
+    AC_CAPABILITY_MU_BEAMFORMER: "[MU-BEAMFORMER]",
+    AC_CAPABILITY_MU_BEAMFORMEE: "[MU-BEAMFORMEE]",
+    AC_CAPABILITY_VHT_TXOP_PS: "[VHT-TXOP-PS]",
+    AC_CAPABILITY_HTC_VHT: "[HTC-VHT]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0: "[MAX-A-MPDU-LEN-EXP0]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1: "[MAX-A-MPDU-LEN-EXP1]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2: "[MAX-A-MPDU-LEN-EXP2]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3: "[MAX-A-MPDU-LEN-EXP3]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4: "[MAX-A-MPDU-LEN-EXP4]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5: "[MAX-A-MPDU-LEN-EXP5]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6: "[MAX-A-MPDU-LEN-EXP6]",
+    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7: "[MAX-A-MPDU-LEN-EXP7]",
+    AC_CAPABILITY_VHT_LINK_ADAPT2: "[VHT-LINK-ADAPT2]",
+    AC_CAPABILITY_VHT_LINK_ADAPT3: "[VHT-LINK-ADAPT3]",
+    AC_CAPABILITY_RX_ANTENNA_PATTERN: "[RX-ANTENNA-PATTERN]",
+    AC_CAPABILITY_TX_ANTENNA_PATTERN: "[TX-ANTENNA-PATTERN]",
+    AC_CAPABILITY_MAX_MPDU_11454: "[MAX-MPDU-11454]",
+    AC_CAPABILITY_MAX_MPDU_7991: "[MAX-MPDU-7991]",
+}
+AC_CAPABILITIES_MAPPING_INVERSE = {v: k for k, v in AC_CAPABILITIES_MAPPING.items()}
+# hostapd vht_oper_chwidth values.
+VHT_CHANNEL_WIDTH_40 = 0
+VHT_CHANNEL_WIDTH_80 = 1
+VHT_CHANNEL_WIDTH_160 = 2
+VHT_CHANNEL_WIDTH_80_80 = 3
+
+# Bandwidth in MHz -> vht_oper_chwidth value.
+VHT_CHANNEL = {
+    40: VHT_CHANNEL_WIDTH_40,
+    80: VHT_CHANNEL_WIDTH_80,
+    160: VHT_CHANNEL_WIDTH_160,
+}
+
+# This is a loose merging of the rules for US and EU regulatory
+# domains as taken from IEEE Std 802.11-2012 Appendix E.  For instance,
+# we tolerate HT40 in channels 149-161 (not allowed in EU), but also
+# tolerate HT40+ on channel 7 (not allowed in the US).  We take the loose
+# definition so that we don't prohibit testing in either domain.
+HT40_ALLOW_MAP = {
+    N_CAPABILITY_HT40_MINUS_CHANNELS: tuple(
+        itertools.chain(range(6, 14), range(40, 65, 8), range(104, 145, 8), [153, 161])
+    ),
+    N_CAPABILITY_HT40_PLUS_CHANNELS: tuple(
+        itertools.chain(range(1, 8), range(36, 61, 8), range(100, 141, 8), [149, 157])
+    ),
+}
+
+# hostapd ieee80211w (protected management frames) values.
+PMF_SUPPORT_DISABLED = 0
+PMF_SUPPORT_ENABLED = 1
+PMF_SUPPORT_REQUIRED = 2
+PMF_SUPPORT_VALUES = (PMF_SUPPORT_DISABLED, PMF_SUPPORT_ENABLED, PMF_SUPPORT_REQUIRED)
+
+DRIVER_NAME = "nl80211"
+
+
+class VHTChannelWidth(TypedDict):
+    """Entry in CENTER_CHANNEL_MAP describing one VHT channel width."""
+
+    # Offset added to the lower bounding channel to derive the center
+    # frequency segment index -- TODO confirm against consumers.
+    delta: int
+    # (low, high) bounding-channel pairs valid for this width.
+    channels: list[tuple[int, int]]
+
+
+# vht_oper_chwidth -> channel bounds usable at that width.
+CENTER_CHANNEL_MAP = {
+    VHT_CHANNEL_WIDTH_40: VHTChannelWidth(
+        delta=2,
+        channels=[
+            (36, 40),
+            (44, 48),
+            (52, 56),
+            (60, 64),
+            (100, 104),
+            (108, 112),
+            (116, 120),
+            (124, 128),
+            (132, 136),
+            (140, 144),
+            (149, 153),
+            (157, 161),
+        ],
+    ),
+    VHT_CHANNEL_WIDTH_80: VHTChannelWidth(
+        delta=6,
+        channels=[
+            (36, 48),
+            (52, 64),
+            (100, 112),
+            (116, 128),
+            (132, 144),
+            (149, 161),
+        ],
+    ),
+    VHT_CHANNEL_WIDTH_160: VHTChannelWidth(
+        delta=14,
+        channels=[(36, 64), (100, 128)],
+    ),
+}
+
+# Rate sets in hostapd's units of 100 kbps (e.g. 60 -> 6 Mbps).
+OFDM_DATA_RATES = {"supported_rates": "60 90 120 180 240 360 480 540"}
+
+CCK_DATA_RATES = {"supported_rates": "10 20 55 110"}
+
+CCK_AND_OFDM_DATA_RATES = {
+    "supported_rates": "10 20 55 110 60 90 120 180 240 360 480 540"
+}
+
+OFDM_ONLY_BASIC_RATES = {"basic_rates": "60 120 240"}
+
+CCK_AND_OFDM_BASIC_RATES = {"basic_rates": "10 20 55 110"}
+
+# hostapd auth_algs values for WEP authentication.
+WEP_AUTH = {
+    "open": {"auth_algs": 1},
+    "shared": {"auth_algs": 2},
+    "open_and_shared": {"auth_algs": 3},
+}
+
+# WMM (802.11e QoS) access-category parameter sets.  cwmin/cwmax are
+# contention-window exponents, aifs is the arbitration inter-frame space,
+# and txop_limit is the transmit-opportunity limit.
+WMM_11B_DEFAULT_PARAMS = {
+    "wmm_ac_bk_cwmin": 5,
+    "wmm_ac_bk_cwmax": 10,
+    "wmm_ac_bk_aifs": 7,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 3,
+    "wmm_ac_be_cwmin": 5,
+    "wmm_ac_be_cwmax": 7,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 4,
+    "wmm_ac_vi_cwmax": 5,
+    "wmm_ac_vi_txop_limit": 188,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 3,
+    "wmm_ac_vo_cwmax": 4,
+    "wmm_ac_vo_txop_limit": 102,
+}
+
+WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS = {
+    "wmm_ac_bk_cwmin": 4,
+    "wmm_ac_bk_cwmax": 10,
+    "wmm_ac_bk_aifs": 7,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 3,
+    "wmm_ac_be_cwmin": 4,
+    "wmm_ac_be_cwmax": 10,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 3,
+    "wmm_ac_vi_cwmax": 4,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 2,
+    "wmm_ac_vo_cwmax": 3,
+    "wmm_ac_vo_txop_limit": 47,
+}
+
+# Deliberately non-default values for testing parameter propagation.
+WMM_NON_DEFAULT_PARAMS = {
+    "wmm_ac_bk_cwmin": 5,
+    "wmm_ac_bk_cwmax": 9,
+    "wmm_ac_bk_aifs": 3,
+    "wmm_ac_bk_txop_limit": 94,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 2,
+    "wmm_ac_be_cwmax": 8,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 1,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 10,
+    "wmm_ac_vi_txop_limit": 47,
+    "wmm_ac_vo_aifs": 1,
+    "wmm_ac_vo_cwmin": 6,
+    "wmm_ac_vo_cwmax": 10,
+    "wmm_ac_vo_txop_limit": 94,
+}
+
+# Parameter sets that deprioritize one access category (large aifs).
+WMM_DEGRADED_VO_PARAMS = {
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 2,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
+}
+
+WMM_DEGRADED_VI_PARAMS = {
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 2,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
+}
+
+# Parameter sets that favor one access category over the others.
+WMM_IMPROVE_BE_PARAMS = {
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 10,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 2,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
+}
+
+WMM_IMPROVE_BK_PARAMS = {
+    "wmm_ac_bk_cwmin": 7,
+    "wmm_ac_bk_cwmax": 15,
+    "wmm_ac_bk_aifs": 2,
+    "wmm_ac_bk_txop_limit": 0,
+    "wmm_ac_be_aifs": 10,
+    "wmm_ac_be_cwmin": 7,
+    "wmm_ac_be_cwmax": 15,
+    "wmm_ac_be_txop_limit": 0,
+    "wmm_ac_vi_aifs": 10,
+    "wmm_ac_vi_cwmin": 7,
+    "wmm_ac_vi_cwmax": 15,
+    "wmm_ac_vi_txop_limit": 94,
+    "wmm_ac_vo_aifs": 10,
+    "wmm_ac_vo_cwmin": 7,
+    "wmm_ac_vo_cwmax": 15,
+    "wmm_ac_vo_txop_limit": 47,
+}
+
+# Admission-control-mandatory flags per WMM access category.
+WMM_ACM_BK = {"wmm_ac_bk_acm": 1}
+WMM_ACM_BE = {"wmm_ac_be_acm": 1}
+WMM_ACM_VI = {"wmm_ac_vi_acm": 1}
+WMM_ACM_VO = {"wmm_ac_vo_acm": 1}
+
+UAPSD_ENABLED = {"uapsd_advertisement_enabled": 1}
+
+UTF_8_SSID = {"utf8_ssid": 1}
+
+# Radio resource management (802.11k) options.
+ENABLE_RRM_BEACON_REPORT = {"rrm_beacon_report": 1}
+ENABLE_RRM_NEIGHBOR_REPORT = {"rrm_neighbor_report": 1}
+
+# Wireless Network Management (AKA 802.11v) features.
+ENABLE_WNM_TIME_ADVERTISEMENT = {"time_advertisement": 2, "time_zone": "EST5"}
+ENABLE_WNM_SLEEP_MODE = {"wnm_sleep_mode": 1}
+ENABLE_WNM_BSS_TRANSITION_MANAGEMENT = {"bss_transition": 1}
+ENABLE_WNM_PROXY_ARP = {"proxy_arp": 1}
+ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = {"na_mcast_to_ucast": 1}
+
+# Raw vendor-specific IEs (hex) for beacon/association-response testing.
+# NOTE(review): the "simliar_to_wpa" key is misspelled, but it is a runtime
+# lookup key -- renaming it would break existing callers.
+VENDOR_IE = {
+    "correct_length_beacon": {"vendor_elements": "dd0411223301"},
+    "too_short_length_beacon": {"vendor_elements": "dd0311223301"},
+    "too_long_length_beacon": {"vendor_elements": "dd0511223301"},
+    "zero_length_beacon_with_data": {"vendor_elements": "dd0011223301"},
+    "zero_length_beacon_without_data": {"vendor_elements": "dd00"},
+    "simliar_to_wpa": {"vendor_elements": "dd040050f203"},
+    "correct_length_association_response": {"assocresp_elements": "dd0411223301"},
+    "too_short_length_association_response": {"assocresp_elements": "dd0311223301"},
+    "too_long_length_association_response": {"assocresp_elements": "dd0511223301"},
+    "zero_length_association_response_with_data": {
+        "assocresp_elements": "dd0011223301"
+    },
+    "zero_length_association_response_without_data": {"assocresp_elements": "dd00"},
+}
+
+ENABLE_IEEE80211D = {"ieee80211d": 1}
+
+# Third byte of the country string IE (operating environment).
+COUNTRY_STRING = {
+    "ALL": {"country3": "0x20"},
+    "OUTDOOR": {"country3": "0x4f"},
+    "INDOOR": {"country3": "0x49"},
+    "NONCOUNTRY": {"country3": "0x58"},
+    "GLOBAL": {"country3": "0x04"},
+}
+
+COUNTRY_CODE = {
+    "AFGHANISTAN": {"country_code": "AF"},
+    "ALAND_ISLANDS": {"country_code": "AX"},
+    "ALBANIA": {"country_code": "AL"},
+    "ALGERIA": {"country_code": "DZ"},
+    "AMERICAN_SAMOA": {"country_code": "AS"},
+    "ANDORRA": {"country_code": "AD"},
+    "ANGOLA": {"country_code": "AO"},
+    "ANGUILLA": {"country_code": "AI"},
+    "ANTARCTICA": {"country_code": "AQ"},
+    "ANTIGUA_AND_BARBUDA": {"country_code": "AG"},
+    "ARGENTINA": {"country_code": "AR"},
+    "ARMENIA": {"country_code": "AM"},
+    "ARUBA": {"country_code": "AW"},
+    "AUSTRALIA": {"country_code": "AU"},
+    "AUSTRIA": {"country_code": "AT"},
+    "AZERBAIJAN": {"country_code": "AZ"},
+    "BAHAMAS": {"country_code": "BS"},
+    "BAHRAIN": {"country_code": "BH"},
+    "BANGLADESH": {"country_code": "BD"},
+    "BARBADOS": {"country_code": "BB"},
+    "BELARUS": {"country_code": "BY"},
+    "BELGIUM": {"country_code": "BE"},
+    "BELIZE": {"country_code": "BZ"},
+    "BENIN": {"country_code": "BJ"},
+    "BERMUDA": {"country_code": "BM"},
+    "BHUTAN": {"country_code": "BT"},
+    "BOLIVIA": {"country_code": "BO"},
+    "BONAIRE": {"country_code": "BQ"},
+    "BOSNIA_AND_HERZEGOVINA": {"country_code": "BA"},
+    "BOTSWANA": {"country_code": "BW"},
+    "BOUVET_ISLAND": {"country_code": "BV"},
+    "BRAZIL": {"country_code": "BR"},
+    "BRITISH_INDIAN_OCEAN_TERRITORY": {"country_code": "IO"},
+    "BRUNEI_DARUSSALAM": {"country_code": "BN"},
+    "BULGARIA": {"country_code": "BG"},
+    "BURKINA_FASO": {"country_code": "BF"},
+    "BURUNDI": {"country_code": "BI"},
+    "CAMBODIA": {"country_code": "KH"},
+    "CAMEROON": {"country_code": "CM"},
+    "CANADA": {"country_code": "CA"},
+    "CAPE_VERDE": {"country_code": "CV"},
+    "CAYMAN_ISLANDS": {"country_code": "KY"},
+    "CENTRAL_AFRICAN_REPUBLIC": {"country_code": "CF"},
+    "CHAD": {"country_code": "TD"},
+    "CHILE": {"country_code": "CL"},
+    "CHINA": {"country_code": "CN"},
+    "CHRISTMAS_ISLAND": {"country_code": "CX"},
+    "COCOS_ISLANDS": {"country_code": "CC"},
+    "COLOMBIA": {"country_code": "CO"},
+    "COMOROS": {"country_code": "KM"},
+    "CONGO": {"country_code": "CG"},
+    "DEMOCRATIC_REPUBLIC_CONGO": {"country_code": "CD"},
+    "COOK_ISLANDS": {"country_code": "CK"},
+    "COSTA_RICA": {"country_code": "CR"},
+    "COTE_D_IVOIRE": {"country_code": "CI"},
+    "CROATIA": {"country_code": "HR"},
+    "CUBA": {"country_code": "CU"},
+    "CURACAO": {"country_code": "CW"},
+    "CYPRUS": {"country_code": "CY"},
+    "CZECH_REPUBLIC": {"country_code": "CZ"},
+    "DENMARK": {"country_code": "DK"},
+    "DJIBOUTI": {"country_code": "DJ"},
+    "DOMINICA": {"country_code": "DM"},
+    "DOMINICAN_REPUBLIC": {"country_code": "DO"},
+    "ECUADOR": {"country_code": "EC"},
+    "EGYPT": {"country_code": "EG"},
+    "EL_SALVADOR": {"country_code": "SV"},
+    "EQUATORIAL_GUINEA": {"country_code": "GQ"},
+    "ERITREA": {"country_code": "ER"},
+    "ESTONIA": {"country_code": "EE"},
+    "ETHIOPIA": {"country_code": "ET"},
+    "FALKLAND_ISLANDS_(MALVINAS)": {"country_code": "FK"},
+    "FAROE_ISLANDS": {"country_code": "FO"},
+    "FIJI": {"country_code": "FJ"},
+    "FINLAND": {"country_code": "FI"},
+    "FRANCE": {"country_code": "FR"},
+    "FRENCH_GUIANA": {"country_code": "GF"},
+    "FRENCH_POLYNESIA": {"country_code": "PF"},
+    "FRENCH_SOUTHERN_TERRITORIES": {"country_code": "TF"},
+    "GABON": {"country_code": "GA"},
+    "GAMBIA": {"country_code": "GM"},
+    "GEORGIA": {"country_code": "GE"},
+    "GERMANY": {"country_code": "DE"},
+    "GHANA": {"country_code": "GH"},
+    "GIBRALTAR": {"country_code": "GI"},
+    "GREECE": {"country_code": "GR"},
+    "GREENLAND": {"country_code": "GL"},
+    "GRENADA": {"country_code": "GD"},
+    "GUADELOUPE": {"country_code": "GP"},
+    "GUAM": {"country_code": "GU"},
+    "GUATEMALA": {"country_code": "GT"},
+    "GUERNSEY": {"country_code": "GG"},
+    "GUINEA": {"country_code": "GN"},
+    "GUINEA-BISSAU": {"country_code": "GW"},
+    "GUYANA": {"country_code": "GY"},
+    "HAITI": {"country_code": "HT"},
+    "HEARD_ISLAND_AND_MCDONALD_ISLANDS": {"country_code": "HM"},
+    "VATICAN_CITY_STATE": {"country_code": "VA"},
+    "HONDURAS": {"country_code": "HN"},
+    "HONG_KONG": {"country_code": "HK"},
+    "HUNGARY": {"country_code": "HU"},
+    "ICELAND": {"country_code": "IS"},
+    "INDIA": {"country_code": "IN"},
+    "INDONESIA": {"country_code": "ID"},
+    "IRAN": {"country_code": "IR"},
+    "IRAQ": {"country_code": "IQ"},
+    "IRELAND": {"country_code": "IE"},
+    "ISLE_OF_MAN": {"country_code": "IM"},
+    "ISRAEL": {"country_code": "IL"},
+    "ITALY": {"country_code": "IT"},
+    "JAMAICA": {"country_code": "JM"},
+    "JAPAN": {"country_code": "JP"},
+    "JERSEY": {"country_code": "JE"},
+    "JORDAN": {"country_code": "JO"},
+    "KAZAKHSTAN": {"country_code": "KZ"},
+    "KENYA": {"country_code": "KE"},
+    "KIRIBATI": {"country_code": "KI"},
+    "DEMOCRATIC_PEOPLE_S_REPUBLIC_OF_KOREA": {"country_code": "KP"},
+    "REPUBLIC_OF_KOREA": {"country_code": "KR"},
+    "KUWAIT": {"country_code": "KW"},
+    "KYRGYZSTAN": {"country_code": "KG"},
+    "LAO": {"country_code": "LA"},
+    "LATVIA": {"country_code": "LV"},
+    "LEBANON": {"country_code": "LB"},
+    "LESOTHO": {"country_code": "LS"},
+    "LIBERIA": {"country_code": "LR"},
+    "LIBYA": {"country_code": "LY"},
+    "LIECHTENSTEIN": {"country_code": "LI"},
+    "LITHUANIA": {"country_code": "LT"},
+    "LUXEMBOURG": {"country_code": "LU"},
+    "MACAO": {"country_code": "MO"},
+    "MACEDONIA": {"country_code": "MK"},
+    "MADAGASCAR": {"country_code": "MG"},
+    "MALAWI": {"country_code": "MW"},
+    "MALAYSIA": {"country_code": "MY"},
+    "MALDIVES": {"country_code": "MV"},
+    "MALI": {"country_code": "ML"},
+    "MALTA": {"country_code": "MT"},
+    "MARSHALL_ISLANDS": {"country_code": "MH"},
+    "MARTINIQUE": {"country_code": "MQ"},
+    "MAURITANIA": {"country_code": "MR"},
+    "MAURITIUS": {"country_code": "MU"},
+    "MAYOTTE": {"country_code": "YT"},
+    "MEXICO": {"country_code": "MX"},
+    "MICRONESIA": {"country_code": "FM"},
+    "MOLDOVA": {"country_code": "MD"},
+    "MONACO": {"country_code": "MC"},
+    "MONGOLIA": {"country_code": "MN"},
+    "MONTENEGRO": {"country_code": "ME"},
+    "MONTSERRAT": {"country_code": "MS"},
+    "MOROCCO": {"country_code": "MA"},
+    "MOZAMBIQUE": {"country_code": "MZ"},
+    "MYANMAR": {"country_code": "MM"},
+    "NAMIBIA": {"country_code": "NA"},
+    "NAURU": {"country_code": "NR"},
+    "NEPAL": {"country_code": "NP"},
+    "NETHERLANDS": {"country_code": "NL"},
+    "NEW_CALEDONIA": {"country_code": "NC"},
+    "NEW_ZEALAND": {"country_code": "NZ"},
+    "NICARAGUA": {"country_code": "NI"},
+    "NIGER": {"country_code": "NE"},
+    "NIGERIA": {"country_code": "NG"},
+    "NIUE": {"country_code": "NU"},
+    "NORFOLK_ISLAND": {"country_code": "NF"},
+    "NORTHERN_MARIANA_ISLANDS": {"country_code": "MP"},
+    "NORWAY": {"country_code": "NO"},
+    "OMAN": {"country_code": "OM"},
+    "PAKISTAN": {"country_code": "PK"},
+    "PALAU": {"country_code": "PW"},
+    "PALESTINE": {"country_code": "PS"},
+    "PANAMA": {"country_code": "PA"},
+    "PAPUA_NEW_GUINEA": {"country_code": "PG"},
+    "PARAGUAY": {"country_code": "PY"},
+    "PERU": {"country_code": "PE"},
+    "PHILIPPINES": {"country_code": "PH"},
+    "PITCAIRN": {"country_code": "PN"},
+    "POLAND": {"country_code": "PL"},
+    "PORTUGAL": {"country_code": "PT"},
+    "PUERTO_RICO": {"country_code": "PR"},
+    "QATAR": {"country_code": "QA"},
+    "RÉUNION": {"country_code": "RE"},
+    "ROMANIA": {"country_code": "RO"},
+    "RUSSIAN_FEDERATION": {"country_code": "RU"},
+    "RWANDA": {"country_code": "RW"},
+    "SAINT_BARTHELEMY": {"country_code": "BL"},
+    "SAINT_KITTS_AND_NEVIS": {"country_code": "KN"},
+    "SAINT_LUCIA": {"country_code": "LC"},
+    "SAINT_MARTIN": {"country_code": "MF"},
+    "SAINT_PIERRE_AND_MIQUELON": {"country_code": "PM"},
+    "SAINT_VINCENT_AND_THE_GRENADINES": {"country_code": "VC"},
+    "SAMOA": {"country_code": "WS"},
+    "SAN_MARINO": {"country_code": "SM"},
+    "SAO_TOME_AND_PRINCIPE": {"country_code": "ST"},
+    "SAUDI_ARABIA": {"country_code": "SA"},
+    "SENEGAL": {"country_code": "SN"},
+    "SERBIA": {"country_code": "RS"},
+    "SEYCHELLES": {"country_code": "SC"},
+    "SIERRA_LEONE": {"country_code": "SL"},
+    "SINGAPORE": {"country_code": "SG"},
+    "SINT_MAARTEN": {"country_code": "SX"},
+    "SLOVAKIA": {"country_code": "SK"},
+    "SLOVENIA": {"country_code": "SI"},
+    "SOLOMON_ISLANDS": {"country_code": "SB"},
+    "SOMALIA": {"country_code": "SO"},
+    "SOUTH_AFRICA": {"country_code": "ZA"},
+    "SOUTH_GEORGIA": {"country_code": "GS"},
+    "SOUTH_SUDAN": {"country_code": "SS"},
+    "SPAIN": {"country_code": "ES"},
+    "SRI_LANKA": {"country_code": "LK"},
+    "SUDAN": {"country_code": "SD"},
+    "SURINAME": {"country_code": "SR"},
+    "SVALBARD_AND_JAN_MAYEN": {"country_code": "SJ"},
+    "SWAZILAND": {"country_code": "SZ"},
+    "SWEDEN": {"country_code": "SE"},
+    "SWITZERLAND": {"country_code": "CH"},
+    "SYRIAN_ARAB_REPUBLIC": {"country_code": "SY"},
+    "TAIWAN": {"country_code": "TW"},
+    "TAJIKISTAN": {"country_code": "TJ"},
+    "TANZANIA": {"country_code": "TZ"},
+    "THAILAND": {"country_code": "TH"},
+    "TIMOR-LESTE": {"country_code": "TL"},
+    "TOGO": {"country_code": "TG"},
+    "TOKELAU": {"country_code": "TK"},
+    "TONGA": {"country_code": "TO"},
+    "TRINIDAD_AND_TOBAGO": {"country_code": "TT"},
+    "TUNISIA": {"country_code": "TN"},
+    "TURKEY": {"country_code": "TR"},
+    "TURKMENISTAN": {"country_code": "TM"},
+    "TURKS_AND_CAICOS_ISLANDS": {"country_code": "TC"},
+    "TUVALU": {"country_code": "TV"},
+    "UGANDA": {"country_code": "UG"},
+    "UKRAINE": {"country_code": "UA"},
+    "UNITED_ARAB_EMIRATES": {"country_code": "AE"},
+    "UNITED_KINGDOM": {"country_code": "GB"},
+    "UNITED_STATES": {"country_code": "US"},
+    "UNITED_STATES_MINOR_OUTLYING_ISLANDS": {"country_code": "UM"},
+    "URUGUAY": {"country_code": "UY"},
+    "UZBEKISTAN": {"country_code": "UZ"},
+    "VANUATU": {"country_code": "VU"},
+    "VENEZUELA": {"country_code": "VE"},
+    "VIETNAM": {"country_code": "VN"},
+    "VIRGIN_ISLANDS_BRITISH": {"country_code": "VG"},
+    "VIRGIN_ISLANDS_US": {"country_code": "VI"},
+    "WALLIS_AND_FUTUNA": {"country_code": "WF"},
+    "WESTERN_SAHARA": {"country_code": "EH"},
+    "YEMEN": {"country_code": "YE"},
+    "ZAMBIA": {"country_code": "ZM"},
+    "ZIMBABWE": {"country_code": "ZW"},
+    "NON_COUNTRY": {"country_code": "XX"},
+}
+
+ALL_CHANNELS_2G = {
+    1: {20, 40},
+    2: {20, 40},
+    3: {20, 40},
+    4: {20, 40},
+    5: {20, 40},
+    6: {20, 40},
+    7: {20, 40},
+    8: {20, 40},
+    9: {20, 40},
+    10: {20, 40},
+    11: {20, 40},
+    12: {20, 40},
+    13: {20, 40},
+    14: {20},
+}
+
+ALL_CHANNELS_5G = {
+    36: {20, 40, 80},
+    40: {20, 40, 80},
+    44: {20, 40, 80},
+    48: {20, 40, 80},
+    52: {20, 40, 80},
+    56: {20, 40, 80},
+    60: {20, 40, 80},
+    64: {20, 40, 80},
+    100: {20, 40, 80},
+    104: {20, 40, 80},
+    108: {20, 40, 80},
+    112: {20, 40, 80},
+    116: {20, 40, 80},
+    120: {20, 40, 80},
+    124: {20, 40, 80},
+    128: {20, 40, 80},
+    132: {20, 40, 80},
+    136: {20, 40, 80},
+    140: {20, 40, 80},
+    144: {20, 40, 80},
+    149: {20, 40, 80},
+    153: {20, 40, 80},
+    157: {20, 40, 80},
+    161: {20, 40, 80},
+    165: {20},
+}
+
+ALL_CHANNELS = ALL_CHANNELS_2G | ALL_CHANNELS_5G
+
+
+@unique
+class WnmFeature(Enum):
+    """Wireless Network Management (AKA 802.11v) features hostapd supports."""
+
+    TIME_ADVERTISEMENT = auto()
+    WNM_SLEEP_MODE = auto()
+    BSS_TRANSITION_MANAGEMENT = auto()
+    PROXY_ARP = auto()
+    IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = auto()
diff --git a/packages/antlion/controllers/ap_lib/hostapd_security.py b/packages/antlion/controllers/ap_lib/hostapd_security.py
new file mode 100644
index 0000000..918ba26
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_security.py
@@ -0,0 +1,408 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import string
+from enum import Enum, StrEnum, auto, unique
+
+from antlion.controllers.ap_lib import hostapd_constants
+
+
+class SecurityModeInt(int, Enum):
+    """Possible values for hostapd's "wpa" config option.
+
+    The int value is a bit field that can enable WPA and/or WPA2.
+
+    bit0 = enable WPA defined by IEEE 802.11i/D3.0
+    bit1 = enable RSN (WPA2) defined by IEEE 802.11i/RSN
+    bit2 = enable WAPI (rejected/withdrawn)
+    bit3 = enable OSEN (ENT)
+    """
+
+    WEP = 0
+    WPA1 = 1
+    WPA2 = 2
+    WPA3 = 2  # same as wpa2 and wpa2/wpa3; distinguished by wpa_key_mgmt
+    MIXED = 3  # applies to wpa/wpa2 and wpa/wpa2/wpa3; distinguished by wpa_key_mgmt
+    ENT = 8
+
+    def __str__(self):
+        return str(self.value)
+
+
+@unique
+class KeyManagement(StrEnum):
+    SAE = "SAE"
+    WPA_PSK = "WPA-PSK"
+    WPA_PSK_SAE = "WPA-PSK SAE"
+    ENT = "WPA-EAP"
+
+
+# TODO(http://b/286584981): This is currently only being used for OpenWRT.
+# Investigate whether we can replace KeyManagement with OpenWRTEncryptionMode.
+@unique
+class OpenWRTEncryptionMode(StrEnum):
+    """Combination of Wi-Fi encryption mode and ciphers.
+
+    Only used by OpenWRT.
+
+    Besides the encryption mode, the encryption option also specifies the group and peer
+    ciphers to use. To override the cipher, the value of encryption must be given in the
+    form "mode+cipher". This enum contains all possible combinations.
+
+    See https://openwrt.org/docs/guide-user/network/wifi/basic#encryption_modes.
+    """
+
+    NONE = "none"
+    """No authentication, no ciphers"""
+    SAE = "sae"
+    """WPA3 Personal (SAE) using CCMP cipher"""
+    SAE_MIXED = "sae-mixed"
+    """WPA2/WPA3 Personal (PSK/SAE) mixed mode using CCMP cipher"""
+    PSK2_TKIP_CCMP = "psk2+tkip+ccmp"
+    """WPA2 Personal (PSK) using TKIP and CCMP ciphers"""
+    PSK2_TKIP_AES = "psk2+tkip+aes"
+    """WPA2 Personal (PSK) using TKIP and AES ciphers"""
+    PSK2_TKIP = "psk2+tkip"
+    """WPA2 Personal (PSK) using TKIP cipher"""
+    PSK2_CCMP = "psk2+ccmp"
+    """WPA2 Personal (PSK) using CCMP cipher"""
+    PSK2_AES = "psk2+aes"
+    """WPA2 Personal (PSK) using AES cipher"""
+    PSK2 = "psk2"
+    """WPA2 Personal (PSK) using CCMP cipher"""
+    PSK_TKIP_CCMP = "psk+tkip+ccmp"
+    """WPA Personal (PSK) using TKIP and CCMP ciphers"""
+    PSK_TKIP_AES = "psk+tkip+aes"
+    """WPA Personal (PSK) using TKIP and AES ciphers"""
+    PSK_TKIP = "psk+tkip"
+    """WPA Personal (PSK) using TKIP cipher"""
+    PSK_CCMP = "psk+ccmp"
+    """WPA Personal (PSK) using CCMP cipher"""
+    PSK_AES = "psk+aes"
+    """WPA Personal (PSK) using AES cipher"""
+    PSK = "psk"
+    """WPA Personal (PSK) using CCMP cipher"""
+    PSK_MIXED_TKIP_CCMP = "psk-mixed+tkip+ccmp"
+    """WPA/WPA2 Personal (PSK) mixed mode using TKIP and CCMP ciphers"""
+    PSK_MIXED_TKIP_AES = "psk-mixed+tkip+aes"
+    """WPA/WPA2 Personal (PSK) mixed mode using TKIP and AES ciphers"""
+    PSK_MIXED_TKIP = "psk-mixed+tkip"
+    """WPA/WPA2 Personal (PSK) mixed mode using TKIP cipher"""
+    PSK_MIXED_CCMP = "psk-mixed+ccmp"
+    """WPA/WPA2 Personal (PSK) mixed mode using CCMP cipher"""
+    PSK_MIXED_AES = "psk-mixed+aes"
+    """WPA/WPA2 Personal (PSK) mixed mode using AES cipher"""
+    PSK_MIXED = "psk-mixed"
+    """WPA/WPA2 Personal (PSK) mixed mode using CCMP cipher"""
+    WEP = "wep"
+    """defaults to “open system” authentication aka wep+open using RC4 cipher"""
+    WEP_OPEN = "wep+open"
+    """“open system” authentication using RC4 cipher"""
+    WEP_SHARED = "wep+shared"
+    """“shared key” authentication using RC4 cipher"""
+    WPA3 = "wpa3"
+    """WPA3 Enterprise using CCMP cipher"""
+    WPA3_MIXED = "wpa3-mixed"
+    """WPA3/WPA2 Enterprise using CCMP cipher"""
+    WPA2_TKIP_CCMP = "wpa2+tkip+ccmp"
+    """WPA2 Enterprise using TKIP and CCMP ciphers"""
+    WPA2_TKIP_AES = "wpa2+tkip+aes"
+    """WPA2 Enterprise using TKIP and AES ciphers"""
+    WPA2_CCMP = "wpa2+ccmp"
+    """WPA2 Enterprise using CCMP cipher"""
+    WPA2_AES = "wpa2+aes"
+    """WPA2 Enterprise using AES cipher"""
+    WPA2 = "wpa2"
+    """WPA2 Enterprise using CCMP cipher"""
+    WPA2_TKIP = "wpa2+tkip"
+    """WPA2 Enterprise using TKIP cipher"""
+    WPA_TKIP_CCMP = "wpa+tkip+ccmp"
+    """WPA Enterprise using TKIP and CCMP ciphers"""
+    WPA_TKIP_AES = "wpa+tkip+aes"
+    """WPA Enterprise using TKIP and AES ciphers"""
+    WPA_CCMP = "wpa+ccmp"
+    """WPA Enterprise using CCMP cipher"""
+    WPA_AES = "wpa+aes"
+    """WPA Enterprise using AES cipher"""
+    WPA_TKIP = "wpa+tkip"
+    """WPA Enterprise using TKIP cipher"""
+    WPA = "wpa"
+    """WPA Enterprise using CCMP cipher"""
+    WPA_MIXED_TKIP_CCMP = "wpa-mixed+tkip+ccmp"
+    """WPA/WPA2 Enterprise mixed mode using TKIP and CCMP ciphers"""
+    WPA_MIXED_TKIP_AES = "wpa-mixed+tkip+aes"
+    """WPA/WPA2 Enterprise mixed mode using TKIP and AES ciphers"""
+    WPA_MIXED_TKIP = "wpa-mixed+tkip"
+    """WPA/WPA2 Enterprise mixed mode using TKIP cipher"""
+    WPA_MIXED_CCMP = "wpa-mixed+ccmp"
+    """WPA/WPA2 Enterprise mixed mode using CCMP cipher"""
+    WPA_MIXED_AES = "wpa-mixed+aes"
+    """WPA/WPA2 Enterprise mixed mode using AES cipher"""
+    WPA_MIXED = "wpa-mixed"
+    """WPA/WPA2 Enterprise mixed mode using CCMP cipher"""
+    OWE = "owe"
+    """Opportunistic Wireless Encryption (OWE) using CCMP cipher"""
+
+
+@unique
+class FuchsiaSecurityType(StrEnum):
+    """Fuchsia supported security types.
+
+    Defined by the fuchsia.wlan.policy.SecurityType FIDL.
+
+    https://cs.opensource.google/fuchsia/fuchsia/+/main:sdk/fidl/fuchsia.wlan.policy/types.fidl
+    """
+
+    NONE = "none"
+    WEP = "wep"
+    WPA = "wpa"
+    WPA2 = "wpa2"
+    WPA3 = "wpa3"
+
+
+@unique
+class SecurityMode(StrEnum):
+    OPEN = auto()
+    WEP = auto()
+    WPA = auto()
+    WPA2 = auto()
+    WPA_WPA2 = auto()
+    WPA3 = auto()
+    WPA2_WPA3 = auto()
+    WPA_WPA2_WPA3 = auto()
+    ENT = auto()
+
+    def security_mode_int(self) -> SecurityModeInt:
+        match self:
+            case SecurityMode.OPEN:
+                raise TypeError("Open security doesn't have a SecurityModeInt")
+            case SecurityMode.WEP:
+                return SecurityModeInt.WEP
+            case SecurityMode.WPA:
+                return SecurityModeInt.WPA1
+            case SecurityMode.WPA2:
+                return SecurityModeInt.WPA2
+            case SecurityMode.WPA_WPA2:
+                return SecurityModeInt.MIXED
+            case SecurityMode.WPA3:
+                return SecurityModeInt.WPA3
+            case SecurityMode.WPA2_WPA3:
+                return SecurityModeInt.WPA3
+            case SecurityMode.WPA_WPA2_WPA3:
+                return SecurityModeInt.MIXED
+            case SecurityMode.ENT:
+                return SecurityModeInt.ENT
+
+    def key_management(self) -> KeyManagement | None:
+        match self:
+            case SecurityMode.OPEN:
+                return None
+            case SecurityMode.WEP:
+                return None
+            case SecurityMode.WPA:
+                return KeyManagement.WPA_PSK
+            case SecurityMode.WPA2:
+                return KeyManagement.WPA_PSK
+            case SecurityMode.WPA_WPA2:
+                return KeyManagement.WPA_PSK
+            case SecurityMode.WPA3:
+                return KeyManagement.SAE
+            case SecurityMode.WPA2_WPA3:
+                return KeyManagement.WPA_PSK_SAE
+            case SecurityMode.WPA_WPA2_WPA3:
+                return KeyManagement.WPA_PSK_SAE
+            case SecurityMode.ENT:
+                return KeyManagement.ENT
+
+    def fuchsia_security_type(self) -> FuchsiaSecurityType:
+        match self:
+            case SecurityMode.OPEN:
+                return FuchsiaSecurityType.NONE
+            case SecurityMode.WEP:
+                return FuchsiaSecurityType.WEP
+            case SecurityMode.WPA:
+                return FuchsiaSecurityType.WPA
+            case SecurityMode.WPA2:
+                return FuchsiaSecurityType.WPA2
+            case SecurityMode.WPA_WPA2:
+                return FuchsiaSecurityType.WPA2
+            case SecurityMode.WPA3:
+                return FuchsiaSecurityType.WPA3
+            case SecurityMode.WPA2_WPA3:
+                return FuchsiaSecurityType.WPA3
+            case SecurityMode.WPA_WPA2_WPA3:
+                return FuchsiaSecurityType.WPA3
+            case SecurityMode.ENT:
+                raise NotImplementedError(
+                    f'Fuchsia has not implemented support for security mode "{self}"'
+                )
+
+    def is_wpa3(self) -> bool:
+        match self:
+            case SecurityMode.OPEN:
+                return False
+            case SecurityMode.WEP:
+                return False
+            case SecurityMode.WPA:
+                return False
+            case SecurityMode.WPA2:
+                return False
+            case SecurityMode.WPA_WPA2:
+                return False
+            case SecurityMode.WPA3:
+                return True
+            case SecurityMode.WPA2_WPA3:
+                return True
+            case SecurityMode.WPA_WPA2_WPA3:
+                return True
+            case SecurityMode.ENT:
+                return False
+        raise TypeError("Unknown security mode")
+
+
+class Security(object):
+    """The Security class for hostapd representing some of the security
+    settings that are allowed in hostapd.  If needed more can be added.
+    """
+
+    def __init__(
+        self,
+        security_mode: SecurityMode = SecurityMode.OPEN,
+        password: str | None = None,
+        wpa_cipher: str | None = hostapd_constants.WPA_DEFAULT_CIPHER,
+        wpa2_cipher: str | None = hostapd_constants.WPA2_DEFAULT_CIPER,
+        wpa_group_rekey: int = hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME,
+        wpa_strict_rekey: bool = hostapd_constants.WPA_STRICT_REKEY_DEFAULT,
+        wep_default_key: int = hostapd_constants.WEP_DEFAULT_KEY,
+        radius_server_ip: str | None = None,
+        radius_server_port: int | None = None,
+        radius_server_secret: str | None = None,
+    ) -> None:
+        """Gather all of the security settings for WPA-PSK.  This could be
+           expanded later.
+
+        Args:
+            security_mode: Type of security mode.
+            password: The PSK or passphrase for the security mode.
+            wpa_cipher: The cipher to be used for wpa.
+                        Options: TKIP, CCMP, TKIP CCMP
+                        Default: TKIP
+            wpa2_cipher: The cipher to be used for wpa2.
+                         Options: TKIP, CCMP, TKIP CCMP
+                         Default: CCMP
+            wpa_group_rekey: How often to refresh the GTK regardless of network
+                             changes.
+                             Options: An integer in seconds, None
+                             Default: 600 seconds
+            wpa_strict_rekey: Whether to do a group key update when client
+                              leaves the network or not.
+                              Options: True, False
+                              Default: True
+            wep_default_key: The wep key number to use when transmitting.
+            radius_server_ip: Radius server IP for Enterprise auth.
+            radius_server_port: Radius server port for Enterprise auth.
+            radius_server_secret: Radius server secret for Enterprise auth.
+        """
+        self.security_mode = security_mode
+        self.wpa_cipher = wpa_cipher
+        self.wpa2_cipher = wpa2_cipher
+        self.wpa_group_rekey = wpa_group_rekey
+        self.wpa_strict_rekey = wpa_strict_rekey
+        self.wep_default_key = wep_default_key
+        self.radius_server_ip = radius_server_ip
+        self.radius_server_port = radius_server_port
+        self.radius_server_secret = radius_server_secret
+        if password:
+            if self.security_mode is SecurityMode.WEP:
+                if len(password) in hostapd_constants.WEP_STR_LENGTH:
+                    self.password = f'"{password}"'
+                elif len(password) in hostapd_constants.WEP_HEX_LENGTH and all(
+                    c in string.hexdigits for c in password
+                ):
+                    self.password = password
+                else:
+                    raise ValueError(
+                        "WEP key must be a hex string of %s characters"
+                        % hostapd_constants.WEP_HEX_LENGTH
+                    )
+            else:
+                if (
+                    len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH
+                    or len(password) > hostapd_constants.MAX_WPA_PSK_LENGTH
+                ):
+                    raise ValueError(
+                        "Password must be a minimum of %s characters and a maximum of %s"
+                        % (
+                            hostapd_constants.MIN_WPA_PSK_LENGTH,
+                            hostapd_constants.MAX_WPA_PSK_LENGTH,
+                        )
+                    )
+                else:
+                    self.password = password
+
+    def __str__(self) -> str:
+        return self.security_mode
+
+    def generate_dict(self) -> dict[str, str | int]:
+        """Returns: an ordered dictionary of settings"""
+        if self.security_mode is SecurityMode.OPEN:
+            return {}
+
+        settings: dict[str, str | int] = collections.OrderedDict()
+
+        if self.security_mode is SecurityMode.WEP:
+            settings["wep_default_key"] = self.wep_default_key
+            settings[f"wep_key{self.wep_default_key}"] = self.password
+        elif self.security_mode == SecurityMode.ENT:
+            if self.radius_server_ip is not None:
+                settings["auth_server_addr"] = self.radius_server_ip
+            if self.radius_server_port is not None:
+                settings["auth_server_port"] = self.radius_server_port
+            if self.radius_server_secret is not None:
+                settings["auth_server_shared_secret"] = self.radius_server_secret
+            settings["wpa_key_mgmt"] = hostapd_constants.ENT_KEY_MGMT
+            settings["ieee8021x"] = hostapd_constants.IEEE8021X
+            settings["wpa"] = hostapd_constants.WPA2
+        else:
+            settings["wpa"] = self.security_mode.security_mode_int().value
+            if len(self.password) == hostapd_constants.MAX_WPA_PSK_LENGTH:
+                settings["wpa_psk"] = self.password
+            else:
+                settings["wpa_passphrase"] = self.password
+            # For wpa, wpa/wpa2, and wpa/wpa2/wpa3, add wpa_pairwise
+            if self.wpa_cipher and (
+                self.security_mode is SecurityMode.WPA
+                or self.security_mode is SecurityMode.WPA_WPA2
+                or self.security_mode is SecurityMode.WPA_WPA2_WPA3
+            ):
+                settings["wpa_pairwise"] = self.wpa_cipher
+            # For wpa/wpa2, wpa2, wpa3, and wpa2/wpa3, and wpa/wpa2, wpa3, add rsn_pairwise
+            if self.wpa2_cipher and (
+                self.security_mode is SecurityMode.WPA_WPA2
+                or self.security_mode is SecurityMode.WPA2
+                or self.security_mode is SecurityMode.WPA2_WPA3
+                or self.security_mode is SecurityMode.WPA3
+            ):
+                settings["rsn_pairwise"] = self.wpa2_cipher
+            # Add wpa_key_mgmt based on security mode string
+            wpa_key_mgmt = self.security_mode.key_management()
+            if wpa_key_mgmt is not None:
+                settings["wpa_key_mgmt"] = str(wpa_key_mgmt)
+            if self.wpa_group_rekey:
+                settings["wpa_group_rekey"] = self.wpa_group_rekey
+            if self.wpa_strict_rekey:
+                settings["wpa_strict_rekey"] = hostapd_constants.WPA_STRICT_REKEY
+
+        return settings
diff --git a/packages/antlion/controllers/ap_lib/hostapd_utils.py b/packages/antlion/controllers/ap_lib/hostapd_utils.py
new file mode 100644
index 0000000..060777e
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_utils.py
@@ -0,0 +1,97 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion import utils
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def generate_random_password(
+    security_mode: SecurityMode = SecurityMode.OPEN,
+    length: int | None = None,
+    hex: int | None = None,
+) -> str:
+    """Generates a random password. Defaults to an 8 character ASCII password.
+
+    Args:
+        security_mode: Used to determine if length should be WEP compatible
+            (useful for generated tests to simply pass in security mode)
+        length: Length of password to generate. Defaults to 8, unless
+            security_mode is WEP, then 13
+        hex: If True, generates a hex string, else ascii
+    """
+    if hex:
+        generator_func = utils.rand_hex_str
+    else:
+        generator_func = utils.rand_ascii_str
+
+    if length:
+        return generator_func(length)
+    if security_mode is SecurityMode.WEP:
+        return generator_func(hostapd_constants.WEP_DEFAULT_STR_LENGTH)
+    else:
+        return generator_func(hostapd_constants.MIN_WPA_PSK_LENGTH)
+
+
+def verify_interface(interface: str, valid_interfaces: list[str]) -> None:
+    """Raises error if interface is missing or invalid
+
+    Args:
+        interface: interface name
+        valid_interfaces: valid interface names
+    """
+    if interface not in valid_interfaces:
+        raise ValueError(f"Invalid interface name was passed: {interface}")
+
+
+def verify_security_mode(
+    security_profile: Security, valid_security_modes: list[SecurityMode]
+) -> None:
+    """Raises error if security mode is not in list of valid security modes.
+
+    Args:
+        security_profile: Security to verify
+        valid_security_modes: Valid security modes for a profile.
+    """
+    if security_profile.security_mode not in valid_security_modes:
+        raise ValueError(
+            f"Invalid Security Mode: {security_profile.security_mode}; "
+            f"Valid Security Modes for this profile: {valid_security_modes}"
+        )
+
+
+def verify_cipher(security_profile: Security, valid_ciphers: list[str]) -> None:
+    """Raise error if cipher is not in list of valid ciphers.
+
+    Args:
+        security_profile: Security profile to verify
+        valid_ciphers: A list of valid ciphers for security_profile.
+    """
+    if security_profile.security_mode is SecurityMode.OPEN:
+        raise ValueError("Security mode is open.")
+    elif security_profile.security_mode is SecurityMode.WPA:
+        if security_profile.wpa_cipher not in valid_ciphers:
+            raise ValueError(
+                f"Invalid WPA Cipher: {security_profile.wpa_cipher}. "
+                f"Valid WPA Ciphers for this profile: {valid_ciphers}"
+            )
+    elif security_profile.security_mode is SecurityMode.WPA2:
+        if security_profile.wpa2_cipher not in valid_ciphers:
+            raise ValueError(
+                f"Invalid WPA2 Cipher: {security_profile.wpa2_cipher}. "
+                f"Valid WPA2 Ciphers for this profile: {valid_ciphers}"
+            )
+    else:
+        raise ValueError(f"Invalid Security Mode: {security_profile.security_mode}")
diff --git a/packages/antlion/controllers/ap_lib/radio_measurement.py b/packages/antlion/controllers/ap_lib/radio_measurement.py
new file mode 100644
index 0000000..5c7f2e0
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/radio_measurement.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import IntEnum, unique
+
+
+@unique
+class ApReachability(IntEnum):
+    """Neighbor Report AP Reachability values.
+
+    See IEEE 802.11-2020 Figure 9-172.
+    """
+
+    NOT_REACHABLE = 1
+    UNKNOWN = 2
+    REACHABLE = 3
+
+
+class BssidInformationCapabilities:
+    """Representation of Neighbor Report BSSID Information Capabilities.
+
+    See IEEE 802.11-2020 Figure 9-338 and 9.4.1.4.
+    """
+
+    def __init__(
+        self,
+        spectrum_management: bool = False,
+        qos: bool = False,
+        apsd: bool = False,
+        radio_measurement: bool = False,
+    ):
+        """Create a capabilities object.
+
+        Args:
+            spectrum_management: whether spectrum management is required.
+            qos: whether QoS is implemented.
+            apsd: whether APSD is implemented.
+            radio_measurement: whether radio measurement is activated.
+        """
+        self._spectrum_management = spectrum_management
+        self._qos = qos
+        self._apsd = apsd
+        self._radio_measurement = radio_measurement
+
+    def __index__(self) -> int:
+        """Convert to numeric representation of the field's bits."""
+        return (
+            self.spectrum_management << 5
+            | self.qos << 4
+            | self.apsd << 3
+            | self.radio_measurement << 2
+        )
+
+    @property
+    def spectrum_management(self) -> bool:
+        return self._spectrum_management
+
+    @property
+    def qos(self) -> bool:
+        return self._qos
+
+    @property
+    def apsd(self) -> bool:
+        return self._apsd
+
+    @property
+    def radio_measurement(self) -> bool:
+        return self._radio_measurement
+
+
+class BssidInformation:
+    """Representation of Neighbor Report BSSID Information field.
+
+    BssidInformation contains info about a neighboring AP, to be included in a
+    neighbor report element. See IEEE 802.11-2020 Figure 9-337.
+    """
+
+    def __init__(
+        self,
+        ap_reachability: ApReachability = ApReachability.UNKNOWN,
+        security: bool = False,
+        key_scope: bool = False,
+        capabilities: BssidInformationCapabilities = BssidInformationCapabilities(),
+        mobility_domain: bool = False,
+        high_throughput: bool = False,
+        very_high_throughput: bool = False,
+        ftm: bool = False,
+    ):
+        """Create a BSSID Information object for a neighboring AP.
+
+        Args:
+            ap_reachability: whether this AP is reachable by the STA that
+                requested the neighbor report.
+            security: whether this AP is known to support the same security
+                provisioning as used by the STA in its current association.
+            key_scope: whether this AP is known to have the same
+                authenticator as the AP sending the report.
+            capabilities: selected capabilities of this AP.
+            mobility_domain: whether the AP is including an MDE in its beacon
+                frames and the contents of that MDE are identical to the MDE
+                advertised by the AP sending the report.
+            high_throughput: whether the AP is an HT AP including the HT
+                Capabilities element in its Beacons, and that the contents of
+                that HT capabilities element are identical to the HT
+                capabilities element advertised by the AP sending the report.
+            very_high_throughput: whether the AP is a VHT AP and the VHT
+                capabilities element, if included as a subelement, is
+                identical in content to the VHT capabilities element included
+                in the AP’s beacon.
+            ftm: whether the AP is known to have the Fine Timing Measurement
+                Responder extended capability.
+        """
+        self._ap_reachability = ap_reachability
+        self._security = security
+        self._key_scope = key_scope
+        self._capabilities = capabilities
+        self._mobility_domain = mobility_domain
+        self._high_throughput = high_throughput
+        self._very_high_throughput = very_high_throughput
+        self._ftm = ftm
+
+    def __index__(self) -> int:
+        """Convert to numeric representation of the field's bits."""
+        return (
+            self._ap_reachability << 30
+            | self.security << 29
+            | self.key_scope << 28
+            | int(self.capabilities) << 22
+            | self.mobility_domain << 21
+            | self.high_throughput << 20
+            | self.very_high_throughput << 19
+            | self.ftm << 18
+        )
+
+    @property
+    def security(self) -> bool:
+        return self._security
+
+    @property
+    def key_scope(self) -> bool:
+        return self._key_scope
+
+    @property
+    def capabilities(self) -> BssidInformationCapabilities:
+        return self._capabilities
+
+    @property
+    def mobility_domain(self) -> bool:
+        return self._mobility_domain
+
+    @property
+    def high_throughput(self) -> bool:
+        return self._high_throughput
+
+    @property
+    def very_high_throughput(self) -> bool:
+        return self._very_high_throughput
+
+    @property
+    def ftm(self) -> bool:
+        return self._ftm
+
+
+@unique
+class PhyType(IntEnum):
+    """PHY type values, see dot11PhyType in 802.11-2020 Annex C."""
+
+    DSSS = 2
+    OFDM = 4
+    HRDSS = 5
+    ERP = 6
+    HT = 7
+    DMG = 8
+    VHT = 9
+    TVHT = 10
+    S1G = 11
+    CDMG = 12
+    CMMG = 13
+
+
+class NeighborReportElement:
+    """Representation of Neighbor Report element.
+
+    See IEEE 802.11-2020 9.4.2.36.
+    """
+
+    def __init__(
+        self,
+        bssid: str,
+        bssid_information: BssidInformation,
+        operating_class: int,
+        channel_number: int,
+        phy_type: PhyType,
+    ):
+        """Create a neighbor report element.
+
+        Args:
+            bssid: MAC address of the neighbor.
+            bssid_information: BSSID Information of the neighbor.
+            operating_class: operating class of the neighbor.
+            channel_number: channel number of the neighbor.
+            phy_type: dot11PhyType of the neighbor.
+        """
+        self._bssid = bssid
+        self._bssid_information = bssid_information
+
+        # Operating Class, IEEE 802.11-2020 Annex E.
+        self._operating_class = operating_class
+
+        self._channel_number = channel_number
+
+        # PHY Type, IEEE 802.11-2020 Annex C.
+        self._phy_type = phy_type
+
+    @property
+    def bssid(self) -> str:
+        return self._bssid
+
+    @property
+    def bssid_information(self) -> BssidInformation:
+        return self._bssid_information
+
+    @property
+    def operating_class(self) -> int:
+        return self._operating_class
+
+    @property
+    def channel_number(self) -> int:
+        return self._channel_number
+
+    @property
+    def phy_type(self) -> PhyType:
+        return self._phy_type
diff --git a/packages/antlion/controllers/ap_lib/radvd.py b/packages/antlion/controllers/ap_lib/radvd.py
new file mode 100644
index 0000000..276136e
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/radvd.py
@@ -0,0 +1,214 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import shlex
+import tempfile
+import time
+
+from antlion.controllers.ap_lib.radvd_config import RadvdConfig
+from antlion.controllers.utils_lib.commands import shell
+from antlion.libs.proc import job
+from antlion.logger import LogLevel
+from antlion.runner import Runner
+
+
class Error(Exception):
    """Raised when radvd fails to start, exits, or misbehaves."""
+
+
class Radvd(object):
    """Manages the radvd program.

    https://en.wikipedia.org/wiki/Radvd
    This implements the Router Advertisement Daemon of IPv6 router addresses
    and IPv6 routing prefixes using the Neighbor Discovery Protocol.

    Attributes:
        config: The radvd configuration that is being used.
    """

    def __init__(
        self,
        runner: Runner,
        interface: str,
        working_dir: str | None = None,
        radvd_binary: str | None = None,
    ) -> None:
        """
        Args:
            runner: Object that has run_async and run methods for executing
                    shell commands (e.g. connection.SshConnection)
            interface: Name of the interface to use (eg. wlan0).
            working_dir: Directory to work out of. When omitted, a local
                runner uses the system temporary directory and any other
                runner uses /tmp.
            radvd_binary: Location of the radvd binary. When omitted, radvd
                is assumed to be available on the PATH.
        """
        if not radvd_binary:
            logging.debug("No radvd binary specified. Assuming radvd is in the path.")
            radvd_binary = "radvd"
        else:
            logging.debug(f"Using radvd binary located at {radvd_binary}")
        if working_dir is None:
            # Only default to this host's temporary directory when commands
            # run locally; a remote runner may not share it, so use /tmp
            # there. An explicitly provided working_dir is always honored.
            if runner.run == job.run:
                working_dir = tempfile.gettempdir()
            else:
                working_dir = "/tmp"
        self._radvd_binary = radvd_binary
        self._runner = runner
        self._interface = interface
        self._working_dir = working_dir
        self.config: RadvdConfig | None = None
        self._shell = shell.ShellCommand(runner, working_dir)
        # Artifacts are namespaced by interface so multiple instances can
        # share one working directory.
        self._log_file = f"{working_dir}/radvd-{self._interface}.log"
        self._config_file = f"{working_dir}/radvd-{self._interface}.conf"
        self._pid_file = f"{working_dir}/radvd-{self._interface}.pid"
        # Regex used by the shell helpers to find/kill our radvd process.
        self._ps_identifier = f"{self._radvd_binary}.*{self._config_file}"

    def start(self, config: RadvdConfig, timeout: int = 60) -> None:
        """Starts radvd.

        Starts the radvd daemon and runs it in the background.

        Note that the daemon can start and still not work: invalid
        configurations can take a long amount of time to be noticed, and
        because the daemon runs indefinitely it's impossible to wait on. If
        you need to check that the configs are ok, use periodic calls to
        is_alive and inspect the logs via pull_logs.

        Args:
            config: Configs to start the radvd with.
            timeout: Time to wait for radvd to come up, in seconds.

        Raises:
            Error: If radvd exits prematurely or fails to start within
                timeout seconds.
        """
        if self.is_alive():
            self.stop()

        self.config = config

        # Start from a clean slate so stale logs/configs can't be mistaken
        # for output of this run.
        self._shell.delete_file(self._log_file)
        self._shell.delete_file(self._config_file)
        self._write_configs(self.config)

        command = (
            f"{self._radvd_binary} -C {shlex.quote(self._config_file)} "
            f"-p {shlex.quote(self._pid_file)} -m logfile -d 5 "
            f'-l {shlex.quote(self._log_file)} > "{self._log_file}" 2>&1'
        )
        self._runner.run_async(command)

        try:
            self._wait_for_process(timeout=timeout)
        except Error:
            # Don't leave a half-started daemon behind on failure.
            self.stop()
            raise

    def stop(self) -> None:
        """Kills the daemon if it is running."""
        self._shell.kill(self._ps_identifier)

    def is_alive(self) -> bool:
        """
        Returns:
            True if the daemon is running.
        """
        return self._shell.is_alive(self._ps_identifier)

    def pull_logs(self) -> str:
        """Pulls the log files from where radvd is running.

        Returns:
            A string of the radvd logs.
        """
        # TODO: Auto pulling of logs when stop is called.
        with LogLevel(self._runner.log, logging.INFO):
            return self._shell.read_file(self._log_file)

    def _wait_for_process(self, timeout: int = 60) -> None:
        """Waits for the process to come up.

        Waits until the radvd process is found running, or there is
        a timeout. If the program never comes up then the log file
        will be scanned for errors.

        Args:
            timeout: Time to wait for radvd to come up, in seconds.

        Raises: See _scan_for_errors
        """
        start_time = time.time()
        while time.time() - start_time < timeout and not self.is_alive():
            time.sleep(0.1)
            self._scan_for_errors(False)
        # One final scan; should_be_up=True makes a dead process an error.
        self._scan_for_errors(True)

    def _scan_for_errors(self, should_be_up: bool) -> None:
        """Scans the radvd log for any errors.

        Args:
            should_be_up: If true then radvd program is expected to be alive.
                          If it is found not alive while this is true an error
                          is thrown.

        Raises:
            Error: Raised when a radvd error is found.
        """
        # Store this so that all other errors have priority.
        is_dead = not self.is_alive()

        exited_prematurely = self._shell.search_file("Exiting", self._log_file)
        if exited_prematurely:
            raise Error("Radvd exited prematurely.", self)
        if should_be_up and is_dead:
            raise Error("Radvd failed to start", self)

    def _write_configs(self, config: RadvdConfig) -> None:
        """Writes the configs to the radvd config file.

        Renders the RadvdConfig into radvd.conf(5) syntax (interface block
        with optional prefix, clients, route, and RDNSS sections) and writes
        it to the remote/local config file.

        Args:
            config: a RadvdConfig object.
        """
        self._shell.delete_file(self._config_file)
        conf = config.package_configs()
        lines = ["interface %s {" % self._interface]
        for interface_option_key, interface_option in conf["interface_options"].items():
            lines.append(f"\t{str(interface_option_key)} {str(interface_option)};")
        lines.append(f"\tprefix {conf['prefix']}")
        lines.append("\t{")
        for prefix_option in conf["prefix_options"].items():
            lines.append(f"\t\t{' '.join(map(str, prefix_option))};")
        lines.append("\t};")
        if conf["clients"]:
            lines.append("\tclients")
            lines.append("\t{")
            for client in conf["clients"]:
                lines.append(f"\t\t{client};")
            lines.append("\t};")
        if conf["route"]:
            lines.append("\troute %s {" % conf["route"])
            for route_option in conf["route_options"].items():
                lines.append(f"\t\t{' '.join(map(str, route_option))};")
            lines.append("\t};")
        if conf["rdnss"]:
            lines.append(
                "\tRDNSS %s {" % " ".join([str(elem) for elem in conf["rdnss"]])
            )
            for rdnss_option in conf["rdnss_options"].items():
                lines.append(f"\t\t{' '.join(map(str, rdnss_option))};")
            lines.append("\t};")
        lines.append("};")
        output_config = "\n".join(lines)
        logging.info(f"Writing {self._config_file}")
        logging.debug("******************Start*******************")
        logging.debug(f"\n{output_config}")
        logging.debug("*******************End********************")

        self._shell.write_file(self._config_file, output_config)
diff --git a/packages/antlion/controllers/ap_lib/radvd_config.py b/packages/antlion/controllers/ap_lib/radvd_config.py
new file mode 100644
index 0000000..d3d6d97
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/radvd_config.py
@@ -0,0 +1,313 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+from typing import Any
+
+from antlion.controllers.ap_lib import radvd_constants
+
+
class RadvdConfig(object):
    """The root settings for the router advertisement daemon.

    All the settings for a router advertisement daemon.
    """

    def __init__(
        self,
        prefix: str = radvd_constants.DEFAULT_PREFIX,
        clients: list[str] | None = None,
        route: Any | None = None,
        rdnss: list[str] | None = None,
        ignore_if_missing: str | None = None,
        adv_send_advert: str = radvd_constants.ADV_SEND_ADVERT_ON,
        unicast_only: str | None = None,
        max_rtr_adv_interval: int | None = None,
        min_rtr_adv_interval: int | None = None,
        min_delay_between_ras: int | None = None,
        adv_managed_flag: str | None = None,
        adv_other_config_flag: str | None = None,
        adv_link_mtu: int | None = None,
        adv_reachable_time: int | None = None,
        adv_retrans_timer: int | None = None,
        adv_cur_hop_limit: int | None = None,
        adv_default_lifetime: int | None = None,
        adv_default_preference: str | None = None,
        adv_source_ll_address: str | None = None,
        adv_home_agent_flag: str | None = None,
        adv_home_agent_info: str | None = None,
        home_agent_lifetime: int | None = None,
        home_agent_preference: int | None = None,
        adv_mob_rtr_support_flag: str | None = None,
        adv_interval_opt: str | None = None,
        adv_on_link: str = radvd_constants.ADV_ON_LINK_ON,
        adv_autonomous: str = radvd_constants.ADV_AUTONOMOUS_ON,
        adv_router_addr: str | None = None,
        adv_valid_lifetime: int | None = None,
        adv_preferred_lifetime: int | None = None,
        base_6to4_interface: str | None = None,
        adv_route_lifetime: int | None = None,
        adv_route_preference: str | None = None,
        adv_rdnss_preference: int | None = None,
        adv_rdnss_open: str | None = None,
        adv_rdnss_lifetime: int | None = None,
    ) -> None:
        """Construct a RadvdConfig.

        Args:
            prefix: IPv6 prefix and length, ie fd::/64
            clients: A list of IPv6 link local addresses that will be the only
                clients served. All other IPv6 addresses will be ignored if
                this list is present. Defaults to serving all clients.
            route: A route for the router advertisement with prefix.
            rdnss: A list of recursive DNS servers. Defaults to none.
            ignore_if_missing: A flag indicating whether or not the interface
                is ignored if it does not exist at start-up. By default,
                radvd exits.
            adv_send_advert: A flag indicating whether or not the router sends
                periodic router advertisements and responds to router
                solicitations.
            unicast_only: Indicates that the interface link type only supports
                unicast.
            max_rtr_adv_interval: The maximum time allowed between sending
                unsolicited multicast router advertisements from the interface,
                in seconds. Must be no less than 4 seconds and no greater than
                1800 seconds.
            min_rtr_adv_interval: The minimum time allowed between sending
                unsolicited multicast router advertisements from the interface,
                in seconds. Must be no less than 3 seconds and no greater than
                0.75 * max_rtr_adv_interval.
            min_delay_between_ras: The minimum time allowed between sending
                multicast router advertisements from the interface, in seconds.
            adv_managed_flag: When set, hosts use the administered (stateful)
                protocol for address autoconfiguration in addition to any
                addresses autoconfigured using stateless address
                autoconfiguration. The use of this flag is described in
                RFC 4862.
            adv_other_config_flag: When set, hosts use the administered
                (stateful) protocol for autoconfiguration of other (non-address)
                information. The use of this flag is described in RFC 4862.
            adv_link_mtu: The MTU option is used in router advertisement
                messages to insure that all nodes on a link use the same MTU
                value in those cases where the link MTU is not well known.
            adv_reachable_time: The time, in milliseconds, that a node assumes
                a neighbor is reachable after having received a reachability
                confirmation. Used by the Neighbor Unreachability Detection
                algorithm (see Section 7.3 of RFC 4861). A value of zero means
                unspecified (by this router).
            adv_retrans_timer: The time, in milliseconds, between retransmitted
                Neighbor Solicitation messages. Used by address resolution and
                the Neighbor Unreachability Detection algorithm (see Sections
                7.2 and 7.3 of RFC 4861). A value of zero means unspecified
                (by this router).
            adv_cur_hop_limit: The default value that should be placed in the
                Hop Count field of the IP header for outgoing (unicast) IP
                packets. The value should be set to the current diameter of the
                Internet. The value zero means unspecified (by this router).
            adv_default_lifetime: The lifetime associated with the default
                router in units of seconds. The maximum value corresponds to
                18.2 hours. A lifetime of 0 indicates that the router is not a
                default router and should not appear on the default router list.
                The router lifetime applies only to the router's usefulness as
                a default router; it does not apply to information contained in
                other message fields or options. Options that need time limits
                for their information include their own lifetime fields.
            adv_default_preference: The preference associated with the default
                router, as either "low", "medium", or "high".
            adv_source_ll_address: When set, the link-layer address of the
                outgoing interface is included in the RA.
            adv_home_agent_flag: When set, indicates that sending router is able
                to serve as Mobile IPv6 Home Agent. When set, minimum limits
                specified by Mobile IPv6 are used for MinRtrAdvInterval and
                MaxRtrAdvInterval.
            adv_home_agent_info: When set, Home Agent Information Option
                (specified by Mobile IPv6) is included in Router Advertisements.
                adv_home_agent_flag must also be set when using this option.
            home_agent_lifetime: The length of time in seconds (relative to the
                time the packet is sent) that the router is offering Mobile IPv6
                Home Agent services. A value 0 must not be used. The maximum
                lifetime is 65520 seconds (18.2 hours). This option is ignored,
                if adv_home_agent_info is not set.
            home_agent_preference: The preference for the Home Agent sending
                this Router Advertisement. Values greater than 0 indicate more
                preferable Home Agent, values less than 0 indicate less
                preferable Home Agent. This option is ignored, if
                adv_home_agent_info is not set.
            adv_mob_rtr_support_flag: When set, the Home Agent signals it
                supports Mobile Router registrations (specified by NEMO Basic).
                adv_home_agent_info must also be set when using this option.
            adv_interval_opt: When set, Advertisement Interval Option
                (specified by Mobile IPv6) is included in Router Advertisements.
                When set, minimum limits specified by Mobile IPv6 are used for
                MinRtrAdvInterval and MaxRtrAdvInterval.
            adv_on_link: When set, indicates that this prefix can be used for
                on-link determination. When not set the advertisement makes no
                statement about on-link or off-link properties of the prefix.
                For instance, the prefix might be used for address configuration
                with some of the addresses belonging to the prefix being
                on-link and others being off-link.
            adv_autonomous: When set, indicates that this prefix can be used for
                autonomous address configuration as specified in RFC 4862.
            adv_router_addr: When set, indicates that the address of interface
                is sent instead of network prefix, as is required by Mobile
                IPv6. When set, minimum limits specified by Mobile IPv6 are used
                for MinRtrAdvInterval and MaxRtrAdvInterval.
            adv_valid_lifetime: The length of time in seconds (relative to the
                time the packet is sent) that the prefix is valid for the
                purpose of on-link determination. The symbolic value infinity
                represents infinity (i.e. a value of all one bits (0xffffffff)).
                The valid lifetime is also used by RFC 4862.
            adv_preferred_lifetime: The length of time in seconds (relative to
                the time the packet is sent) that addresses generated from the
                prefix via stateless address autoconfiguration remain preferred.
                The symbolic value infinity represents infinity (i.e. a value of
                all one bits (0xffffffff)). See RFC 4862.
            base_6to4_interface: If this option is specified, this prefix will
                be combined with the IPv4 address of interface name to produce
                a valid 6to4 prefix. The first 16 bits of this prefix will be
                replaced by 2002 and the next 32 bits of this prefix will be
                replaced by the IPv4 address assigned to interface name at
                configuration time. The remaining 80 bits of the prefix
                (including the SLA ID) will be advertised as specified in the
                configuration file.
            adv_route_lifetime: The lifetime associated with the route in units
                of seconds. The symbolic value infinity represents infinity
                (i.e. a value of all one bits (0xffffffff)).
            adv_route_preference: The preference associated with the default
                router, as either "low", "medium", or "high".
            adv_rdnss_preference: The preference of the DNS server, compared to
                other DNS servers advertised and used. 0 to 7 means less
                important than manually configured nameservers in resolv.conf,
                while 12 to 15 means more important.
            adv_rdnss_open: "Service Open" flag. When set, indicates that RDNSS
                continues to be available to hosts even if they moved to a
                different subnet.
            adv_rdnss_lifetime: The maximum duration how long the RDNSS entries
                are used for name resolution. A value of 0 means the nameserver
                should no longer be used. The value, if not 0, must be at least
                max_rtr_adv_interval. To ensure stale RDNSS info gets removed
                in a timely fashion, this should not be greater than
                2*max_rtr_adv_interval.
        """
        self._prefix = prefix
        # None is used as the "not provided" sentinel for clients and rdnss
        # (instead of a mutable [] default, which would be shared between
        # every RadvdConfig instance).
        self._clients = clients if clients is not None else []
        self._route = route
        self._rdnss = rdnss if rdnss is not None else []
        self._ignore_if_missing = ignore_if_missing
        self._adv_send_advert = adv_send_advert
        self._unicast_only = unicast_only
        self._max_rtr_adv_interval = max_rtr_adv_interval
        self._min_rtr_adv_interval = min_rtr_adv_interval
        self._min_delay_between_ras = min_delay_between_ras
        self._adv_managed_flag = adv_managed_flag
        self._adv_other_config_flag = adv_other_config_flag
        self._adv_link_mtu = adv_link_mtu
        self._adv_reachable_time = adv_reachable_time
        self._adv_retrans_timer = adv_retrans_timer
        self._adv_cur_hop_limit = adv_cur_hop_limit
        self._adv_default_lifetime = adv_default_lifetime
        self._adv_default_preference = adv_default_preference
        self._adv_source_ll_address = adv_source_ll_address
        self._adv_home_agent_flag = adv_home_agent_flag
        self._adv_home_agent_info = adv_home_agent_info
        self._home_agent_lifetime = home_agent_lifetime
        self._home_agent_preference = home_agent_preference
        self._adv_mob_rtr_support_flag = adv_mob_rtr_support_flag
        self._adv_interval_opt = adv_interval_opt
        self._adv_on_link = adv_on_link
        self._adv_autonomous = adv_autonomous
        self._adv_router_addr = adv_router_addr
        self._adv_valid_lifetime = adv_valid_lifetime
        self._adv_preferred_lifetime = adv_preferred_lifetime
        self._base_6to4_interface = base_6to4_interface
        self._adv_route_lifetime = adv_route_lifetime
        self._adv_route_preference = adv_route_preference
        self._adv_rdnss_preference = adv_rdnss_preference
        self._adv_rdnss_open = adv_rdnss_open
        self._adv_rdnss_lifetime = adv_rdnss_lifetime

    def package_configs(self) -> dict[str, Any]:
        """Assemble the settings into a renderable structure.

        Returns:
            A dict with the prefix/clients/route/rdnss values plus four
            ordered maps ("interface_options", "prefix_options",
            "route_options", "rdnss_options") holding only the options that
            were explicitly set (None values are filtered out).
        """
        conf: dict[str, Any] = dict()
        conf["prefix"] = self._prefix
        conf["clients"] = self._clients
        conf["route"] = self._route
        conf["rdnss"] = self._rdnss

        conf["interface_options"] = collections.OrderedDict(
            filter(
                lambda pair: pair[1] is not None,
                (
                    ("IgnoreIfMissing", self._ignore_if_missing),
                    ("AdvSendAdvert", self._adv_send_advert),
                    ("UnicastOnly", self._unicast_only),
                    ("MaxRtrAdvInterval", self._max_rtr_adv_interval),
                    ("MinRtrAdvInterval", self._min_rtr_adv_interval),
                    ("MinDelayBetweenRAs", self._min_delay_between_ras),
                    ("AdvManagedFlag", self._adv_managed_flag),
                    ("AdvOtherConfigFlag", self._adv_other_config_flag),
                    ("AdvLinkMTU", self._adv_link_mtu),
                    ("AdvReachableTime", self._adv_reachable_time),
                    ("AdvRetransTimer", self._adv_retrans_timer),
                    ("AdvCurHopLimit", self._adv_cur_hop_limit),
                    ("AdvDefaultLifetime", self._adv_default_lifetime),
                    ("AdvDefaultPreference", self._adv_default_preference),
                    ("AdvSourceLLAddress", self._adv_source_ll_address),
                    ("AdvHomeAgentFlag", self._adv_home_agent_flag),
                    ("AdvHomeAgentInfo", self._adv_home_agent_info),
                    ("HomeAgentLifetime", self._home_agent_lifetime),
                    ("HomeAgentPreference", self._home_agent_preference),
                    ("AdvMobRtrSupportFlag", self._adv_mob_rtr_support_flag),
                    ("AdvIntervalOpt", self._adv_interval_opt),
                ),
            )
        )

        conf["prefix_options"] = collections.OrderedDict(
            filter(
                lambda pair: pair[1] is not None,
                (
                    ("AdvOnLink", self._adv_on_link),
                    ("AdvAutonomous", self._adv_autonomous),
                    ("AdvRouterAddr", self._adv_router_addr),
                    ("AdvValidLifetime", self._adv_valid_lifetime),
                    ("AdvPreferredLifetime", self._adv_preferred_lifetime),
                    ("Base6to4Interface", self._base_6to4_interface),
                ),
            )
        )

        conf["route_options"] = collections.OrderedDict(
            filter(
                lambda pair: pair[1] is not None,
                (
                    ("AdvRouteLifetime", self._adv_route_lifetime),
                    ("AdvRoutePreference", self._adv_route_preference),
                ),
            )
        )

        conf["rdnss_options"] = collections.OrderedDict(
            filter(
                lambda pair: pair[1] is not None,
                (
                    ("AdvRDNSSPreference", self._adv_rdnss_preference),
                    ("AdvRDNSSOpen", self._adv_rdnss_open),
                    ("AdvRDNSSLifetime", self._adv_rdnss_lifetime),
                ),
            )
        )

        return conf
diff --git a/packages/antlion/controllers/ap_lib/radvd_constants.py b/packages/antlion/controllers/ap_lib/radvd_constants.py
new file mode 100644
index 0000000..b02a694
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/radvd_constants.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# String values accepted by radvd for the corresponding RadvdConfig options;
# see radvd.conf(5). Most options are simple "on"/"off" flags.

# Default IPv6 prefix advertised when none is supplied (fd00::/64 is a
# unique local address /64).
DEFAULT_PREFIX = "fd00::/64"

IGNORE_IF_MISSING_ON = "on"
IGNORE_IF_MISSING_OFF = "off"

ADV_SEND_ADVERT_ON = "on"
ADV_SEND_ADVERT_OFF = "off"

UNICAST_ONLY_ON = "on"
UNICAST_ONLY_OFF = "off"

ADV_MANAGED_FLAG_ON = "on"
ADV_MANAGED_FLAG_OFF = "off"

ADV_OTHER_CONFIG_FLAG_ON = "on"
ADV_OTHER_CONFIG_FLAG_OFF = "off"

ADV_DEFAULT_PREFERENCE_ON = "on"
ADV_DEFAULT_PREFERENCE_OFF = "off"

ADV_SOURCE_LL_ADDRESS_ON = "on"
ADV_SOURCE_LL_ADDRESS_OFF = "off"

ADV_HOME_AGENT_FLAG_ON = "on"
ADV_HOME_AGENT_FLAG_OFF = "off"

ADV_HOME_AGENT_INFO_ON = "on"
ADV_HOME_AGENT_INFO_OFF = "off"

ADV_MOB_RTR_SUPPORT_FLAG_ON = "on"
ADV_MOB_RTR_SUPPORT_FLAG_OFF = "off"

ADV_INTERVAL_OPT_ON = "on"
ADV_INTERVAL_OPT_OFF = "off"

ADV_ON_LINK_ON = "on"
ADV_ON_LINK_OFF = "off"

ADV_AUTONOMOUS_ON = "on"
ADV_AUTONOMOUS_OFF = "off"

ADV_ROUTER_ADDR_ON = "on"
ADV_ROUTER_ADDR_OFF = "off"

# Route preference is a three-level value rather than an on/off flag.
ADV_ROUTE_PREFERENCE_LOW = "low"
ADV_ROUTE_PREFERENCE_MED = "medium"
ADV_ROUTE_PREFERENCE_HIGH = "high"

ADV_RDNSS_OPEN_ON = "on"
ADV_RDNSS_OPEN_OFF = "off"
diff --git a/packages/antlion/controllers/ap_lib/regulatory_channels.py b/packages/antlion/controllers/ap_lib/regulatory_channels.py
new file mode 100644
index 0000000..432607c
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/regulatory_channels.py
@@ -0,0 +1,710 @@
+from dataclasses import dataclass
+
# Type aliases describing a regulatory channel table.
Channel = int  # IEEE 802.11 channel number (1-14 for 2.4GHz, 36+ for 5GHz)
Bandwidth = int  # Channel bandwidth in MHz (20, 40, or 80)
# TODO(http://b/281728764): Add device requirements to each frequency e.g.
# "MUST be used indoors only" or "MUST be used with DFS".
# Maps a channel to the list of bandwidths permitted on that channel.
ChannelBandwidthMap = dict[Channel, list[Bandwidth]]
+
+
@dataclass
class CountryChannels:
    """Channels permitted by a single country's regulatory domain."""

    # ISO 3166-1 alpha-2 country code, e.g. "AU" or "US".
    country_code: str
    # Channels allowed in this country, mapped to their permitted bandwidths.
    allowed_channels: ChannelBandwidthMap
+
+
# All antlion-supported channels and frequencies for use in regulatory testing:
# 2.4GHz channels 1-14 at 20MHz, 5GHz channels 36-161 at up to 80MHz, and
# channel 165 at 20MHz only.
TEST_CHANNELS: ChannelBandwidthMap = {
    **{channel: [20] for channel in range(1, 15)},
    **{
        channel: [20, 40, 80]
        for channel in (
            36, 40, 44, 48, 52, 56, 60, 64,
            100, 104, 108, 112, 116, 120, 124, 128, 132, 136, 140, 144,
            149, 153, 157, 161,
        )
    },
    165: [20],
}
+
# All universally accepted 2.4GHz channels and frequencies: channels 1-11 at
# 20MHz. (Channels 12-14 are regulated per-country and listed individually in
# COUNTRY_CHANNELS below.)
WORLD_WIDE_2G_CHANNELS: ChannelBandwidthMap = {
    channel: [20] for channel in range(1, 12)
}
+
# Pre-computed 5GHz channel groups shared by the per-country tables below.
#
# NOTE: These merges are shallow (dict |), like the original per-country
# literals merged with WORLD_WIDE_2G_CHANNELS; bandwidth lists may therefore
# be shared between entries, so callers must treat them as read-only.


def _vht80(*channels: Channel) -> ChannelBandwidthMap:
    """Map each given channel to the 20/40/80MHz bandwidth set."""
    return {channel: [20, 40, 80] for channel in channels}


# 2.4GHz channels 12 and 13, allowed in most domains outside North America.
_CHANNELS_12_13: ChannelBandwidthMap = {12: [20], 13: [20]}

# U-NII-1 and U-NII-2A (channels 36-64).
_CHANNELS_36_64: ChannelBandwidthMap = _vht80(36, 40, 44, 48, 52, 56, 60, 64)

# ETSI-style 5GHz: channels 36-64 plus 100-140 (no channel 144 or U-NII-3).
_ETSI_5G: ChannelBandwidthMap = _CHANNELS_36_64 | _vht80(
    100, 104, 108, 112, 116, 120, 124, 128, 132, 136, 140
)

# Full 5GHz: the ETSI set plus channel 144 and U-NII-3 (149-165).
_FULL_5G: ChannelBandwidthMap = (
    _ETSI_5G | _vht80(144, 149, 153, 157, 161) | {165: [20]}
)

# FCC-style 5GHz: like _FULL_5G but without channels 120-128 (the TDWR
# weather-radar band).
_FCC_5G: ChannelBandwidthMap = (
    _CHANNELS_36_64
    | _vht80(100, 104, 108, 112, 116, 132, 136, 140, 144, 149, 153, 157, 161)
    | {165: [20]}
)

# List of supported channels and frequencies by country.
#
# Please keep this alphabetically ordered. Thanks!
#
# TODO: Add missing countries: Russia, Israel, Korea, Turkey, South Africa,
# Brazil, Bahrain, Vietnam
COUNTRY_CHANNELS = {
    "Australia": CountryChannels(
        country_code="AU",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _FCC_5G,
    ),
    "Austria": CountryChannels(
        country_code="AT",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "Belgium": CountryChannels(
        country_code="BE",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "Canada": CountryChannels(
        country_code="CA",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _FCC_5G,
    ),
    # Fixed: China's ISO 3166-1 alpha-2 code is "CN"; "CH" is Switzerland.
    "China": CountryChannels(
        country_code="CN",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "Denmark": CountryChannels(
        country_code="DK",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "France": CountryChannels(
        country_code="FR",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "Germany": CountryChannels(
        country_code="DE",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "India": CountryChannels(
        country_code="IN",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _FULL_5G,
    ),
    "Ireland": CountryChannels(
        country_code="IE",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "Italy": CountryChannels(
        country_code="IT",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    # Japan additionally allows channel 144, but not U-NII-3 (149-165).
    "Japan": CountryChannels(
        country_code="JP",
        allowed_channels=WORLD_WIDE_2G_CHANNELS
        | _CHANNELS_12_13
        | _ETSI_5G
        | _vht80(144),
    ),
    "Mexico": CountryChannels(
        country_code="MX",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _FCC_5G,
    ),
    "Netherlands": CountryChannels(
        country_code="NL",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "New Zealand": CountryChannels(
        country_code="NZ",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _FULL_5G,
    ),
    "Norway": CountryChannels(
        country_code="NO",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "Singapore": CountryChannels(
        country_code="SG",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _FULL_5G,
    ),
    "Spain": CountryChannels(
        country_code="ES",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "Sweden": CountryChannels(
        country_code="SE",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "Taiwan": CountryChannels(
        country_code="TW",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _FULL_5G,
    ),
    # Channel 11 is already provided by WORLD_WIDE_2G_CHANNELS; the original
    # table listed it redundantly with the identical bandwidth list.
    "United Kingdom of Great Britain": CountryChannels(
        country_code="GB",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _CHANNELS_12_13 | _ETSI_5G,
    ),
    "United States of America": CountryChannels(
        country_code="US",
        allowed_channels=WORLD_WIDE_2G_CHANNELS | _FULL_5G,
    ),
}
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
similarity index 100%
rename from src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
rename to packages/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
new file mode 100644
index 0000000..f04f60b
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
@@ -0,0 +1,150 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
def actiontec_pk5000(
    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
) -> hostapd_config.HostapdConfig:
    """Return a hostapd config simulating an Actiontec PK5000 AP.

    Args:
        iface_wlan_2g: The 2.4Ghz interface of the test AP.
        channel: What channel to use. Only 2.4Ghz is supported for this
            profile.
        security: A security profile. Must be open or WPA2 as this is what is
            supported by the PK5000.
        ssid: Network name.
    Returns:
        A hostapd config.
    Raises:
        ValueError: If the channel is a 5Ghz channel.

    Differences from real pk5000:
        Supported Rates IE:
            PK5000: Supported: 1, 2, 5.5, 11
                    Extended: 6, 9, 12, 18, 24, 36, 48, 54
            Simulated: Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
                       Extended: 24, 36, 48, 54
    """
    # The PK5000 is a US-only AP, so the highest allowable 2.4Ghz channel is
    # 11 rather than 14.
    if channel > 11:
        raise ValueError(
            f"The Actiontec PK5000 does not support 5Ghz. Invalid channel ({channel})"
        )

    # Verify interface and security
    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
    if security.security_mode is not SecurityMode.OPEN:
        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])

    # Basic rates and supported rates of the PK5000.
    rates = (
        hostapd_constants.CCK_AND_OFDM_BASIC_RATES
        | hostapd_constants.CCK_AND_OFDM_DATA_RATES
    )

    return hostapd_config.HostapdConfig(
        ssid=ssid,
        channel=channel,
        hidden=False,
        security=security,
        interface=iface_wlan_2g,
        mode=hostapd_constants.MODE_11G,
        force_wmm=False,
        beacon_interval=100,
        dtim_period=3,
        short_preamble=False,
        additional_parameters=rates,
    )
+
+
def actiontec_mi424wr(
    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
) -> hostapd_config.HostapdConfig:
    # TODO(b/143104825): Permit RIFS once it is supported
    """Return a hostapd config simulating an Actiontec MI424WR AP.

    Args:
        iface_wlan_2g: The 2.4Ghz interface of the test AP.
        channel: What channel to use (2.4Ghz only).
        security: A security profile.
        ssid: The network name.
    Returns:
        A hostapd config.
    Raises:
        ValueError: If the channel is a 5Ghz channel.

    Differences from real MI424WR:
        HT Capabilities:
            MI424WR:
                HT Rx STBC: Support for 1, 2, and 3
            Simulated:
                HT Rx STBC: Support for 1
        HT Information:
            MI424WR:
                RIFS: Permitted
            Simulated:
                RIFS: Prohibited
    """
    if channel > 11:
        raise ValueError(
            f"The Actiontec MI424WR does not support 5Ghz. Invalid channel ({channel})"
        )

    # Verify interface and security
    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
    if security.security_mode is not SecurityMode.OPEN:
        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])

    # CCK/OFDM rates plus three vendor IEs observed on the real AP:
    #   Proprietary Atheros Communication: Adv Capability IE
    #   Proprietary Atheros Communication: Unknown IE
    #   Country Info: US Only IE
    additional_params = (
        hostapd_constants.CCK_AND_OFDM_DATA_RATES
        | hostapd_constants.CCK_AND_OFDM_BASIC_RATES
        | {
            "vendor_elements": (
                "dd0900037f01010000ff7f"
                "dd0a00037f04010000000000"
                "0706555320010b1b"
            )
        }
    )

    return hostapd_config.HostapdConfig(
        ssid=ssid,
        channel=channel,
        hidden=False,
        security=security,
        interface=iface_wlan_2g,
        mode=hostapd_constants.MODE_11N_MIXED,
        force_wmm=True,
        beacon_interval=100,
        dtim_period=1,
        short_preamble=True,
        n_capabilities=[
            hostapd_constants.N_CAPABILITY_TX_STBC,
            hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
            hostapd_constants.N_CAPABILITY_RX_STBC1,
        ],
        additional_parameters=additional_params,
    )
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
new file mode 100644
index 0000000..6a9ae27
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
@@ -0,0 +1,554 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def asus_rtac66u(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
+    """A simulated implementation of an Asus RTAC66U AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile.  Must be open or WPA2 as this is what is
+            supported by the RTAC66U.
+        ssid: Network name
+    Returns:
+        A hostapd config
+    Differences from real RTAC66U:
+        2.4 GHz:
+            Rates:
+                RTAC66U:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+            HT Capab:
+                Info
+                    RTAC66U: Green Field supported
+                    Simulated: Green Field not supported on Whirlwind.
+        5GHz:
+            VHT Capab:
+                RTAC66U:
+                    SU Beamformer Supported,
+                    SU Beamformee Supported,
+                    Beamformee STS Capability: 3,
+                    Number of Sounding Dimensions: 3,
+                    VHT Link Adaptation: Both
+                Simulated:
+                    Above are not supported on Whirlwind.
+            VHT Operation Info:
+                RTAC66U: Basic MCS Map (0x0000)
+                Simulated: Basic MCS Map (0xfffc)
+            VHT Tx Power Envelope:
+                RTAC66U: Local Max Tx Pwr Constraint: 1.0 dBm
+                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
+        Both:
+            HT Capab:
+                A-MPDU
+                    RTAC66U: MPDU Density 4
+                    Simulated: MPDU Density 8
+            HT Info:
+                RTAC66U: RIFS Permitted
+                Simulated: RIFS Prohibited
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    vht_channel_width = 20
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_LDPC,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+        hostapd_constants.N_CAPABILITY_SGI20,
+    ]
+    # WPS IE
+    # Broadcom IE
+    vendor_elements = {
+        "vendor_elements": "dd310050f204104a00011010440001021047001093689729d373c26cb1563c6c570f33"
+        "d7103c0001031049000600372a000120"
+        "dd090010180200001c0000"
+    }
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        mode = hostapd_constants.MODE_11N_MIXED
+        ac_capabilities = None
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        mode = hostapd_constants.MODE_11AC_MIXED
+        ac_capabilities = [
+            hostapd_constants.AC_CAPABILITY_RXLDPC,
+            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+        ]
+
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=mode,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=3,
+        short_preamble=False,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_channel_width=vht_channel_width,
+        additional_parameters=additional_params,
+    )
+
+    return config
+
+
+def asus_rtac86u(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    """A simulated implementation of an Asus RTAC86U AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile.  Must be open or WPA2 as this is what is
+            supported by the RTAC86U.
+        ssid: Network name
+    Returns:
+        A hostapd config
+    Differences from real RTAC86U:
+        2.4GHz:
+            Rates:
+                RTAC86U:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+        5GHz:
+            Country Code:
+                Simulated: Has two country code IEs, one that matches
+                the actual, and another explicit IE that was required for
+                hostapd's 802.11d to work.
+        Both:
+            RSN Capabilities (w/ WPA2):
+                RTAC86U:
+                    RSN PTKSA Replay Counter Capab: 16
+                Simulated:
+                    RSN PTKSA Replay Counter Capab: 1
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        mode = hostapd_constants.MODE_11G
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        spectrum_mgmt = False
+        # Measurement Pilot Transmission IE
+        vendor_elements = {"vendor_elements": "42020000"}
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        mode = hostapd_constants.MODE_11A
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        spectrum_mgmt = True
+        # Country Information IE (w/ individual channel info)
+        # TPC Report Transmit Power IE
+        # Measurement Pilot Transmission IE
+        vendor_elements = {
+            "vendor_elements": "074255532024011e28011e2c011e30011e34011e38011e3c011e40011e64011e"
+            "68011e6c011e70011e74011e84011e88011e8c011e95011e99011e9d011ea1011e"
+            "a5011e"
+            "23021300"
+            "42020000"
+        }
+
+    additional_params = rates | qbss | vendor_elements
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=mode,
+        force_wmm=False,
+        beacon_interval=100,
+        dtim_period=3,
+        short_preamble=False,
+        spectrum_mgmt_required=spectrum_mgmt,
+        additional_parameters=additional_params,
+    )
+    return config
+
+
+def asus_rtac5300(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
+    """A simulated implementation of an Asus RTAC5300 AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile.  Must be open or WPA2 as this is what is
+            supported by the RTAC5300.
+        ssid: Network name
+    Returns:
+        A hostapd config
+    Differences from real RTAC5300:
+        2.4GHz:
+            Rates:
+                RTAC86U:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+        5GHz:
+            VHT Capab:
+                RTAC5300:
+                    SU Beamformer Supported,
+                    SU Beamformee Supported,
+                    Beamformee STS Capability: 4,
+                    Number of Sounding Dimensions: 4,
+                    MU Beamformer Supported,
+                    VHT Link Adaptation: Both
+                Simulated:
+                    Above are not supported on Whirlwind.
+            VHT Operation Info:
+                RTAC5300: Basic MCS Map (0x0000)
+                Simulated: Basic MCS Map (0xfffc)
+            VHT Tx Power Envelope:
+                RTAC5300: Local Max Tx Pwr Constraint: 1.0 dBm
+                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
+        Both:
+            HT Capab:
+                A-MPDU
+                    RTAC5300: MPDU Density 4
+                    Simulated: MPDU Density 8
+            HT Info:
+                RTAC5300: RIFS Permitted
+                Simulated: RIFS Prohibited
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    vht_channel_width = 20
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_LDPC,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_SGI20,
+    ]
+
+    # Broadcom IE
+    vendor_elements = {"vendor_elements": "dd090010180200009c0000"}
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        mode = hostapd_constants.MODE_11N_MIXED
+        # AsusTek IE
+        # Epigram 2.4GHz IE
+        vendor_elements["vendor_elements"] += (
+            "dd25f832e4010101020100031411b5"
+            "2fd437509c30b3d7f5cf5754fb125aed3b8507045aed3b85"
+            "dd1e00904c0418bf0cb2798b0faaff0000aaff0000c0050001000000c3020002"
+        )
+        ac_capabilities = None
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        mode = hostapd_constants.MODE_11AC_MIXED
+        # Epigram 5GHz IE
+        vendor_elements["vendor_elements"] += "dd0500904c0410"
+        ac_capabilities = [
+            hostapd_constants.AC_CAPABILITY_RXLDPC,
+            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+        ]
+
+    additional_params = rates | qbss | vendor_elements | hostapd_constants.UAPSD_ENABLED
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=mode,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=3,
+        short_preamble=False,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_channel_width=vht_channel_width,
+        additional_parameters=additional_params,
+    )
+    return config
+
+
+def asus_rtn56u(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    """A simulated implementation of an Asus RTN56U AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile.  Must be open or WPA2 as this is what is
+            supported by the RTN56U.
+        ssid: Network name
+    Returns:
+        A hostapd config
+    Differences from real RTN56U:
+        2.4GHz:
+            Rates:
+                RTN56U:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+        Both:
+            Fixed Parameters:
+                RTN56U: APSD Implemented
+                Simulated: APSD Not Implemented
+            HT Capab:
+                A-MPDU
+                    RTN56U: MPDU Density 4
+                    Simulated: MPDU Density 8
+            RSN Capabilities (w/ WPA2):
+                RTN56U:
+                    RSN PTKSA Replay Counter Capab: 1
+                Simulated:
+                    RSN PTKSA Replay Counter Capab: 16
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_SGI40,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+    ]
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        # Ralink Technology IE
+        # US Country Code IE
+        # AP Channel Report IEs (2)
+        # WPS IE
+        vendor_elements = {
+            "vendor_elements": "dd07000c4307000000"
+            "0706555320010b14"
+            "33082001020304050607"
+            "33082105060708090a0b"
+            "dd270050f204104a000110104400010210470010bc329e001dd811b286011c872c"
+            "d33448103c000101"
+        }
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        # Ralink Technology IE
+        # US Country Code IE
+        vendor_elements = {"vendor_elements": "dd07000c4307000000" "0706555320010b14"}
+
+    additional_params = rates | vendor_elements | qbss | hostapd_constants.UAPSD_ENABLED
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=hostapd_constants.MODE_11N_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=1,
+        short_preamble=False,
+        n_capabilities=n_capabilities,
+        additional_parameters=additional_params,
+    )
+
+    return config
+
+
+def asus_rtn66u(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    """A simulated implementation of an Asus RTN66U AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile.  Must be open or WPA2 as this is what is
+            supported by the RTN66U.
+        ssid: Network name
+    Returns:
+        A hostapd config
+    Differences from real RTN66U:
+        2.4GHz:
+            Rates:
+                RTN66U:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+        Both:
+            HT Info:
+                RTN66U: RIFS Permitted
+                Simulated: RIFS Prohibited
+            HT Capab:
+                Info:
+                    RTN66U: Green Field supported
+                    Simulated: Green Field not supported on Whirlwind.
+                A-MPDU
+                    RTN66U: MPDU Density 4
+                    Simulated: MPDU Density 8
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_LDPC,
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+    ]
+    # Broadcom IE
+    vendor_elements = {"vendor_elements": "dd090010180200001c0000"}
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40)
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=hostapd_constants.MODE_11N_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=3,
+        short_preamble=False,
+        n_capabilities=n_capabilities,
+        additional_parameters=additional_params,
+    )
+
+    return config
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
new file mode 100644
index 0000000..62a9d66
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
@@ -0,0 +1,98 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def belkin_f9k1001v5(
+    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    """A simulated implementation of a Belkin F9K1001v5 AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real F9K1001v5:
+        Rates:
+            F9K1001v5:
+                Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                Extended: 6, 9, 12, 48
+            Simulated:
+                Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                Extended: 24, 36, 48, 54
+        HT Info:
+            F9K1001v5:
+                RIFS: Permitted
+            Simulated:
+                RIFS: Prohibited
+        RSN Capabilities (w/ WPA2):
+            F9K1001v5:
+                RSN PTKSA Replay Counter Capab: 1
+            Simulated:
+                RSN PTKSA Replay Counter Capab: 16
+    """
+    if channel > 11:
+        raise ValueError(
+            f"The Belkin F9k1001v5 does not support 5Ghz. Invalid channel ({channel})"
+        )
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_SGI40,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+    ]
+
+    rates = (
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES
+        | hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    )
+
+    # Broadcom IE
+    # WPS IE
+    vendor_elements = {
+        "vendor_elements": "dd090010180200100c0000"
+        "dd180050f204104a00011010440001021049000600372a000120"
+    }
+
+    additional_params = rates | vendor_elements
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=iface_wlan_2g,
+        mode=hostapd_constants.MODE_11N_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=3,
+        short_preamble=False,
+        n_capabilities=n_capabilities,
+        additional_parameters=additional_params,
+    )
+
+    return config
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
new file mode 100644
index 0000000..21f3fb1
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
@@ -0,0 +1,305 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def linksys_ea4500(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
+    """A simulated implementation of a Linksys EA4500 AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5GHz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real EA4500:
+        CF (Contention-Free) Parameter IE:
+            EA4500: has CF Parameter IE
+            Simulated: does not have CF Parameter IE
+        HT Capab:
+            Info:
+                EA4500: Green Field supported
+                Simulated: Green Field not supported on Whirlwind.
+            A-MPDU
+                EA4500: MPDU Density 4
+                Simulated: MPDU Density 8
+        RSN Capab (w/ WPA2):
+            EA4500:
+                RSN PTKSA Replay Counter Capab: 1
+            Simulated:
+                RSN PTKSA Replay Counter Capab: 16
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_SGI40,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+    ]
+
+    # Epigram HT Capabilities IE
+    # Epigram HT Additional Capabilities IE
+    # Marvell Semiconductor, Inc. IE
+    vendor_elements = {
+        "vendor_elements": "dd1e00904c33fc0117ffffff0000000000000000000000000000000000000000"
+        "dd1a00904c3424000000000000000000000000000000000000000000"
+        "dd06005043030000"
+    }
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        obss_interval = 180
+        n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        obss_interval = None
+
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=hostapd_constants.MODE_11N_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=1,
+        short_preamble=True,
+        obss_interval=obss_interval,
+        n_capabilities=n_capabilities,
+        additional_parameters=additional_params,
+    )
+
+    return config
+
+
+def linksys_ea9500(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    """A simulated implementation of a Linksys EA9500 AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5GHz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real EA9500:
+        2.4GHz:
+            Rates:
+                EA9500:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+        RSN Capab (w/ WPA2):
+            EA9500:
+                RSN PTKSA Replay Counter Capab: 16
+            Simulated:
+                RSN PTKSA Replay Counter Capab: 1
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
+    # Measurement Pilot Transmission IE
+    vendor_elements = {"vendor_elements": "42020000"}
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        mode = hostapd_constants.MODE_11G
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        mode = hostapd_constants.MODE_11A
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+
+    additional_params = rates | qbss | vendor_elements
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=mode,
+        force_wmm=False,
+        beacon_interval=100,
+        dtim_period=1,
+        short_preamble=False,
+        additional_parameters=additional_params,
+    )
+    return config
+
+
+def linksys_wrt1900acv2(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
+    """A simulated implementation of a Linksys WRT1900ACV2 AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5GHz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real WRT1900ACV2:
+        5 GHz:
+            Simulated: Has two country code IEs, one that matches
+                the actual, and another explicit IE that was required for
+                hostapd's 802.11d to work.
+        Both:
+            HT Capab:
+                A-MPDU
+                    WRT1900ACV2: MPDU Density 4
+                    Simulated: MPDU Density 8
+            VHT Capab:
+                WRT1900ACV2:
+                    SU Beamformer Supported,
+                    SU Beamformee Supported,
+                    Beamformee STS Capability: 4,
+                    Number of Sounding Dimensions: 4,
+                Simulated:
+                    Above are not supported on Whirlwind.
+            RSN Capabilities (w/ WPA2):
+                WRT1900ACV2:
+                    RSN PTKSA Replay Counter Capab: 1
+                Simulated:
+                    RSN PTKSA Replay Counter Capab: 16
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_LDPC,
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_SGI40,
+    ]
+    ac_capabilities = [
+        hostapd_constants.AC_CAPABILITY_RXLDPC,
+        hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+        hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+        hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
+        hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
+        hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+    ]
+    vht_channel_width = 20
+    # Epigram, Inc. HT Capabilities IE
+    # Epigram, Inc. HT Additional Capabilities IE
+    # Marvell Semiconductor IE
+    vendor_elements = {
+        "vendor_elements": "dd1e00904c336c0017ffffff0001000000000000000000000000001fff071800"
+        "dd1a00904c3424000000000000000000000000000000000000000000"
+        "dd06005043030000"
+    }
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        obss_interval = 180
+        spectrum_mgmt = False
+        local_pwr_constraint = {}
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        obss_interval = None
+        spectrum_mgmt = True
+        local_pwr_constraint = {"local_pwr_constraint": 3}
+        # Country Information IE (w/ individual channel info)
+        vendor_elements["vendor_elements"] += (
+            "071e5553202401112801112c011130" "01119501179901179d0117a10117a50117"
+        )
+
+    additional_params = (
+        rates | vendor_elements | hostapd_constants.UAPSD_ENABLED | local_pwr_constraint
+    )
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=hostapd_constants.MODE_11AC_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=1,
+        short_preamble=True,
+        obss_interval=obss_interval,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_channel_width=vht_channel_width,
+        spectrum_mgmt_required=spectrum_mgmt,
+        additional_parameters=additional_params,
+    )
+    return config
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
new file mode 100644
index 0000000..69c1845
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
@@ -0,0 +1,268 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def netgear_r7000(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
+    """A simulated implementation of a Netgear R7000 AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5GHz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real R7000:
+        2.4GHz:
+            Rates:
+                R7000:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48,
+        5GHz:
+            VHT Capab:
+                R7000:
+                    SU Beamformer Supported,
+                    SU Beamformee Supported,
+                    Beamformee STS Capability: 3,
+                    Number of Sounding Dimensions: 3,
+                    VHT Link Adaptation: Both
+                Simulated:
+                    Above are not supported on Whirlwind.
+            VHT Operation Info:
+                R7000: Basic MCS Map (0x0000)
+                Simulated: Basic MCS Map (0xfffc)
+            VHT Tx Power Envelope:
+                R7000: Local Max Tx Pwr Constraint: 1.0 dBm
+                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
+        Both:
+            HT Capab:
+                A-MPDU
+                    R7000: MPDU Density 4
+                    Simulated: MPDU Density 8
+            HT Info:
+                R7000: RIFS Permitted
+                Simulated: RIFS Prohibited
+            RM Capabilities:
+                R7000:
+                    Beacon Table Measurement: Not Supported
+                    Statistic Measurement: Enabled
+                    AP Channel Report Capability: Enabled
+                Simulated:
+                    Beacon Table Measurement: Supported
+                    Statistic Measurement: Disabled
+                    AP Channel Report Capability: Disabled
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = dict(hostapd_constants.CCK_AND_OFDM_DATA_RATES)  # copy: .update() below must not mutate the shared constant
+    vht_channel_width = 80
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_LDPC,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+        hostapd_constants.N_CAPABILITY_SGI20,
+    ]
+    # Netgear IE
+    # WPS IE
+    # Epigram, Inc. IE
+    # Broadcom IE
+    vendor_elements = {
+        "vendor_elements": "dd0600146c000000"
+        "dd310050f204104a00011010440001021047001066189606f1e967f9c0102048817a7"
+        "69e103c0001031049000600372a000120"
+        "dd1e00904c0408bf0cb259820feaff0000eaff0000c0050001000000c3020002"
+        "dd090010180200001c0000"
+    }
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        mode = hostapd_constants.MODE_11N_MIXED
+        obss_interval = 300
+        ac_capabilities = None
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        mode = hostapd_constants.MODE_11AC_MIXED
+        n_capabilities += [
+            hostapd_constants.N_CAPABILITY_SGI40,
+        ]
+
+        if hostapd_config.ht40_plus_allowed(channel):
+            n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
+        elif hostapd_config.ht40_minus_allowed(channel):
+            n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_MINUS)
+
+        obss_interval = None
+        ac_capabilities = [
+            hostapd_constants.AC_CAPABILITY_RXLDPC,
+            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+        ]
+
+    additional_params = (
+        rates
+        | vendor_elements
+        | qbss
+        | hostapd_constants.ENABLE_RRM_BEACON_REPORT
+        | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
+        | hostapd_constants.UAPSD_ENABLED
+    )
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=mode,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=2,
+        short_preamble=False,
+        obss_interval=obss_interval,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_channel_width=vht_channel_width,
+        additional_parameters=additional_params,
+    )
+    return config
+
+
+def netgear_wndr3400(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS on 5GHz once it is supported
+    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
+    """A simulated implementation of a Netgear WNDR3400 AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5GHz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real WNDR3400:
+        2.4GHz:
+            Rates:
+                WNDR3400:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48,
+        5GHz:
+            HT Info:
+                WNDR3400: RIFS Permitted
+                Simulated: RIFS Prohibited
+        Both:
+            HT Capab:
+                A-MPDU
+                    WNDR3400: MPDU Density 16
+                    Simulated: MPDU Density 8
+                Info
+                    WNDR3400: Green Field supported
+                    Simulated: Green Field not supported on Whirlwind.
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = dict(hostapd_constants.CCK_AND_OFDM_DATA_RATES)  # copy: .update() below must not mutate the shared constant
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_SGI40,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+    ]
+    # WPS IE
+    # Broadcom IE
+    vendor_elements = {
+        "vendor_elements": "dd310050f204104a0001101044000102104700108c403eb883e7e225ab139828703ade"
+        "dc103c0001031049000600372a000120"
+        "dd090010180200f0040000"
+    }
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        obss_interval = 300
+        # N_CAPABILITY_DSSS_CCK_40 is already in the common n_capabilities list above.
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        obss_interval = None
+        n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)  # NOTE(review): unconditional HT40+; other profiles gate on ht40_plus_allowed(channel) — confirm intended
+
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=hostapd_constants.MODE_11N_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=2,
+        short_preamble=False,
+        obss_interval=obss_interval,
+        n_capabilities=n_capabilities,
+        additional_parameters=additional_params,
+    )
+
+    return config
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
new file mode 100644
index 0000000..8b2d0eb
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
@@ -0,0 +1,103 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def securifi_almond(
+    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
+) -> hostapd_config.HostapdConfig:
+    """A simulated implementation of a Securifi Almond AP
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real Almond:
+            Rates:
+                Almond:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+            HT Capab:
+                A-MPDU
+                    Almond: MPDU Density 4
+                    Simulated: MPDU Density 8
+            RSN Capab (w/ WPA2):
+                Almond:
+                    RSN PTKSA Replay Counter Capab: 1
+                Simulated:
+                    RSN PTKSA Replay Counter Capab: 16
+    """
+    if channel > 11:  # 2.4 GHz-only device: reject 5 GHz channels up front
+        raise ValueError(
+            f"The Securifi Almond does not support 5Ghz. Invalid channel ({channel})"
+        )
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_HT40_PLUS,
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_SGI40,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
+    ]
+
+    rates = (  # dict-union builds a new dict, so the constant dicts are not mutated
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES
+        | hostapd_constants.CCK_AND_OFDM_DATA_RATES
+    )
+
+    # Ralink Technology IE
+    # Country Information IE
+    # AP Channel Report IEs
+    vendor_elements = {
+        "vendor_elements": "dd07000c4307000000"
+        "0706555320010b14"
+        "33082001020304050607"
+        "33082105060708090a0b"
+    }
+
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}  # QBSS Load element options
+
+    additional_params = rates | vendor_elements | qbss
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=iface_wlan_2g,
+        mode=hostapd_constants.MODE_11N_MIXED,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=1,
+        short_preamble=True,
+        obss_interval=300,
+        n_capabilities=n_capabilities,
+        additional_parameters=additional_params,
+    )
+
+    return config
diff --git a/packages/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
new file mode 100644
index 0000000..1a01303
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
@@ -0,0 +1,466 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def tplink_archerc5(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
+    """A simulated implementation of a TPLink ArcherC5 AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5GHz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real ArcherC5:
+        2.4GHz:
+            Rates:
+                ArcherC5:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+            HT Capab:
+                Info:
+                    ArcherC5: Green Field supported
+                    Simulated: Green Field not supported on Whirlwind.
+        5GHz:
+            VHT Capab:
+                ArcherC5:
+                    SU Beamformer Supported,
+                    SU Beamformee Supported,
+                    Beamformee STS Capability: 3,
+                    Number of Sounding Dimensions: 3,
+                    VHT Link Adaptation: Both
+                Simulated:
+                    Above are not supported on Whirlwind.
+            VHT Operation Info:
+                ArcherC5: Basic MCS Map (0x0000)
+                Simulated: Basic MCS Map (0xfffc)
+            VHT Tx Power Envelope:
+                ArcherC5: Local Max Tx Pwr Constraint: 1.0 dBm
+                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
+        Both:
+            HT Capab:
+                A-MPDU
+                    ArcherC5: MPDU Density 4
+                    Simulated: MPDU Density 8
+            HT Info:
+                ArcherC5: RIFS Permitted
+                Simulated: RIFS Prohibited
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = dict(hostapd_constants.CCK_AND_OFDM_DATA_RATES)  # copy: .update() below must not mutate the shared constant
+    vht_channel_width = 20
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+    ]
+    # WPS IE
+    # Broadcom IE
+    vendor_elements = {
+        "vendor_elements": "dd310050f204104a000110104400010210470010d96c7efc2f8938f1efbd6e5148bfa8"
+        "12103c0001031049000600372a000120"
+        "dd090010180200001c0000"
+    }
+    qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        short_preamble = True
+        mode = hostapd_constants.MODE_11N_MIXED
+        n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40)
+        ac_capabilities = None
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        short_preamble = False
+        mode = hostapd_constants.MODE_11AC_MIXED
+        n_capabilities.append(hostapd_constants.N_CAPABILITY_LDPC)
+        ac_capabilities = [
+            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+            hostapd_constants.AC_CAPABILITY_RXLDPC,
+            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+        ]
+
+    additional_params = (
+        rates
+        | vendor_elements
+        | qbss
+        | hostapd_constants.ENABLE_RRM_BEACON_REPORT
+        | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
+        | hostapd_constants.UAPSD_ENABLED
+    )
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=mode,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=1,
+        short_preamble=short_preamble,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_channel_width=vht_channel_width,
+        additional_parameters=additional_params,
+    )
+    return config
+
+
+def tplink_archerc7(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    """A simulated implementation of a TPLink ArcherC7 AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5GHz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real ArcherC7:
+        5GHz:
+            Country Code:
+                Simulated: Has two country code IEs, one that matches
+                the actual, and another explicit IE that was required for
+                hostapd's 802.11d to work.
+        Both:
+            HT Info:
+                ArcherC7: RIFS Permitted
+                Simulated: RIFS Prohibited
+            RSN Capabilities (w/ WPA2):
+                ArcherC7:
+                    RSN PTKSA Replay Counter Capab: 1
+                Simulated:
+                    RSN PTKSA Replay Counter Capab: 16
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = dict(hostapd_constants.CCK_AND_OFDM_DATA_RATES)  # copy: .update() below must not mutate the shared constant
+    vht_channel_width: int | None = 80
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_LDPC,
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+    ]
+    # Atheros IE
+    # WPS IE
+    vendor_elements = {
+        "vendor_elements": "dd0900037f01010000ff7f"
+        "dd180050f204104a00011010440001021049000600372a000120"
+    }
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        short_preamble = True
+        mode = hostapd_constants.MODE_11N_MIXED
+        spectrum_mgmt = False
+        pwr_constraint = {}
+        ac_capabilities = None
+        vht_channel_width = None
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        short_preamble = False
+        mode = hostapd_constants.MODE_11AC_MIXED
+        spectrum_mgmt = True
+        # Country Information IE (w/ individual channel info)
+        vendor_elements["vendor_elements"] += (
+            "074255532024011e28011e2c011e30"
+            "011e3401173801173c01174001176401176801176c0117700117740117840117"
+            "8801178c011795011e99011e9d011ea1011ea5011e"
+        )
+        pwr_constraint = {"local_pwr_constraint": 3}
+        n_capabilities += [
+            hostapd_constants.N_CAPABILITY_SGI40,
+            hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+        ]
+
+        if hostapd_config.ht40_plus_allowed(channel):
+            n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
+        elif hostapd_config.ht40_minus_allowed(channel):
+            n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_MINUS)
+
+        ac_capabilities = [
+            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+            hostapd_constants.AC_CAPABILITY_RXLDPC,
+            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+            hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
+            hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
+        ]
+
+    additional_params = (
+        rates | vendor_elements | hostapd_constants.UAPSD_ENABLED | pwr_constraint
+    )
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=mode,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=1,
+        short_preamble=short_preamble,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_channel_width=vht_channel_width,
+        spectrum_mgmt_required=spectrum_mgmt,
+        additional_parameters=additional_params,
+    )
+    return config
+
+
+def tplink_c1200(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
+    # TODO(b/143104825): Permit RIFS once it is supported
+    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
+    """A simulated implementation of a TPLink C1200 AP.
+    Args:
+        iface_wlan_2g: The 2.4Ghz interface of the test AP.
+        iface_wlan_5g: The 5GHz interface of the test AP.
+        channel: What channel to use.
+        security: A security profile (open or WPA2).
+        ssid: The network name.
+    Returns:
+        A hostapd config.
+    Differences from real C1200:
+        2.4GHz:
+            Rates:
+                C1200:
+                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
+                    Extended: 6, 9, 12, 48
+                Simulated:
+                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
+                    Extended: 24, 36, 48, 54
+            HT Capab:
+                Info:
+                    C1200: Green Field supported
+                    Simulated: Green Field not supported on Whirlwind.
+        5GHz:
+            VHT Operation Info:
+                C1200: Basic MCS Map (0x0000)
+                Simulated: Basic MCS Map (0xfffc)
+            VHT Tx Power Envelope:
+                C1200: Local Max Tx Pwr Constraint: 7.0 dBm
+                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
+        Both:
+            HT Info:
+                C1200: RIFS Permitted
+                Simulated: RIFS Prohibited
+    """
+    # Verify interface and security
+    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
+    hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
+        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
+
+    # Common Parameters
+    rates = dict(hostapd_constants.CCK_AND_OFDM_DATA_RATES)  # copy: .update() below must not mutate the shared constant
+    vht_channel_width = 20
+    n_capabilities = [
+        hostapd_constants.N_CAPABILITY_SGI20,
+        hostapd_constants.N_CAPABILITY_TX_STBC,
+        hostapd_constants.N_CAPABILITY_RX_STBC1,
+        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+    ]
+    # WPS IE
+    # Broadcom IE
+    vendor_elements = {
+        "vendor_elements": "dd350050f204104a000110104400010210470010000000000000000000000000000000"
+        "00103c0001031049000a00372a00012005022688"
+        "dd090010180200000c0000"
+    }
+
+    # 2.4GHz
+    if channel <= 11:
+        interface = iface_wlan_2g
+        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
+        short_preamble = True
+        mode = hostapd_constants.MODE_11N_MIXED
+        ac_capabilities = None
+
+    # 5GHz
+    else:
+        interface = iface_wlan_5g
+        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
+        short_preamble = False
+        mode = hostapd_constants.MODE_11AC_MIXED
+        n_capabilities.append(hostapd_constants.N_CAPABILITY_LDPC)
+        ac_capabilities = [
+            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
+            hostapd_constants.AC_CAPABILITY_RXLDPC,
+            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
+            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
+            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+        ]
+
+    additional_params = (
+        rates
+        | vendor_elements
+        | hostapd_constants.ENABLE_RRM_BEACON_REPORT
+        | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
+        | hostapd_constants.UAPSD_ENABLED
+    )
+
+    config = hostapd_config.HostapdConfig(
+        ssid=ssid,
+        channel=channel,
+        hidden=False,
+        security=security,
+        interface=interface,
+        mode=mode,
+        force_wmm=True,
+        beacon_interval=100,
+        dtim_period=1,
+        short_preamble=short_preamble,
+        n_capabilities=n_capabilities,
+        ac_capabilities=ac_capabilities,
+        vht_channel_width=vht_channel_width,
+        additional_parameters=additional_params,
+    )
+    return config
+
+
def tplink_tlwr940n(
    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
) -> hostapd_config.HostapdConfig:
    # TODO(b/143104825): Permit RIFS once it is supported
    """A simulated implementation of a TPLink TLWR940N AP.

    Args:
        iface_wlan_2g: The 2.4Ghz interface of the test AP.
        channel: What channel to use (2.4GHz only, i.e. <= 11).
        security: A security profile (open or WPA2).
        ssid: The network name.
    Returns:
        A hostapd config.
    Raises:
        ValueError: if channel is a 5GHz channel (> 11); the real TLWR940N is
            a 2.4GHz-only device.
    Differences from real TLWR940N:
        HT Info:
            TLWR940N: RIFS Permitted
            Simulated: RIFS Prohibited
        RSN Capabilities (w/ WPA2):
            TLWR940N:
                RSN PTKSA Replay Counter Capab: 1
            Simulated:
                RSN PTKSA Replay Counter Capab: 16
    """
    if channel > 11:
        raise ValueError(
            "The mock TP-Link TLWR940N does not support 5Ghz. "
            f"Invalid channel ({channel})"
        )
    # Verify interface and security
    hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
    if security.security_mode is not SecurityMode.OPEN:
        hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])

    n_capabilities = [
        hostapd_constants.N_CAPABILITY_SGI20,
        hostapd_constants.N_CAPABILITY_TX_STBC,
        hostapd_constants.N_CAPABILITY_RX_STBC1,
    ]

    rates = (
        hostapd_constants.CCK_AND_OFDM_BASIC_RATES
        | hostapd_constants.CCK_AND_OFDM_DATA_RATES
    )

    # Vendor-specific IEs copied from a real TLWR940N beacon:
    # Atheros Communications, Inc. IE
    # WPS IE
    vendor_elements = {
        "vendor_elements": "dd0900037f01010000ff7f"
        "dd260050f204104a0001101044000102104900140024e2600200010160000002000160"
        "0100020001"
    }

    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED

    config = hostapd_config.HostapdConfig(
        ssid=ssid,
        channel=channel,
        hidden=False,
        security=security,
        interface=iface_wlan_2g,
        mode=hostapd_constants.MODE_11N_MIXED,
        force_wmm=True,
        beacon_interval=100,
        dtim_period=1,
        short_preamble=True,
        n_capabilities=n_capabilities,
        additional_parameters=additional_params,
    )

    return config
diff --git a/packages/antlion/controllers/ap_lib/wireless_network_management.py b/packages/antlion/controllers/ap_lib/wireless_network_management.py
new file mode 100644
index 0000000..848cf5f
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/wireless_network_management.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import NewType
+
+from antlion.controllers.ap_lib.radio_measurement import NeighborReportElement
+
# A BSS Transition Candidate List is an ordered list of neighbor report
# elements, as carried in a BSS Transition Management request
# (IEEE 802.11-2020 9.6.13.9).
BssTransitionCandidateList = NewType(
    "BssTransitionCandidateList", list[NeighborReportElement]
)
+
+
class BssTerminationDuration:
    """BSS Termination Duration subelement.

    See IEEE 802.11-2020 Figure 9-341.
    """

    def __init__(self, duration: int):
        """Initialize the subelement.

        Args:
            duration: number of minutes the BSS will be offline.
        """
        # hostapd does not currently support setting the BSS Termination TSF
        # (the other value carried by this subelement), so only the duration
        # is modeled here.
        self._duration = duration

    @property
    def duration(self) -> int:
        """Number of minutes the BSS will be offline."""
        return self._duration
+
+
class BssTransitionManagementRequest:
    """Representation of BSS Transition Management request.

    See IEEE 802.11-2020 9.6.13.9.
    """

    def __init__(
        self,
        preferred_candidate_list_included: bool = False,
        abridged: bool = False,
        disassociation_imminent: bool = False,
        ess_disassociation_imminent: bool = False,
        disassociation_timer: int = 0,
        validity_interval: int = 1,
        bss_termination_duration: BssTerminationDuration | None = None,
        session_information_url: str | None = None,
        candidate_list: BssTransitionCandidateList | None = None,
    ):
        """Build a BSS Transition Management request.

        Args:
            preferred_candidate_list_included: whether the candidate list is a
                preferred candidate list, or (if False) a list of known
                candidates.
            abridged: whether a preference value of 0 is assigned to all BSSIDs
                that do not appear in the candidate list, or (if False) AP has
                no recommendation for/against anything not in the candidate
                list.
            disassociation_imminent: whether the STA is about to be
                disassociated by the AP.
            ess_disassociation_imminent: whether the STA will be disassociated
                from the ESS.
            disassociation_timer: the number of beacon transmission times
                (TBTTs) until the AP disassociates this STA (default 0, meaning
                AP has not determined when it will disassociate this STA).
            validity_interval: number of TBTTs until the candidate list is no
                longer valid (default 1).
            bss_termination_duration: BSS Termination Duration subelement.
            session_information_url: this URL is included if ESS disassociation
                is imminent.
            candidate_list: zero or more neighbor report elements.
        """
        # Flags of the Request Mode field (IEEE 802.11-2020 Figure 9-924).
        self._preferred_candidate_list_included = preferred_candidate_list_included
        self._abridged = abridged
        self._disassociation_imminent = disassociation_imminent
        self._ess_disassociation_imminent = ess_disassociation_imminent
        # Disassociation Timer (Figure 9-925) and Validity Interval (9.6.13.9).
        self._disassociation_timer = disassociation_timer
        self._validity_interval = validity_interval
        # Optional fields: BSS Termination Duration subelement (Figure 9-341),
        # Session Information URL (Figure 9-926), candidate list (9.6.13.9).
        self._bss_termination_duration = bss_termination_duration
        self._session_information_url = session_information_url
        self._candidate_list = candidate_list

    @property
    def preferred_candidate_list_included(self) -> bool:
        """Whether the candidate list is a preferred candidate list."""
        return self._preferred_candidate_list_included

    @property
    def abridged(self) -> bool:
        """Whether unlisted BSSIDs implicitly carry a preference of 0."""
        return self._abridged

    @property
    def disassociation_imminent(self) -> bool:
        """Whether the AP is about to disassociate this STA."""
        return self._disassociation_imminent

    @property
    def bss_termination_included(self) -> bool:
        """Whether a BSS Termination Duration subelement is present."""
        return self._bss_termination_duration is not None

    @property
    def ess_disassociation_imminent(self) -> bool:
        """Whether the STA will be disassociated from the ESS."""
        return self._ess_disassociation_imminent

    @property
    def disassociation_timer(self) -> int | None:
        """TBTTs until disassociation, or None (reserved) if not imminent."""
        if self.disassociation_imminent:
            return self._disassociation_timer
        # The field is reserved unless disassociation is imminent.
        return None

    @property
    def validity_interval(self) -> int:
        """TBTTs until the candidate list is no longer valid."""
        return self._validity_interval

    @property
    def bss_termination_duration(self) -> BssTerminationDuration | None:
        """Optional BSS Termination Duration subelement."""
        return self._bss_termination_duration

    @property
    def session_information_url(self) -> str | None:
        """Optional session information URL."""
        return self._session_information_url

    @property
    def candidate_list(self) -> BssTransitionCandidateList | None:
        """Optional list of neighbor report elements."""
        return self._candidate_list
diff --git a/packages/antlion/controllers/attenuator.py b/packages/antlion/controllers/attenuator.py
new file mode 100644
index 0000000..ccfdfec
--- /dev/null
+++ b/packages/antlion/controllers/attenuator.py
@@ -0,0 +1,364 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import enum
+import logging
+from typing import Protocol, runtime_checkable
+
+from antlion.libs.proc import job
+from antlion.types import ControllerConfig
+from antlion.validation import MapValidator
+
+MOBLY_CONTROLLER_CONFIG_NAME = "Attenuator"
+ACTS_CONTROLLER_REFERENCE_NAME = "attenuators"
+_ATTENUATOR_OPEN_RETRIES = 3
+
+
+class Model(enum.StrEnum):
+    AEROFLEX_TELNET = "aeroflex.telnet"
+    MINICIRCUITS_HTTP = "minicircuits.http"
+    MINICIRCUITS_TELNET = "minicircuits.telnet"
+
+    def create(self, instrument_count: int) -> AttenuatorInstrument:
+        match self:
+            case Model.AEROFLEX_TELNET:
+                import antlion.controllers.attenuator_lib.aeroflex.telnet
+
+                return antlion.controllers.attenuator_lib.aeroflex.telnet.AttenuatorInstrument(
+                    instrument_count
+                )
+            case Model.MINICIRCUITS_HTTP:
+                import antlion.controllers.attenuator_lib.minicircuits.http
+
+                return antlion.controllers.attenuator_lib.minicircuits.http.AttenuatorInstrument(
+                    instrument_count
+                )
+            case Model.MINICIRCUITS_TELNET:
+                import antlion.controllers.attenuator_lib.minicircuits.telnet
+
+                return antlion.controllers.attenuator_lib.minicircuits.telnet.AttenuatorInstrument(
+                    instrument_count
+                )
+
+
def create(configs: list[ControllerConfig]) -> list[Attenuator]:
    """Create Attenuator objects from Mobly controller configs.

    Each config describes one attenuator instrument; one Attenuator wrapper is
    returned per attenuator port within each instrument.

    Args:
        configs: "Attenuator" entries from the testbed config. Each must
            provide "Model", "InstrumentCount", "Address", and "Port", and may
            provide "Protocol" (defaults to "telnet").

    Returns:
        A flat list of Attenuator objects across all configured instruments.

    Raises:
        Exception: re-raises the underlying connection error after
            _ATTENUATOR_OPEN_RETRIES failed attempts to open the instrument.
    """
    attenuators: list[Attenuator] = []
    for config in configs:
        c = MapValidator(config)
        attn_model = c.get(str, "Model")
        protocol = c.get(str, "Protocol", "telnet")
        model = Model(f"{attn_model}.{protocol}")

        instrument_count = c.get(int, "InstrumentCount")
        attenuator_instrument = model.create(instrument_count)

        address = c.get(str, "Address")
        port = c.get(int, "Port")

        for attempt_number in range(1, _ATTENUATOR_OPEN_RETRIES + 1):
            try:
                attenuator_instrument.open(address, port)
            except Exception as e:
                logging.error(
                    "Attempt %s to open connection to attenuator failed: %s",
                    attempt_number,
                    e,
                )
                if attempt_number == _ATTENUATOR_OPEN_RETRIES:
                    # Out of retries; gather diagnostics before re-raising.
                    ping_output = job.run(
                        f"ping {address} -c 1 -w 1", ignore_status=True
                    )
                    if ping_output.returncode == 1:
                        logging.error("Unable to ping attenuator at %s", address)
                    else:
                        logging.error("Able to ping attenuator at %s", address)
                        # Probe the telnet port to help distinguish a dead
                        # service from a dead host.
                        job.run(
                            f'echo "q" | telnet {address} {port}',
                            ignore_status=True,
                        )
                    raise
            else:
                # Connection established; without this break the instrument
                # was previously re-opened on every remaining retry.
                break
        for i in range(instrument_count):
            attenuators.append(Attenuator(attenuator_instrument, idx=i))
    return attenuators
+
+
def get_info(attenuators: list[Attenuator]) -> list[dict[str, int | str | None]]:
    """Get information on a list of Attenuator objects.

    Args:
        attenuators: A list of Attenuator objects.

    Returns:
        A list of dicts, each holding the instrument address and port index of
        one Attenuator.
    """
    return [
        {
            "Address": attenuator.instrument.address,
            "Attenuator_Port": attenuator.idx,
        }
        for attenuator in attenuators
    ]
+
+
def destroy(objs: list[Attenuator]) -> None:
    """Close the instrument connection behind every given Attenuator."""
    for attenuator_port in objs:
        attenuator_port.instrument.close()
+
+
def get_attenuators_for_device(
    device_attenuator_configs: list[ControllerConfig],
    attenuators: list[Attenuator],
    attenuator_key: str,
) -> list[Attenuator]:
    """Gets the list of attenuators associated to a specified device and builds
    a list of the attenuator objects associated to the ip address in the
    device's section of the ACTS config and the Attenuator's IP address.  In the
    example below the access point object has an attenuator dictionary with
    IP address associated to an attenuator object.  The address is the only
    mandatory field and the 'attenuator_ports_wifi_2g' and
    'attenuator_ports_wifi_5g' are the attenuator_key specified above.  These
    can be anything and is sent in as a parameter to this function.  The numbers
    in the list are ports that are in the attenuator object.  Below is a
    standard Access_Point object and the link to a standard Attenuator object.
    Notice the link is the IP address, which is why the IP address is mandatory.

    "AccessPoint": [
        {
          "ssh_config": {
            "user": "root",
            "host": "192.168.42.210"
          },
          "Attenuator": [
            {
              "Address": "192.168.42.200",
              "attenuator_ports_wifi_2g": [
                0,
                1,
                3
              ],
              "attenuator_ports_wifi_5g": [
                0,
                1
              ]
            }
          ]
        }
      ],
      "Attenuator": [
        {
          "Model": "minicircuits",
          "InstrumentCount": 4,
          "Address": "192.168.42.200",
          "Port": 23
        }
      ]
    Args:
        device_attenuator_configs: A list of attenuators config information in
            the acts config that are associated a particular device.
        attenuators: A list of all of the available attenuators objects
            in the testbed.
        attenuator_key: A string that is the key to search in the device's
            configuration.

    Returns:
        A list of attenuator objects for the specified device and the key in
        that device's config.
    """
    attenuator_list: list[Attenuator] = []
    for device_attenuator_config in device_attenuator_configs:
        c = MapValidator(device_attenuator_config)
        ports = c.list(attenuator_key).all(int)
        for port in ports:
            for attenuator in attenuators:
                # Match on address AND port index. Note: use equality, not
                # identity ("is"), for the int comparison; identity only
                # happens to work for CPython's cached small ints.
                if (
                    attenuator.instrument.address == device_attenuator_config["Address"]
                    and attenuator.idx == port
                ):
                    attenuator_list.append(attenuator)
    return attenuator_list
+
+
#
# Classes for accessing, managing, and manipulating attenuators.
#
# Users will instantiate a specific child class, but almost all operations
# should be performed on the methods and data members defined here in the base
# classes or the wrapper classes.
#
+
+
class AttenuatorError(Exception):
    """Base class for all errors generated by Attenuator-related modules.

    Catching this type also catches the more specific InvalidDataError and
    InvalidOperationError subclasses.
    """
+
+
class InvalidDataError(AttenuatorError):
    """Raised when an unexpected result is seen on the transport layer.

    When this exception is seen, closing and re-opening the link to the
    attenuator instrument is probably necessary. Something has gone wrong in
    the transport.
    """
+
+
class InvalidOperationError(AttenuatorError):
    """Raised when the attenuator's state does not allow the given operation.

    Certain methods may only be accessed when the instance upon which they are
    invoked is in a certain state (for example, sending a command before a
    connection has been opened). This indicates that the object is not in the
    correct state for a method to be called.
    """
+
+
# Sentinel max-attenuation value meaning "not yet known"; checked by
# Attenuator.get_max_atten, which refuses to report it to callers.
INVALID_MAX_ATTEN: float = 999.9
+
+
@runtime_checkable
class AttenuatorInstrument(Protocol):
    """Defines the primitive behavior of all attenuator instruments.

    The AttenuatorInstrument class is designed to provide a simple low-level
    interface for accessing any step attenuator instrument comprised of one or
    more attenuators and a controller. All AttenuatorInstruments should override
    all the methods below and call AttenuatorInstrument.__init__ in their
    constructors. Outside of setup/teardown, devices should be accessed via
    this generic "interface".
    """

    @property
    def address(self) -> str | None:
        """Return the address of the attenuator, or None if not connected."""
        ...

    @property
    def num_atten(self) -> int:
        """Return the number of attenuators hosted by this instrument."""
        ...

    @property
    def max_atten(self) -> float:
        """Return the maximum allowed attenuation value."""
        ...

    def open(self, host: str, port: int, timeout_sec: int = 5) -> None:
        """Initiate a connection to the attenuator.

        Args:
            host: A valid hostname to an attenuator
            port: Port number to attempt connection
            timeout_sec: Seconds to wait to initiate a connection
        """
        ...

    def close(self) -> None:
        """Close the connection to the attenuator."""
        ...

    def set_atten(
        self, idx: int, value: float, strict: bool = True, retry: bool = False
    ) -> None:
        """Sets the attenuation given its index in the instrument.

        Args:
            idx: Index used to identify a particular attenuator in an instrument
            value: Value for nominal attenuation to be set
            strict: If True, raise an error when given out of bounds attenuation
            retry: If True, command will be retried if possible
        """
        ...

    def get_atten(self, idx: int, retry: bool = False) -> float:
        """Returns the current attenuation given its index in the instrument.

        Args:
            idx: Index used to identify a particular attenuator in an instrument
            retry: If True, command will be retried if possible

        Returns:
            The current attenuation value
        """
        ...
+
+
class Attenuator(object):
    """An object representing a single attenuator in a remote instrument.

    Abstracts the mapping between logical attenuators and the physical
    instruments that host them, so callers can reason purely about attenuators
    regardless of where they live.
    """

    def __init__(
        self, instrument: AttenuatorInstrument, idx: int = 0, offset: int = 0
    ) -> None:
        """Bind this object to one attenuator within an instrument.

        Args:
            instrument: The AttenuatorInstrument on which this attenuator
                resides.
            idx: Zero-based identifier of this attenuator within the
                instrument.
            offset: A power offset added to all subsequent operations, e.g.
                for calibration or for group operations with per-attenuator
                offsets.

        Raises:
            TypeError: if an invalid AttenuatorInstrument is passed in.
            IndexError: if the index is out of range for the instrument.
        """
        if not isinstance(instrument, AttenuatorInstrument):
            raise TypeError("Must provide an Attenuator Instrument Ref")
        self.instrument = instrument
        self.idx = idx
        self.offset = offset

        if self.idx >= instrument.num_atten:
            raise IndexError("Attenuator index out of range for attenuator instrument")

    def set_atten(self, value: float, strict: bool = True, retry: bool = False) -> None:
        """Set the attenuation.

        Args:
            value: Nominal attenuation to set; the configured offset is added
                before the value is sent to the instrument.
            strict: When True, raise on out-of-bounds values; otherwise the
                instrument clamps them to 0 or max_atten.
            retry: When True, the command is retried if possible.

        Raises:
            ValueError: if value + offset exceeds the instrument maximum while
                strict is True.
        """
        total = value + self.offset
        if strict and total > self.instrument.max_atten:
            raise ValueError("Attenuator Value+Offset greater than Max Attenuation!")

        self.instrument.set_atten(self.idx, total, strict=strict, retry=retry)

    def get_atten(self, retry: bool = False) -> float:
        """Returns the attenuation as a float, normalized by the offset."""
        return self.instrument.get_atten(self.idx, retry) - self.offset

    def get_max_atten(self) -> float:
        """Returns the max attenuation as a float, normalized by the offset."""
        max_value = self.instrument.max_atten
        if max_value == INVALID_MAX_ATTEN:
            raise ValueError("Invalid Max Attenuator Value")

        return max_value - self.offset
diff --git a/src/antlion/controllers/attenuator_lib/__init__.py b/packages/antlion/controllers/attenuator_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/attenuator_lib/__init__.py
rename to packages/antlion/controllers/attenuator_lib/__init__.py
diff --git a/packages/antlion/controllers/attenuator_lib/_tnhelper.py b/packages/antlion/controllers/attenuator_lib/_tnhelper.py
new file mode 100644
index 0000000..4f037aa
--- /dev/null
+++ b/packages/antlion/controllers/attenuator_lib/_tnhelper.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""A helper module to communicate over telnet with AttenuatorInstruments.
+
+User code shouldn't need to directly access this class.
+"""
+
+import logging
+import re
+import telnetlib
+
+from antlion.controllers import attenuator
+from antlion.libs.proc import job
+
+
+def _ascii_string(uc_string):
+    return str(uc_string).encode("ASCII")
+
+
class TelnetHelper(object):
    """An internal helper class for Telnet+SCPI command-based instruments.

    It should only be used by those implementation control libraries and not by
    any user code directly.
    """

    def __init__(
        self,
        tx_cmd_separator: str = "\n",
        rx_cmd_separator: str = "\n",
        prompt: str = "",
    ) -> None:
        """Create a telnet helper.

        Args:
            tx_cmd_separator: Separator appended to each transmitted command.
            rx_cmd_separator: Separator expected after each received reply.
            prompt: Prompt string emitted by the instrument, if any.
        """
        self._tn: telnetlib.Telnet | None = None
        self._ip_address: str | None = None
        self._port: int | None = None

        self.tx_cmd_separator = tx_cmd_separator
        self.rx_cmd_separator = rx_cmd_separator
        self.prompt = prompt

    def open(self, host: str, port: int = 23) -> None:
        """Open (or reopen) a telnet connection to the instrument."""
        self._ip_address = host
        self._port = port
        if self._tn:
            self._tn.close()
        logging.debug("Telnet Server IP = %s", host)
        self._tn = telnetlib.Telnet(host, port, timeout=10)

    def is_open(self) -> bool:
        """Return True if a telnet connection is currently open."""
        return self._tn is not None

    def close(self) -> None:
        """Close the telnet connection, if open."""
        if self._tn:
            self._tn.close()
            self._tn = None

    def diagnose_telnet(self, host: str, port: int) -> bool:
        """Function that diagnoses telnet connections.

        This function diagnoses telnet connections and can be used in case of
        command failures. The function checks if the device is still reachable
        via ping, and whether or not it can close and reopen the telnet
        connection.

        Returns:
            False when telnet server is unreachable or unresponsive
            True when telnet server is reachable and telnet connection has been
            successfully reopened
        """
        logging.debug("Diagnosing telnet connection")
        try:
            job_result = job.run(f"ping {host} -c 5 -i 0.2")
        except Exception as e:
            logging.error("Unable to ping telnet server: %s", e)
            return False
        ping_output = job_result.stdout
        if not re.search(r" 0% packet loss", ping_output):
            logging.error("Ping Packets Lost. Result: %s", ping_output)
            return False
        try:
            self.close()
        except Exception as e:
            logging.error("Cannot close telnet connection: %s", e)
            return False
        try:
            self.open(host, port)
        except Exception as e:
            logging.error("Cannot reopen telnet connection: %s", e)
            return False
        logging.debug("Telnet connection likely recovered")
        return True

    def cmd(self, cmd_str: str, retry: bool = False) -> str:
        """Send a command to the instrument and return its reply.

        Args:
            cmd_str: Command to send (without trailing separator).
            retry: When True, retry the command once after attempting to
                recover the telnet connection.

        Returns:
            The instrument's reply, stripped of separators and prompt.

        Raises:
            TypeError: if cmd_str is not a string.
            attenuator.InvalidOperationError: if no connection is open.
            attenuator.InvalidDataError: if no valid reply was received.
        """
        if not isinstance(cmd_str, str):
            raise TypeError("Invalid command string", cmd_str)

        if self._tn is None or self._ip_address is None or self._port is None:
            raise attenuator.InvalidOperationError(
                "Telnet connection not open for commands"
            )

        # Normalize the command so exactly one separator is sent. (Previously
        # the stripped result was discarded, making this a no-op.)
        cmd_str = cmd_str.strip(self.tx_cmd_separator)
        # Drain any pending prompt before writing the command.
        self._tn.read_until(_ascii_string(self.prompt), 2)
        self._tn.write(_ascii_string(cmd_str + self.tx_cmd_separator))

        match_idx, match_val, ret_text = self._tn.expect(
            [_ascii_string(f"\\S+{self.rx_cmd_separator}")], 1
        )

        logging.debug("Telnet Command: %s", cmd_str)
        logging.debug("Telnet Reply: (%s, %s, %s)", match_idx, match_val, ret_text)

        if match_idx == -1:
            # No valid reply; try to recover the connection and retry once.
            telnet_recovered = self.diagnose_telnet(self._ip_address, self._port)
            if telnet_recovered and retry:
                logging.debug("Retrying telnet command once.")
                return self.cmd(cmd_str, retry=False)
            else:
                raise attenuator.InvalidDataError(
                    "Telnet command failed to return valid data"
                )

        ret_str = ret_text.decode()
        ret_str = ret_str.strip(
            self.tx_cmd_separator + self.rx_cmd_separator + self.prompt
        )
        return ret_str
diff --git a/src/antlion/controllers/attenuator_lib/aeroflex/__init__.py b/packages/antlion/controllers/attenuator_lib/aeroflex/__init__.py
similarity index 100%
rename from src/antlion/controllers/attenuator_lib/aeroflex/__init__.py
rename to packages/antlion/controllers/attenuator_lib/aeroflex/__init__.py
diff --git a/packages/antlion/controllers/attenuator_lib/aeroflex/telnet.py b/packages/antlion/controllers/attenuator_lib/aeroflex/telnet.py
new file mode 100644
index 0000000..f4544f3
--- /dev/null
+++ b/packages/antlion/controllers/attenuator_lib/aeroflex/telnet.py
@@ -0,0 +1,136 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Class for Telnet control of Aeroflex 832X and 833X Series Attenuator Modules
+
+This class provides a wrapper to the Aeroflex attenuator modules for purposes
+of simplifying and abstracting control down to the basic necessities. It is
+not the intention of the module to expose all functionality, but to allow
+interchangeable HW to be used.
+
+See http://www.aeroflex.com/ams/weinschel/PDFILES/IM-608-Models-8320-&-8321-preliminary.pdf
+"""
+
+from antlion.controllers import attenuator
+from antlion.controllers.attenuator_lib import _tnhelper
+
+
class AttenuatorInstrument(attenuator.AttenuatorInstrument):
    """Telnet-based control of Aeroflex 832X and 833X attenuator modules.

    Only the basic functionality needed for interchangeable attenuator
    hardware is exposed; the instrument's full command set is intentionally
    not wrapped.
    """

    def __init__(self, num_atten: int = 0) -> None:
        self._num_atten = num_atten
        self._max_atten = attenuator.INVALID_MAX_ATTEN

        self._tnhelper = _tnhelper.TelnetHelper(
            tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=">"
        )
        self._properties: dict[str, str] | None = None
        self._address: str | None = None

    @property
    def address(self) -> str | None:
        """Hostname this instrument was opened against, if any."""
        return self._address

    @property
    def num_atten(self) -> int:
        """Number of attenuator channels on this instrument."""
        return self._num_atten

    @property
    def max_atten(self) -> float:
        """Maximum attenuation value reported by the instrument."""
        return self._max_atten

    def open(self, host: str, port: int, _timeout_sec: int = 5) -> None:
        """Initiate a telnet connection to the attenuator.

        Args:
            host: A valid hostname to an attenuator
            port: Port number to attempt connection
            _timeout_sec: Unused; kept for interface compatibility
        """
        self._tnhelper.open(host, port)

        # work around a bug in IO, but this is a good thing to do anyway
        self._tnhelper.cmd("*CLS", False)
        self._address = host

        if self._num_atten == 0:
            self._num_atten = int(self._tnhelper.cmd("RFCONFIG? CHAN"))

        config_reply = self._tnhelper.cmd("RFCONFIG? ATTN 1")

        field_names = ["model", "max_atten", "min_step", "unknown", "unknown2", "cfg_str"]
        self._properties = dict(zip(field_names, config_reply.split(", ", 5)))

        self._max_atten = float(self._properties["max_atten"])

    def close(self) -> None:
        """Close the connection to the attenuator."""
        self._tnhelper.close()

    def set_atten(
        self, idx: int, value: float, _strict: bool = True, _retry: bool = False
    ) -> None:
        """Sets the attenuation given its index in the instrument.

        Args:
            idx: Index used to identify a particular attenuator in an instrument
            value: Value for nominal attenuation to be set
            _strict: Unused; kept for interface compatibility
            _retry: Unused; kept for interface compatibility

        Raises:
            InvalidOperationError if the telnet connection is not open.
            IndexError if the index is not valid for this instrument.
            ValueError if the requested set value is greater than the maximum
                attenuation value.
        """
        if not self._tnhelper.is_open():
            raise attenuator.InvalidOperationError("Connection not open!")

        if idx >= self._num_atten:
            raise IndexError("Attenuator index out of range!", self._num_atten, idx)

        if value > self._max_atten:
            raise ValueError("Attenuator value out of range!", self._max_atten, value)

        # The device uses a one-based channel index.
        self._tnhelper.cmd(f"ATTN {idx + 1} {value}", False)

    def get_atten(self, idx: int, _retry: bool = False) -> float:
        """Returns the current attenuation given its index in the instrument.

        Args:
            idx: Index used to identify a particular attenuator in an instrument
            _retry: Unused; kept for interface compatibility

        Raises:
            InvalidOperationError if the telnet connection is not open.

        Returns:
            The current attenuation value
        """
        if not self._tnhelper.is_open():
            raise attenuator.InvalidOperationError("Connection not open!")

        # NOTE: no index bounds check here; it was removed upstream as
        # potentially redundant.
        return float(self._tnhelper.cmd(f"ATTN? {idx + 1}"))
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/__init__.py b/packages/antlion/controllers/attenuator_lib/minicircuits/__init__.py
similarity index 100%
rename from src/antlion/controllers/attenuator_lib/minicircuits/__init__.py
rename to packages/antlion/controllers/attenuator_lib/minicircuits/__init__.py
diff --git a/packages/antlion/controllers/attenuator_lib/minicircuits/http.py b/packages/antlion/controllers/attenuator_lib/minicircuits/http.py
new file mode 100644
index 0000000..98118ad
--- /dev/null
+++ b/packages/antlion/controllers/attenuator_lib/minicircuits/http.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Class for HTTP control of Mini-Circuits RCDAT series attenuators
+
+This class provides a wrapper to the MC-RCDAT attenuator modules for purposes
+of simplifying and abstracting control down to the basic necessities. It is
+not the intention of the module to expose all functionality, but to allow
+interchangeable HW to be used.
+
+See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf
+"""
+
+import urllib.request
+
+from antlion.controllers import attenuator
+
+
class AttenuatorInstrument(attenuator.AttenuatorInstrument):
    """A specific HTTP-controlled implementation of AttenuatorInstrument for
    Mini-Circuits RC-DAT attenuators.

    With the exception of HTTP-specific commands, all functionality is defined
    by the AttenuatorInstrument class.
    """

    def __init__(self, num_atten: int = 1) -> None:
        self._num_atten = num_atten
        self._max_atten = attenuator.INVALID_MAX_ATTEN

        self._ip_address: str | None = None
        self._port: int | None = None
        self._timeout: int | None = None
        self._address: str | None = None

    @property
    def address(self) -> str | None:
        """Hostname this instrument was opened against, if any."""
        return self._address

    @property
    def num_atten(self) -> int:
        """Number of attenuator channels on this instrument."""
        return self._num_atten

    @property
    def max_atten(self) -> float:
        """Maximum attenuation value reported by the instrument."""
        return self._max_atten

    def open(self, host: str, port: int = 80, timeout_sec: int = 2) -> None:
        """Initiate a connection to the attenuator.

        Args:
            host: A valid hostname to an attenuator
            port: Port number to attempt connection
            timeout_sec: Seconds to wait to initiate a connection

        Raises:
            InvalidDataError if the attenuator does not respond with the
                expected "MN=..." model string.
        """
        self._ip_address = host
        self._port = port
        self._timeout = timeout_sec
        self._address = host

        # FIX: bound this request with the configured timeout; previously it
        # could hang indefinitely on an unresponsive attenuator, unlike every
        # other request in this class.
        att_req = urllib.request.urlopen(
            f"http://{self._ip_address}:{self._port}/MN?", timeout=self._timeout
        )
        config_str = att_req.read().decode("utf-8").strip()
        if not config_str.startswith("MN="):
            raise attenuator.InvalidDataError(
                f"Attenuator returned invalid data. Attenuator returned: {config_str}"
            )

        config_str = config_str[len("MN=") :]
        properties = dict(
            zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
        )
        self._max_atten = float(properties["max_atten"])

    def close(self) -> None:
        """Close the connection to the attenuator."""
        # Since this controller is based on HTTP requests, there is no
        # connection teardown required.

    def set_atten(
        self, idx: int, value: float, strict: bool = True, retry: bool = False
    ) -> None:
        """Sets the attenuation given its index in the instrument.

        Args:
            idx: Index used to identify a particular attenuator in an instrument
            value: Value for nominal attenuation to be set
            strict: If True, raise an error when given out of bounds attenuation
            retry: If True, command will be retried if possible

        Raises:
            IndexError if the attenuator index is out of range.
            ValueError if strict and the value exceeds the maximum attenuation.
            InvalidDataError if the attenuator does not respond with the
            expected output.
        """
        if not (0 <= idx < self._num_atten):
            raise IndexError("Attenuator index out of range!", self._num_atten, idx)

        if value > self._max_atten and strict:
            raise ValueError("Attenuator value out of range!", self._max_atten, value)
        # The actual device uses one-based index for channel numbers.
        adjusted_value = min(max(0, value), self._max_atten)
        att_req = urllib.request.urlopen(
            "http://{}:{}/CHAN:{}:SETATT:{}".format(
                self._ip_address, self._port, idx + 1, adjusted_value
            ),
            timeout=self._timeout,
        )
        att_resp = att_req.read().decode("utf-8").strip()
        # The device acknowledges a successful SETATT with the literal "1".
        if att_resp != "1":
            if retry:
                self.set_atten(idx, value, strict, retry=False)
            else:
                raise attenuator.InvalidDataError(
                    f"Attenuator returned invalid data. Attenuator returned: {att_resp}"
                )

    def get_atten(self, idx: int, retry: bool = False) -> float:
        """Returns the current attenuation of the attenuator at the given index.

        Args:
            idx: The index of the attenuator.
            retry: if True, command will be retried if possible

        Raises:
            IndexError if the attenuator index is out of range.
            InvalidDataError if the attenuator does not respond with the
            expected output

        Returns:
            the current attenuation value as a float
        """
        if not (0 <= idx < self._num_atten):
            raise IndexError("Attenuator index out of range!", self._num_atten, idx)
        att_req = urllib.request.urlopen(
            f"http://{self._ip_address}:{self._port}/CHAN:{idx + 1}:ATT?",
            timeout=self._timeout,
        )
        att_resp = att_req.read().decode("utf-8").strip()
        try:
            return float(att_resp)
        except (TypeError, ValueError) as e:
            # BUG FIX: float() raises ValueError for a non-numeric string,
            # which is the realistic failure mode here since att_resp is
            # always a str. The original clause caught only TypeError, so a
            # malformed response escaped the retry/InvalidDataError handling.
            if retry:
                return self.get_atten(idx, retry=False)

            raise attenuator.InvalidDataError(
                f"Attenuator returned invalid data. Attenuator returned: {att_resp}"
            ) from e
diff --git a/packages/antlion/controllers/attenuator_lib/minicircuits/telnet.py b/packages/antlion/controllers/attenuator_lib/minicircuits/telnet.py
new file mode 100644
index 0000000..bd70386
--- /dev/null
+++ b/packages/antlion/controllers/attenuator_lib/minicircuits/telnet.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Class for Telnet control of Mini-Circuits RCDAT series attenuators
+
+This class provides a wrapper to the MC-RCDAT attenuator modules for purposes
+of simplifying and abstracting control down to the basic necessities. It is
+not the intention of the module to expose all functionality, but to allow
+interchangeable HW to be used.
+
+See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf
+"""
+
+from antlion.controllers import attenuator
+from antlion.controllers.attenuator_lib import _tnhelper
+
+
class AttenuatorInstrument(attenuator.AttenuatorInstrument):
    """A specific telnet-controlled implementation of AttenuatorInstrument for
    Mini-Circuits RC-DAT attenuators.

    With the exception of telnet-specific commands, all functionality is defined
    by the AttenuatorInstrument class. Because telnet is a stateful protocol,
    the functionality of AttenuatorInstrument is contingent upon a telnet
    connection being established.
    """

    def __init__(self, num_atten: int = 0) -> None:
        self._num_atten = num_atten
        self._max_atten = attenuator.INVALID_MAX_ATTEN
        # Parsed "model-max_freq-max_atten" fields from the MN? reply.
        self.properties: dict[str, str] | None = None
        self._tnhelper = _tnhelper.TelnetHelper(
            tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=""
        )
        self._address: str | None = None

    @property
    def address(self) -> str | None:
        """Hostname this instrument was opened against, if any."""
        return self._address

    @property
    def num_atten(self) -> int:
        """Number of attenuator channels on this instrument."""
        return self._num_atten

    @property
    def max_atten(self) -> float:
        """Maximum attenuation value reported by the instrument."""
        return self._max_atten

    def __del__(self) -> None:
        # Best-effort teardown of the stateful telnet session on GC.
        if self._tnhelper.is_open():
            self.close()

    def open(self, host: str, port: int, _timeout_sec: int = 5) -> None:
        """Initiate a connection to the attenuator.

        Args:
            host: A valid hostname to an attenuator
            port: Port number to attempt connection
            _timeout_sec: Unused; kept for interface compatibility
        """
        self._tnhelper.open(host, port)
        self._address = host

        if self._num_atten == 0:
            self._num_atten = 1

        config_str = self._tnhelper.cmd("MN?")

        if config_str.startswith("MN="):
            config_str = config_str[len("MN=") :]

        self.properties = dict(
            zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
        )
        self._max_atten = float(self.properties["max_atten"])

    def close(self) -> None:
        """Close the connection to the attenuator."""
        self._tnhelper.close()

    def set_atten(
        self, idx: int, value: float, strict: bool = True, retry: bool = False
    ) -> None:
        """Sets the attenuation given its index in the instrument.

        Args:
            idx: Index used to identify a particular attenuator in an instrument
            value: Value for nominal attenuation to be set
            strict: If True, raise an error when given out of bounds attenuation
            retry: If True, command will be retried if possible

        Raises:
            InvalidOperationError if the telnet connection is not open.
            IndexError if the index is not valid for this instrument.
            ValueError if the requested set value is greater than the maximum
                attenuation value.
        """

        if not self._tnhelper.is_open():
            raise attenuator.InvalidOperationError("Connection not open!")

        # FIX: also reject negative indices, consistent with get_atten and
        # the HTTP implementation; idx=-1 would otherwise send an invalid
        # "CHAN:0" command to the device.
        if idx >= self._num_atten or idx < 0:
            raise IndexError("Attenuator index out of range!", self._num_atten, idx)

        if value > self._max_atten and strict:
            raise ValueError("Attenuator value out of range!", self._max_atten, value)
        # The actual device uses one-based index for channel numbers.
        adjusted_value = min(max(0, value), self._max_atten)
        self._tnhelper.cmd(f"CHAN:{idx + 1}:SETATT:{adjusted_value}", retry=retry)

    def get_atten(self, idx: int, retry: bool = False) -> float:
        """Returns the current attenuation given its index in the instrument.

        Args:
            idx: Index used to identify a particular attenuator in an instrument
            retry: If True, command will be retried if possible

        Returns:
            The current attenuation value

        Raises:
            InvalidOperationError if the telnet connection is not open.
            IndexError if the index is not valid for this instrument.
        """
        if not self._tnhelper.is_open():
            raise attenuator.InvalidOperationError("Connection not open!")

        if idx >= self._num_atten or idx < 0:
            raise IndexError("Attenuator index out of range!", self._num_atten, idx)

        # Single-channel devices use the bare ":ATT?" query form.
        if self._num_atten == 1:
            atten_val_str = self._tnhelper.cmd(":ATT?", retry=retry)
        else:
            atten_val_str = self._tnhelper.cmd(f"CHAN:{idx + 1}:ATT?", retry=retry)
        atten_val = float(atten_val_str)
        return atten_val
diff --git a/packages/antlion/controllers/fastboot.py b/packages/antlion/controllers/fastboot.py
new file mode 100755
index 0000000..40fa702
--- /dev/null
+++ b/packages/antlion/controllers/fastboot.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion import error
+from antlion.libs.proc import job
+
+
class FastbootError(error.ActsError):
    """Raised when a fastboot command exits with a non-zero status."""

    def __init__(self, cmd, stdout, stderr, ret_code):
        super().__init__()
        # Preserve the full command context for diagnostics.
        self.cmd = cmd
        self.stdout = stdout
        self.stderr = stderr
        self.ret_code = ret_code

    def __str__(self):
        return (
            f"Error executing fastboot cmd '{self.cmd}'. "
            f"ret: {self.ret_code}, stdout: {self.stdout}, stderr: {self.stderr}"
        )
+
+
class FastbootProxy:
    """Proxy class for fastboot.

    For syntactic reasons, the '-' in fastboot commands need to be replaced
    with '_'. Can directly execute fastboot commands on an object:
    >> fb = FastbootProxy(<serial>)
    >> fb.devices() # will return the console output of "fastboot devices".
    """

    def __init__(self, serial="", ssh_connection=None):
        self.serial = serial
        # Target a specific device only when a serial was provided.
        self.fastboot_str = f"fastboot -s {serial}" if serial else "fastboot"
        self.ssh_connection = ssh_connection

    def _exec_fastboot_cmd(self, name, arg_str, ignore_status=False, timeout=60):
        """Run a single fastboot subcommand locally or over SSH."""
        command = f"{self.fastboot_str} {name} {arg_str}"
        if self.ssh_connection:
            result = self.ssh_connection.run(
                command, ignore_status=True, timeout_sec=timeout
            )
        else:
            result = job.run(command, ignore_status=True, timeout_sec=timeout)
        ret = result.exit_status
        out = result.stdout
        err = result.stderr
        # TODO: This is only a temporary workaround for b/34815412.
        # fastboot getvar outputs to stderr instead of stdout
        if "getvar" in command:
            out = err
        if ret != 0 and not ignore_status:
            raise FastbootError(cmd=command, stdout=out, stderr=err, ret_code=ret)
        return out

    def args(self, *args, **kwargs):
        """Run fastboot with raw arguments and return its stdout."""
        full_command = " ".join((self.fastboot_str,) + args)
        return job.run(full_command, **kwargs).stdout

    def __getattr__(self, name):
        """Translate unknown attribute access into a fastboot subcommand."""

        def fastboot_call(*args, **kwargs):
            # '-' is not valid in Python identifiers, so '_' in the
            # attribute name maps to '-' in the fastboot subcommand.
            subcommand = name.replace("_", "-")
            joined_args = " ".join(str(elem) for elem in args)
            return self._exec_fastboot_cmd(subcommand, joined_args, **kwargs)

        return fastboot_call
diff --git a/packages/antlion/controllers/fuchsia_device.py b/packages/antlion/controllers/fuchsia_device.py
new file mode 100644
index 0000000..7204040
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_device.py
@@ -0,0 +1,819 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+import os
+import re
+import socket
+import textwrap
+import time
+from typing import Any
+
+import fuchsia_controller_py as fuchsia_controller
+import honeydew
+from honeydew.interfaces.device_classes.fuchsia_device import (
+    FuchsiaDevice as HdFuchsiaDevice,
+)
+from honeydew.typing.custom_types import TRANSPORT, FFXConfig
+from honeydew.typing.wlan import CountryCode
+from mobly import logger, signals
+
+from antlion import context
+from antlion import logger as acts_logger
+from antlion import utils
+from antlion.capabilities.ssh import DEFAULT_SSH_PORT, SSHConfig, SSHError
+from antlion.controllers import pdu
+from antlion.controllers.fuchsia_lib.ffx import FFX
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_controller import (
+    WlanController,
+)
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
+    WlanPolicyController,
+)
+from antlion.controllers.fuchsia_lib.package_server import PackageServer
+from antlion.controllers.fuchsia_lib.sl4f import SL4F
+from antlion.controllers.fuchsia_lib.ssh import (
+    DEFAULT_SSH_PRIVATE_KEY,
+    DEFAULT_SSH_USER,
+    FuchsiaSSHProvider,
+)
+from antlion.controllers.fuchsia_lib.utils_lib import flash
+from antlion.decorators import cached_property
+from antlion.types import ControllerConfig
+from antlion.utils import (
+    PingResult,
+    get_fuchsia_mdns_ipv6_address,
+    get_interface_ip_addresses,
+)
+from antlion.validation import FieldNotFoundError, MapValidator
+
# Identifiers used by Mobly to locate this controller module and to key its
# devices in the test-bed configuration.
MOBLY_CONTROLLER_CONFIG_NAME = "FuchsiaDevice"
ACTS_CONTROLLER_REFERENCE_NAME = "fuchsia_devices"

# Delay before reconnecting after a reboot (presumably seconds — TODO confirm).
FUCHSIA_RECONNECT_AFTER_REBOOT_TIME = 5

# Supported reboot styles accepted by reboot-related APIs.
FUCHSIA_REBOOT_TYPE_SOFT = "soft"
FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH = "flash"
FUCHSIA_REBOOT_TYPE_HARD = "hard"

# Default timeouts (presumably seconds — TODO confirm against call sites).
FUCHSIA_DEFAULT_CONNECT_TIMEOUT = 90
FUCHSIA_DEFAULT_COMMAND_TIMEOUT = 60

FUCHSIA_DEFAULT_CLEAN_UP_COMMAND_TIMEOUT = 15

FUCHSIA_COUNTRY_CODE_TIMEOUT = 15
# Country code applied when none is given in the device config.
FUCHSIA_DEFAULT_COUNTRY_CODE_US = "US"

# Maximum attempts when resolving a device name via mDNS in __init__.
MDNS_LOOKUP_RETRY_MAX = 3

# Duration to wait for the Fuchsia device to acquire an IP address after
# requested to join a network.
#
# Acquiring an IP address after connecting to a WLAN network could take up to
# 15 seconds if we get unlucky:
#
#  1. An outgoing passive scan just started (~7s)
#  2. An active scan is queued for the newly saved network (~7s)
#  3. The initial connection attempt fails (~1s)
IP_ADDRESS_TIMEOUT = 30
+
+
class FuchsiaDeviceError(signals.ControllerError):
    """Raised for FuchsiaDevice operation failures (e.g. bad config paths)."""

    pass
+
+
class FuchsiaConfigError(signals.ControllerError):
    """Raised when the FuchsiaDevice controller configuration is invalid."""
+
+
def create(configs: list[ControllerConfig]) -> list[FuchsiaDevice]:
    """Instantiate one FuchsiaDevice per Mobly controller config entry."""
    devices = []
    for device_config in configs:
        devices.append(FuchsiaDevice(device_config))
    return devices
+
+
def destroy(fds: list[FuchsiaDevice]) -> None:
    """Clean up each FuchsiaDevice controller and drop the reference."""
    for device in fds:
        device.clean_up()
        del device
+
+
def get_info(fds: list[FuchsiaDevice]) -> list[dict[str, str]]:
    """Get information on a list of FuchsiaDevice objects."""
    return [{"ip": fd.ip} for fd in fds]
+
+
+class FuchsiaDevice:
+    """Class representing a Fuchsia device.
+
+    Each object of this class represents one Fuchsia device in ACTS.
+
+    Attributes:
+        ip: The full address or Fuchsia abstract name to contact the Fuchsia
+            device at
+        log: A logger object.
+        ssh_port: The SSH TCP port number of the Fuchsia device.
+        sl4f_port: The SL4F HTTP port number of the Fuchsia device.
+        ssh_config: The ssh_config for connecting to the Fuchsia device.
+    """
+
    def __init__(self, controller_config: ControllerConfig) -> None:
        """Initialize a FuchsiaDevice from its Mobly controller config.

        Args:
            controller_config: One "FuchsiaDevice" entry from the test-bed
                configuration.

        Raises:
            FuchsiaDeviceError: If a configured path is missing or does not
                exist on disk.
            FuchsiaConfigError: If a required config field is absent (e.g.
                ssh_priv_key, or mdns_name when honeydew is enabled).
            ValueError: If the configured "ip" is neither a valid IP address
                nor a resolvable mDNS name.
        """
        config = MapValidator(controller_config)
        self.ip = config.get(str, "ip")
        # Normalize an IPv6 scope ID given as a numeric index to its
        # interface name (e.g. "fe80::1%3" -> "fe80::1%eth0").
        if "%" in self.ip:
            addr, scope_id = self.ip.split("%", 1)
            try:
                if_name = socket.if_indextoname(int(scope_id))
                self.ip = f"{addr}%{if_name}"
            except ValueError:
                # Scope ID is likely already the interface name, no change necessary.
                pass
        self.orig_ip = self.ip
        self.sl4f_port = config.get(int, "sl4f_port", 80)
        self.ssh_username = config.get(str, "ssh_username", DEFAULT_SSH_USER)
        self.ssh_port = config.get(int, "ssh_port", DEFAULT_SSH_PORT)
        self.ssh_binary_path = config.get(str, "ssh_binary_path", "ssh")

        # Local helpers for resolving and validating configured paths.
        def expand(path: str) -> str:
            return os.path.expandvars(os.path.expanduser(path))

        def path_from_config(name: str, default: str | None = None) -> str | None:
            path = config.get(str, name, default)
            return None if path is None else expand(path)

        def assert_exists(name: str, path: str | None) -> None:
            if path is None:
                raise FuchsiaDeviceError(
                    f'Please specify "${name}" in your configuration file'
                )
            if not os.path.exists(path):
                raise FuchsiaDeviceError(
                    f'Please specify a correct "${name}" in your configuration '
                    f'file: "{path}" does not exist'
                )

        self.specific_image: str | None = path_from_config("specific_image")
        if self.specific_image:
            assert_exists("specific_image", self.specific_image)

        # Path to a tar.gz archive with pm and amber-files, as necessary for
        # starting a package server.
        self.packages_archive_path: str | None = path_from_config(
            "packages_archive_path"
        )
        if self.packages_archive_path:
            assert_exists("packages_archive_path", self.packages_archive_path)

        def required_path_from_config(name: str, default: str | None = None) -> str:
            path = path_from_config(name, default)
            if path is None:
                raise FuchsiaConfigError(f"{name} is a required config field")
            assert_exists(name, path)
            return path

        self.ssh_priv_key: str = required_path_from_config(
            "ssh_priv_key", DEFAULT_SSH_PRIVATE_KEY
        )
        self.ffx_binary_path: str = required_path_from_config(
            "ffx_binary_path", "${FUCHSIA_DIR}/.jiri_root/bin/ffx"
        )
        self.ffx_subtools_search_path: str | None = path_from_config(
            "ffx_subtools_search_path"
        )

        # Optional device-identity fields; all default to None when absent.
        self.authorized_file = config.get(str, "authorized_file_loc", None)
        self.serial_number = config.get(str, "serial_number", None)
        self.device_type = config.get(str, "device_type", None)
        self.product_type = config.get(str, "product_type", None)
        self.board_type = config.get(str, "board_type", None)
        self.build_number = config.get(str, "build_number", None)
        self.build_type = config.get(str, "build_type", None)
        self.mdns_name = config.get(str, "mdns_name", None)

        self.enable_honeydew = config.get(bool, "enable_honeydew", False)
        self.hard_reboot_on_fail = config.get(bool, "hard_reboot_on_fail", False)
        self.take_bug_report_on_fail = config.get(
            bool, "take_bug_report_on_fail", False
        )
        self.device_pdu_config = config.get(dict, "PduDevice", {})
        self.config_country_code = config.get(
            str, "country_code", FUCHSIA_DEFAULT_COUNTRY_CODE_US
        ).upper()

        # Write a per-device ssh_config into this run's output directory.
        output_path = context.get_current_context().get_base_output_path()
        self.ssh_config = os.path.join(output_path, f"ssh_config_{self.ip}")
        self._generate_ssh_config(self.ssh_config)

        # WLAN interface info is populated inside configure_wlan
        self.wlan_client_interfaces: dict[str, Any] = {}
        self.wlan_ap_interfaces: dict[str, Any] = {}
        self.wlan_client_test_interface_name = config.get(
            str, "wlan_client_test_interface", None
        )
        self.wlan_ap_test_interface_name = config.get(
            str, "wlan_ap_test_interface", None
        )
        try:
            self.wlan_features: list[str] = config.list("wlan_features").all(str)
        except FieldNotFoundError:
            # "wlan_features" is optional; missing means no extra features.
            self.wlan_features = []

        # Whether to use 'policy' or 'drivers' for WLAN connect/disconnect calls
        # If set to None, wlan is not configured.
        self.association_mechanism: str | None = None
        # Defaults to policy layer, unless otherwise specified in the config
        self.default_association_mechanism = config.get(
            str, "association_mechanism", "policy"
        )

        # Whether to clear and preserve existing saved networks and client
        # connections state, to be restored at device teardown.
        self.default_preserve_saved_networks = config.get(
            bool, "preserve_saved_networks", True
        )

        # If "ip" is not a literal IP address, treat it as an mDNS name and
        # resolve it (with retries) to an IPv6 address.
        if not utils.is_valid_ipv4_address(self.ip) and not utils.is_valid_ipv6_address(
            self.ip
        ):
            mdns_ip = None
            for retry_counter in range(MDNS_LOOKUP_RETRY_MAX):
                mdns_ip = get_fuchsia_mdns_ipv6_address(self.ip)
                if mdns_ip:
                    break
                else:
                    time.sleep(1)
            if mdns_ip and utils.is_valid_ipv6_address(mdns_ip):
                # self.ip was actually an mdns name. Use it for self.mdns_name
                # unless one was explicitly provided.
                self.mdns_name = self.mdns_name or self.ip
                self.ip = mdns_ip
            else:
                raise ValueError(f"Invalid IP: {self.ip}")

        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[FuchsiaDevice | {self.orig_ip}]",
            },
        )

        self.ping_rtt_match = re.compile(
            r"RTT Min/Max/Avg = \[ ([0-9.]+) / ([0-9.]+) / ([0-9.]+) \] ms"
        )
        # Filesystem/identifier-safe form of the IP for use as a serial.
        self.serial = re.sub("[.:%]", "_", self.ip)
        self.package_server: PackageServer | None = None

        # Create honeydew fuchsia_device if the flag "use_honeydew" is True.
        self.honeydew_fd: HdFuchsiaDevice | None = None
        if self.enable_honeydew:
            if not self.mdns_name:
                raise FuchsiaConfigError(
                    'Must provide "mdns_name: <device mDNS name>" in the device config '
                    "if use_honeydew is True"
                )

            hd_ffx_config = FFXConfig(
                binary_path=self.ffx_binary_path,
                isolate_dir=fuchsia_controller.IsolateDir(None),
                # TODO(http://b/324454126): Remove type ignore
                logs_dir=f"{logging.log_path}/ffx/",  # type: ignore[attr-defined]
                logs_level="None",
                mdns_enabled=False,
                subtools_search_path=None,
            )

            self.honeydew_fd = honeydew.create_device(
                device_name=self.mdns_name,
                transport=TRANSPORT.FUCHSIA_CONTROLLER_PREFERRED,
                ffx_config=hd_ffx_config,
            )
+
+    @cached_property
+    def sl4f(self) -> SL4F:
+        """Get the sl4f module configured for this device."""
+        self.log.info("Started SL4F server")
+        return SL4F(self.ssh, self.sl4f_port, self.honeydew_fd)
+
+    @cached_property
+    def ssh(self) -> FuchsiaSSHProvider:
+        """Get the SSH provider module configured for this device."""
+        if not self.ssh_port:
+            raise FuchsiaConfigError(
+                'Must provide "ssh_port: <int>" in the device config'
+            )
+        if not self.ssh_priv_key:
+            raise FuchsiaConfigError(
+                'Must provide "ssh_priv_key: <file path>" in the device config'
+            )
+        return FuchsiaSSHProvider(
+            SSHConfig(
+                self.ssh_username,
+                self.ip,
+                self.ssh_priv_key,
+                port=self.ssh_port,
+                ssh_binary=self.ssh_binary_path,
+            )
+        )
+
+    @cached_property
+    def ffx(self) -> FFX:
+        """Get the ffx module configured for this device.
+
+        The ffx module uses lazy-initialization; it will initialize an ffx
+        connection to the device when it is required.
+
+        If ffx needs to be reinitialized, delete the "ffx" property and attempt
+        access again. Note re-initialization will interrupt any running ffx
+        calls.
+        """
+        if not self.mdns_name:
+            raise FuchsiaConfigError(
+                'Must provide "mdns_name: <device mDNS name>" in the device config'
+            )
+        return FFX(
+            self.ffx_binary_path,
+            self.mdns_name,
+            self.ip,
+            self.ssh_priv_key,
+            self.ffx_subtools_search_path,
+        )
+
+    @ffx.deleter
+    # TODO(https://github.com/python/mypy/issues/11008): Rename to ffx
+    def ffx_deleter(self, ffx: FFX) -> None:
+        """Clean up the cached ffx module when `del self.ffx` is invoked."""
+        self.log.debug("Cleaning up ffx")
+        ffx.clean_up()
+
+    @cached_property
+    def wlan_policy_controller(self) -> WlanPolicyController:
+        """Lazily construct the policy-layer WLAN controller (uses SL4F + SSH)."""
+        return WlanPolicyController(self.sl4f, self.ssh)
+
+    @cached_property
+    def wlan_controller(self) -> WlanController:
+        """Lazily construct the driver-level WLAN controller (uses SL4F)."""
+        return WlanController(self.sl4f)
+
+    def _generate_ssh_config(self, file_path: str) -> None:
+        """Generate and write an SSH config for Fuchsia to disk.
+
+        Args:
+            file_path: Path to write the generated SSH config
+        """
+        content = textwrap.dedent(
+            f"""\
+            Host *
+                CheckHostIP no
+                StrictHostKeyChecking no
+                ForwardAgent no
+                ForwardX11 no
+                GSSAPIDelegateCredentials no
+                UserKnownHostsFile /dev/null
+                User fuchsia
+                IdentitiesOnly yes
+                IdentityFile {self.ssh_priv_key}
+                ControlPersist yes
+                ControlMaster auto
+                ControlPath /tmp/fuchsia--%r@%h:%p
+                ServerAliveInterval 1
+                ServerAliveCountMax 1
+                LogLevel ERROR
+            """
+        )
+
+        with open(file_path, "w", encoding="utf-8") as file:
+            file.write(content)
+
+    def start_package_server(self) -> None:
+        if not self.packages_archive_path:
+            self.log.warn(
+                "packages_archive_path is not specified. "
+                "Assuming a package server is already running and configured on "
+                "the DUT. If this is not the case, either run your own package "
+                "server, or configure these fields appropriately. "
+                "This is usually required for the Fuchsia iPerf3 client or "
+                "other testing utilities not on device cache."
+            )
+            return
+        if self.package_server:
+            self.log.warn(
+                "Skipping to start the package server since is already running"
+            )
+            return
+
+        self.package_server = PackageServer(self.packages_archive_path)
+        self.package_server.start()
+        self.package_server.configure_device(self.ssh)
+
+    def update_wlan_interfaces(self) -> None:
+        """Retrieves WLAN interfaces from device and sets the FuchsiaDevice
+        attributes.
+        """
+        wlan_interfaces = self.wlan_controller.get_interfaces_by_role()
+        self.wlan_client_interfaces = wlan_interfaces.client
+        self.wlan_ap_interfaces = wlan_interfaces.ap
+
+        # Set test interfaces to value from config, else the first found
+        # interface, else None
+        if self.wlan_client_test_interface_name is None:
+            self.wlan_client_test_interface_name = next(
+                iter(self.wlan_client_interfaces), None
+            )
+
+        if self.wlan_ap_test_interface_name is None:
+            self.wlan_ap_test_interface_name = next(iter(self.wlan_ap_interfaces), None)
+
+    def configure_wlan(
+        self,
+        association_mechanism: str | None = None,
+        preserve_saved_networks: bool | None = None,
+    ) -> None:
+        """
+        Readies device for WLAN functionality. If applicable, connects to the
+        policy layer and clears/saves preexisting saved networks.
+
+        Args:
+            association_mechanism: either 'policy' or 'drivers'. If None, uses
+                the default value from init (can be set by ACTS config)
+            preserve_saved_networks: whether to clear existing saved
+                networks, and preserve them for restoration later. If None, uses
+                the default value from init (can be set by ACTS config)
+
+        Raises:
+            FuchsiaDeviceError, if configuration fails
+        """
+        self.wlan_controller.set_country_code(CountryCode(self.config_country_code))
+
+        # If args aren't provided, use the defaults, which can be set in the
+        # config.
+        if association_mechanism is None:
+            association_mechanism = self.default_association_mechanism
+        if preserve_saved_networks is None:
+            preserve_saved_networks = self.default_preserve_saved_networks
+
+        if association_mechanism not in {None, "policy", "drivers"}:
+            raise FuchsiaDeviceError(
+                f"Invalid FuchsiaDevice association_mechanism: {association_mechanism}"
+            )
+
+        # Allows for wlan to be set up differently in different tests
+        if self.association_mechanism:
+            self.log.info("Deconfiguring WLAN")
+            self.deconfigure_wlan()
+
+        self.association_mechanism = association_mechanism
+
+        self.log.info(
+            "Configuring WLAN w/ association mechanism: " f"{association_mechanism}"
+        )
+        if association_mechanism == "drivers":
+            self.log.warn(
+                "You may encounter unusual device behavior when using the "
+                "drivers directly for WLAN. This should be reserved for "
+                "debugging specific issues. Normal test runs should use the "
+                "policy layer."
+            )
+            if preserve_saved_networks:
+                self.log.warn(
+                    "Unable to preserve saved networks when using drivers "
+                    "association mechanism (requires policy layer control)."
+                )
+        else:
+            # This requires SL4F calls, so it can only happen with actual
+            # devices, not with unit tests.
+            self.wlan_policy_controller.configure_wlan(preserve_saved_networks)
+
+        # Retrieve WLAN client and AP interfaces
+        self.update_wlan_interfaces()
+
+    def deconfigure_wlan(self) -> None:
+        """
+        Stops WLAN functionality (if it has been started). Used to allow
+        different tests to use WLAN differently (e.g. some tests require using
+        wlan policy, while the abstract wlan_device can be setup to use policy
+        or drivers)
+
+        Raises:
+            FuchsiaDeviveError, if deconfigure fails.
+        """
+        if not self.association_mechanism:
+            self.log.debug("WLAN not configured before deconfigure was called.")
+            return
+        # If using policy, stop client connections. Otherwise, just clear
+        # variables.
+        if self.association_mechanism != "drivers":
+            self.wlan_policy_controller._deconfigure_wlan()
+        self.association_mechanism = None
+
+    def reboot(
+        self,
+        use_ssh: bool = False,
+        unreachable_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
+        reboot_type: str = FUCHSIA_REBOOT_TYPE_SOFT,
+        testbed_pdus: list[pdu.PduDevice] | None = None,
+    ) -> None:
+        """Reboot a FuchsiaDevice.
+
+        Soft reboots the device, verifies it becomes unreachable, then verifies
+        it comes back online. Re-initializes services so the tests can continue.
+
+        Args:
+            use_ssh: if True, use fuchsia shell command via ssh to reboot
+                instead of SL4F.
+            unreachable_timeout: time to wait for device to become unreachable.
+            reboot_type: 'soft', 'hard' or 'flash'.
+            testbed_pdus: all testbed PDUs; required for a hard reboot.
+
+        Raises:
+            ConnectionError, if device fails to become unreachable or fails to
+                come back up.
+            AttributeError, if a hard reboot is requested without testbed_pdus.
+            ValueError, if reboot_type is not one of the supported types.
+        """
+        if reboot_type == FUCHSIA_REBOOT_TYPE_SOFT:
+            if use_ssh:
+                self.log.info("Soft rebooting via SSH")
+                try:
+                    self.ssh.run(
+                        "dm reboot",
+                        timeout_sec=FUCHSIA_RECONNECT_AFTER_REBOOT_TIME,
+                    )
+                except SSHError as e:
+                    # The SSH connection is expected to drop when the device
+                    # reboots; only re-raise unrelated SSH failures.
+                    if "closed by remote host" not in e.result.stderr:
+                        raise e
+            else:
+                self.log.info("Soft rebooting via SL4F")
+                self.sl4f.hardware_power_statecontrol_lib.suspendReboot(timeout=3)
+            # Confirm the reboot actually started before waiting for recovery.
+            self.ssh.wait_until_unreachable(timeout_sec=unreachable_timeout)
+
+        elif reboot_type == FUCHSIA_REBOOT_TYPE_HARD:
+            self.log.info("Hard rebooting via PDU")
+            if not testbed_pdus:
+                raise AttributeError(
+                    "Testbed PDUs must be supplied " "to hard reboot a fuchsia_device."
+                )
+            device_pdu, device_pdu_port = pdu.get_pdu_port_for_device(
+                self.device_pdu_config, testbed_pdus
+            )
+            # Power-cycle the device's PDU port: off, wait until unreachable,
+            # then back on.
+            self.log.info("Killing power to FuchsiaDevice")
+            device_pdu.off(device_pdu_port)
+            self.ssh.wait_until_unreachable(timeout_sec=unreachable_timeout)
+            self.log.info("Restoring power to FuchsiaDevice")
+            device_pdu.on(device_pdu_port)
+
+        elif reboot_type == FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH:
+            # Delegate soft-reboot-and-flash to the flashing helper.
+            flash(self, use_ssh, FUCHSIA_RECONNECT_AFTER_REBOOT_TIME)
+
+        else:
+            raise ValueError(f"Invalid reboot type: {reboot_type}")
+
+        # Cleanup services so cached transports (ssh, sl4f, ffx) reconnect on
+        # next use.
+        self.stop_services()
+
+        # TODO(http://b/246852449): Move configure_wlan to other controllers.
+        # If wlan was configured before reboot, it must be configured again
+        # after rebooting, as it was before reboot. No preserving should occur.
+        if self.association_mechanism:
+            pre_reboot_association_mechanism = self.association_mechanism
+            # Prevent configure_wlan from thinking it needs to deconfigure first
+            self.association_mechanism = None
+            self.configure_wlan(
+                association_mechanism=pre_reboot_association_mechanism,
+                preserve_saved_networks=False,
+            )
+
+        self.log.info("Device has rebooted")
+
+    def version(self) -> str:
+        """Return the version of Fuchsia running on the device."""
+        resp = self.sl4f.device_lib.get_version()
+        return MapValidator(resp).get(str, "result")
+
+    def device_name(self) -> str:
+        """Return the name of the device."""
+        resp = self.sl4f.device_lib.get_device_name()
+        return MapValidator(resp).get(str, "result")
+
+    def product_name(self) -> str:
+        """Return the product name of the device."""
+        resp = self.sl4f.device_lib.get_product_name()
+        return MapValidator(resp).get(str, "result")
+
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 25,
+        additional_ping_params: str | None = None,
+    ) -> PingResult:
+        """Pings from a Fuchsia device to an IPv4 address or hostname
+
+        Args:
+            dest_ip: (str) The ip or hostname to ping.
+            count: (int) How many icmp packets to send.
+            interval: (int) How long to wait between pings (ms)
+            timeout: (int) How long to wait before having the icmp packet
+                timeout (ms).
+            size: (int) Size of the icmp packet.
+            additional_ping_params: (str) command option flags to
+                append to the command string
+
+        Returns:
+            A PingResult with the results of the ping, containing:
+                exit_status: Exit status of the ping command.
+                stdout: The standard out of the ping command.
+                stderr: The standard error of the ping command.
+                rtt_min_ms: The minimum round trip time of the ping.
+                rtt_max_ms: The maximum round trip time of the ping.
+                rtt_avg_ms: The avg round trip time of the ping.
+            RTT fields are None when stderr was non-empty (ping failed).
+
+        Raises:
+            FuchsiaDeviceError: if the RTT line of a successful ping cannot
+                be parsed.
+        """
+        self.log.debug(f"Pinging {dest_ip}...")
+        if not additional_ping_params:
+            additional_ping_params = ""
+
+        try:
+            ping_result = self.ssh.run(
+                f"ping -c {count} -i {interval} -t {timeout} -s {size} "
+                f"{additional_ping_params} {dest_ip}"
+            )
+        except SSHError as e:
+            # A failed ping still produces useful output; keep the result.
+            ping_result = e.result
+
+        rtt_stats: re.Match[str] | None = None
+
+        if not ping_result.stderr:
+            # The RTT summary is the last non-empty line of stdout.
+            rtt_lines = ping_result.stdout.split("\n")[:-1]
+            rtt_line = rtt_lines[-1]
+            rtt_stats = re.search(self.ping_rtt_match, rtt_line)
+            if rtt_stats is None:
+                raise FuchsiaDeviceError(f'Unable to parse ping output: "{rtt_line}"')
+
+        # ping_rtt_match captures Min/Max/Avg in that order, so group(2) is
+        # the max and group(3) is the avg.
+        return PingResult(
+            exit_status=ping_result.exit_status,
+            stdout=ping_result.stdout,
+            stderr=ping_result.stderr,
+            transmitted=None,
+            received=None,
+            time_ms=None,
+            rtt_min_ms=float(rtt_stats.group(1)) if rtt_stats else None,
+            rtt_avg_ms=float(rtt_stats.group(3)) if rtt_stats else None,
+            rtt_max_ms=float(rtt_stats.group(2)) if rtt_stats else None,
+            rtt_mdev_ms=None,
+        )
+
+    def clean_up(self) -> None:
+        """Cleans up the FuchsiaDevice object, releases any resources it
+        claimed, and restores saved networks if applicable. For reboots, use
+        clean_up_services only.
+
+        Note: Any exceptions thrown in this method must be caught and handled,
+        ensuring that clean_up_services is run. Otherwise, the syslog listening
+        thread will never join and will leave tests hanging.
+        """
+        # If and only if wlan is configured, and using the policy layer
+        if self.association_mechanism == "policy":
+            try:
+                self.wlan_policy_controller.clean_up()
+            except Exception as err:
+                # Best-effort: log and continue so stop_services always runs.
+                self.log.warning(f"Unable to clean up WLAN Policy layer: {err}")
+
+        self.stop_services()
+
+        if self.package_server:
+            self.package_server.clean_up()
+
+    def get_interface_ip_addresses(self, interface: str) -> dict[str, list[str]]:
+        """Return the interface's IP addresses grouped by category
+        (e.g. "ipv4_private", "ipv6_private_local")."""
+        return get_interface_ip_addresses(self, interface)
+
+    def wait_for_ipv4_addr(self, interface: str) -> None:
+        """Checks if device has an ipv4 private address. Sleeps 1 second between
+        retries.
+
+        Args:
+            interface: name of interface from which to get ipv4 address.
+
+        Raises:
+            ConnectionError, if device does not have an ipv4 address after all
+            timeout.
+        """
+        self.log.info(
+            f"Checking for valid ipv4 addr. Retry {IP_ADDRESS_TIMEOUT} seconds."
+        )
+        timeout = time.time() + IP_ADDRESS_TIMEOUT
+        while time.time() < timeout:
+            ip_addrs = self.get_interface_ip_addresses(interface)
+
+            if len(ip_addrs["ipv4_private"]) > 0:
+                self.log.info(
+                    "Device has an ipv4 address: " f"{ip_addrs['ipv4_private'][0]}"
+                )
+                break
+            else:
+                self.log.debug(
+                    "Device does not yet have an ipv4 address...retrying in 1 "
+                    "second."
+                )
+                time.sleep(1)
+        else:
+            raise ConnectionError("Device failed to get an ipv4 address.")
+
+    def wait_for_ipv6_addr(self, interface: str) -> None:
+        """Checks if device has an ipv6 private local address. Sleeps 1 second
+        between retries.
+
+        Args:
+            interface: name of interface from which to get ipv6 address.
+
+        Raises:
+            ConnectionError, if device does not have an ipv6 address after all
+            timeout.
+        """
+        self.log.info(
+            f"Checking for valid ipv6 addr. Retry {IP_ADDRESS_TIMEOUT} seconds."
+        )
+        timeout = time.time() + IP_ADDRESS_TIMEOUT
+        while time.time() < timeout:
+            ip_addrs = self.get_interface_ip_addresses(interface)
+            if len(ip_addrs["ipv6_private_local"]) > 0:
+                self.log.info(
+                    "Device has an ipv6 private local address: "
+                    f"{ip_addrs['ipv6_private_local'][0]}"
+                )
+                break
+            else:
+                self.log.debug(
+                    "Device does not yet have an ipv6 address...retrying in 1 "
+                    "second."
+                )
+                time.sleep(1)
+        else:
+            raise ConnectionError("Device failed to get an ipv6 address.")
+
+    def stop_services(self) -> None:
+        """Stops all host-side clients to the Fuchsia device.
+
+        This is necessary whenever the device's state is unknown. These cases can be
+        found after device reboots, for example.
+        """
+        self.log.info("Stopping host device services.")
+        # Deleting a cached_property drops the cached instance so it is
+        # re-created on next access. Dependent controllers are dropped before
+        # the transports (sl4f, ssh, ffx) they are built on; deleting ffx also
+        # runs ffx_deleter to clean up the ffx connection.
+        del self.wlan_policy_controller
+        del self.wlan_controller
+        del self.sl4f
+        del self.ssh
+        del self.ffx
+
+    def take_bug_report(self) -> None:
+        """Takes a bug report on the device and stores it in a file."""
+        self.log.info(f"Taking snapshot of {self.mdns_name}")
+
+        time_stamp = acts_logger.normalize_log_line_timestamp(
+            acts_logger.epoch_to_log_line_timestamp(utils.get_current_epoch_time())
+        )
+        out_dir = context.get_current_context().get_full_output_path()
+        out_path = os.path.join(out_dir, f"{self.mdns_name}_{time_stamp}.zip")
+
+        try:
+            with open(out_path, "wb") as file:
+                bytes = self.ssh.run("snapshot").stdout_bytes
+                file.write(bytes)
+            self.log.info(f"Snapshot saved to {out_path}")
+        except Exception as err:
+            self.log.error(f"Failed to take snapshot: {err}")
+
+    def take_bt_snoop_log(self, custom_name: str | None = None) -> None:
+        """Takes a the bt-snoop log from the device and stores it in a file
+        in a pcap format.
+        """
+        bt_snoop_path = context.get_current_context().get_full_output_path()
+        time_stamp = acts_logger.normalize_log_line_timestamp(
+            acts_logger.epoch_to_log_line_timestamp(time.time())
+        )
+        out_name = "FuchsiaDevice%s_%s" % (
+            self.serial,
+            time_stamp.replace(" ", "_").replace(":", "-"),
+        )
+        out_name = f"{out_name}.pcap"
+        if custom_name:
+            out_name = f"{self.serial}_{custom_name}.pcap"
+        else:
+            out_name = f"{out_name}.pcap"
+        full_out_path = os.path.join(bt_snoop_path, out_name)
+        with open(full_out_path, "wb") as file:
+            bytes = self.ssh.run("bt-snoop-cli -d -f pcap").stdout_bytes
+            file.write(bytes)
diff --git a/src/antlion/controllers/fuchsia_lib/OWNERS b/packages/antlion/controllers/fuchsia_lib/OWNERS
similarity index 100%
rename from src/antlion/controllers/fuchsia_lib/OWNERS
rename to packages/antlion/controllers/fuchsia_lib/OWNERS
diff --git a/src/antlion/controllers/fuchsia_lib/__init__.py b/packages/antlion/controllers/fuchsia_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/fuchsia_lib/__init__.py
rename to packages/antlion/controllers/fuchsia_lib/__init__.py
diff --git a/packages/antlion/controllers/fuchsia_lib/base_lib.py b/packages/antlion/controllers/fuchsia_lib/base_lib.py
new file mode 100644
index 0000000..1171d98
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/base_lib.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+from typing import Any, Mapping
+from urllib.request import Request, urlopen
+
+from mobly.logger import PrefixLoggerAdapter
+
+DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC = 30
+
+
+class DeviceOffline(Exception):
+    """Raised when the device is no longer reachable via the network."""
+
+
+class SL4FCommandFailed(Exception):
+    """Raised when a SL4F command sent to the server failed."""
+
+
+class BaseLib:
+    def __init__(self, addr: str, logger_tag: str) -> None:
+        self.address = addr
+        self.log = PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"SL4F | {self.address} | {logger_tag}"
+            },
+        )
+
+    def send_command(
+        self,
+        cmd: str,
+        args: Mapping[str, object] | None = None,
+        response_timeout: float = DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC,
+    ) -> dict[str, Any]:
+        """Builds and sends a JSON command to SL4F server.
+
+        Args:
+            cmd: SL4F method name of command.
+            args: Arguments required to execute cmd.
+            response_timeout: Seconds to wait for a response before
+                throwing an exception.
+
+        Returns:
+            Response from SL4F server.
+
+        Throws:
+            TimeoutError: The HTTP request timed out waiting for a response
+        """
+        data = {
+            "jsonrpc": "2.0",
+            # id is required by the SL4F server to parse test_data but is not
+            # currently used.
+            "id": "",
+            "method": cmd,
+            "params": args,
+        }
+        data_json = json.dumps(data).encode("utf-8")
+        req = Request(
+            self.address,
+            data=data_json,
+            headers={
+                "Content-Type": "application/json; charset=utf-8",
+                "Content-Length": str(len(data_json)),
+            },
+        )
+
+        self.log.debug(
+            f'Sending request "{cmd}" with args: {args} with timeout {response_timeout}'
+        )
+        response = urlopen(req, timeout=response_timeout)
+
+        response_body = response.read().decode("utf-8")
+        try:
+            response_json = json.loads(response_body)
+            self.log.debug(f'Received response for "{cmd}": {response_json}')
+        except json.JSONDecodeError as e:
+            raise SL4FCommandFailed(response_body) from e
+
+        # If the SL4F command fails it returns a str, without an 'error' field
+        # to get.
+        if not isinstance(response_json, dict):
+            raise SL4FCommandFailed(response_json)
+
+        return response_json
diff --git a/packages/antlion/controllers/fuchsia_lib/device_lib.py b/packages/antlion/controllers/fuchsia_lib/device_lib.py
new file mode 100644
index 0000000..7b6768c
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/device_lib.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Any
+
+from antlion.controllers.fuchsia_lib.base_lib import BaseLib
+
+
+class DeviceLib(BaseLib):
+    def __init__(self, addr: str) -> None:
+        super().__init__(addr, "device")
+
+    def get_device_name(self) -> dict[str, Any]:
+        """Get the device name."""
+
+        return self.send_command("device_facade.GetDeviceName", {})
+
+    def get_product_name(self) -> dict[str, Any]:
+        """Get the product name."""
+
+        return self.send_command("device_facade.GetProduct", {})
+
+    def get_version(self) -> dict[str, Any]:
+        """Get the device version."""
+
+        return self.send_command("device_facade.GetVersion", {})
diff --git a/packages/antlion/controllers/fuchsia_lib/ffx.py b/packages/antlion/controllers/fuchsia_lib/ffx.py
new file mode 100644
index 0000000..0877d51
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/ffx.py
@@ -0,0 +1,431 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum
+import io
+import json
+import logging
+import os
+import subprocess
+import tempfile
+import time
+from pathlib import Path, PurePath
+from shutil import rmtree
+
+from mobly import logger, signals
+from tenacity import retry
+from tenacity.stop import stop_after_delay
+from tenacity.wait import wait_fixed
+
+from antlion import context, utils
+
+FFX_DEFAULT_COMMAND_TIMEOUT: int = 60
+FFX_CONFIG_TIMEOUT_SEC: float = 20
+FFX_TARGET_ADD_TIMEOUT_SEC: float = 20
+FFX_DAEMON_STOP_TIMEOUT_SEC: float = 4
+
+
+class FFXError(signals.TestError):
+    """Non-zero error code returned from a ffx command."""
+
+    def __init__(self, command: str, process: subprocess.CalledProcessError) -> None:
+        self.command = command
+        self.stdout: str = process.stdout.decode("utf-8", errors="replace")
+        self.stderr: str = process.stderr.decode("utf-8", errors="replace")
+        self.exit_status = process.returncode
+        super().__init__(self.__str__())
+
+    def __str__(self) -> str:
+        return f'ffx subcommand "{self.command}" returned {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
+
+
+class FFXTimeout(signals.TestError):
+    """Timed out running a ffx command."""
+
+
+class OutputFormat(enum.StrEnum):
+    TEXT = "text"
+    JSON = "json"
+    JSON_PRETTY = "json-pretty"
+
+
+class FFX:
+    """Device-specific controller for the ffx tool.
+
+    Attributes:
+        log: Logger for the device-specific instance of ffx.
+        binary_path: Path to the ffx binary.
+        mdns_name: mDNS nodename of the default Fuchsia target.
+        ip: IP address of the default Fuchsia target.
+        ssh_private_key_path: Path to Fuchsia DUT SSH private key.
+    """
+
+    def __init__(
+        self,
+        binary_path: str,
+        mdns_name: str,
+        ip: str | None = None,
+        ssh_private_key_path: str | None = None,
+        subtools_search_path: str | None = None,
+    ):
+        """
+        Args:
+            binary_path: Path to ffx binary.
+            target: Fuchsia mDNS nodename of default target.
+            ssh_private_key_path: Path to SSH private key for talking to the
+                Fuchsia DUT.
+        """
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[ffx | {mdns_name}]",
+            },
+        )
+        self._binary_path = binary_path
+        self._mdns_name = mdns_name
+        self._ip = ip
+        self._ssh_private_key_path = ssh_private_key_path
+        self._subtools_search_path = subtools_search_path
+
+        self._daemon: subprocess.Popen | None = None
+        self._daemon_log: io.TextIOWrapper | None = None
+        self._isolate_dir: str | None = None
+        self._sock_dir: str | None = None
+        self._ssh_auth_sock_path: str | None = None
+        self._overnet_socket_path: str | None = None
+        self._has_been_reachable = False
+        self._has_logged_version = False
+
+    def clean_up(self) -> None:
+        self._stop_daemon()
+        self._has_been_reachable = False
+        self._has_logged_version = False
+
+    def run(
+        self,
+        command: list[str],
+        timeout_sec: float = FFX_DEFAULT_COMMAND_TIMEOUT,
+        skip_status_code_check: bool = False,
+        skip_reachability_check: bool = False,
+        output_format: OutputFormat = OutputFormat.TEXT,
+    ) -> subprocess.CompletedProcess[bytes]:
+        """Runs an ffx command.
+
+        Verifies reachability before running, if it hasn't already.
+
+        Args:
+            command: Command to run with ffx.
+            timeout_sec: Seconds to wait for a command to complete.
+            skip_status_code_check: Whether to check for the status code.
+            skip_reachability_check: Whether to skip the reachability check.
+            output_format: Desired output format; useful for parsing output.
+
+        Raises:
+            FFXTimeout: when the command times out.
+            FFXError: when the command returns non-zero and skip_status_code_check is False.
+
+        Returns:
+            The results of the command. Note subprocess.CompletedProcess returns
+            stdout and stderr as a byte-array, not a string. Treat these members
+            as such or convert to a string using bytes.decode('utf-8').
+        """
+        if not self._daemon:
+            self._start_daemon()
+        if not self._has_been_reachable and not skip_reachability_check:
+            self.log.info(f'Verifying reachability before running "{command}"')
+            self.verify_reachable()
+        return self._exec(
+            command,
+            timeout_sec,
+            check=not skip_status_code_check,
+            output_format=output_format,
+        )
+
+    def _exec(
+        self,
+        command: list[str],
+        timeout_sec: float,
+        check: bool = True,
+        output_format: OutputFormat = OutputFormat.TEXT,
+    ) -> subprocess.CompletedProcess[bytes]:
+        """Execute a ffx command without any other arguments.
+
+        Args:
+            command: Command to run with ffx.
+            timeout_sec: Seconds to wait for a command to complete.
+            check: Whether to check for the status code.
+
+        Raises:
+            FFXTimeout: when the command times out.
+            FFXError: when the command returns non-zero and skip_status_code_check is False.
+
+        Returns:
+            The results of the command. Note subprocess.CompletedProcess returns
+            stdout and stderr as a byte-array, not a string. Treat these members
+            as such or convert to a string using bytes.decode('utf-8').
+        """
+        if not self._isolate_dir:
+            raise TypeError(
+                f"Expected _isolate_dir to be a str, got {type(self._isolate_dir)}"
+            )
+
+        self.log.debug(f'Running "{" ".join(command)}".')
+
+        full_command = [self._binary_path, "--isolate-dir", self._isolate_dir]
+        match output_format:
+            case OutputFormat.TEXT:
+                full_command += command
+            case OutputFormat.JSON:
+                full_command += ["--machine", "json"] + command
+            case OutputFormat.JSON_PRETTY:
+                full_command += ["--machine", "json-pretty"] + command
+
+        try:
+            result = subprocess.run(
+                full_command,
+                capture_output=True,
+                timeout=timeout_sec,
+                check=check,
+            )
+            self.log.debug(
+                f'Result of "{" ".join(command)}":\n'
+                f'stdout: {result.stdout.decode("utf-8")}\n'
+                f'stderr: {result.stderr.decode("utf-8")}'
+            )
+            return result
+        except subprocess.CalledProcessError as e:
+            raise FFXError(" ".join(command), e) from e
+        except subprocess.TimeoutExpired as e:
+            raise FFXTimeout(f'Timed out running "{" ".join(command)}"') from e
+
+    def _start_daemon(self) -> None:
+        """Create a new isolated environment for ffx.
+
+        This is needed to avoid overlapping ffx daemons while testing in
+        parallel, causing the ffx invocations to “upgrade” one daemon to
+        another, which appears as a flap/restart to another test.
+        """
+        # Store ffx files in a unique directory. Timestamp is used to prevent
+        # files from being overwritten in the case when a test intentionally
+        # reboots or resets the device such that a new isolated ffx environment
+        # is created.
+        root_dir = context.get_current_context().get_full_output_path()
+        epoch = utils.get_current_epoch_time()
+        time_stamp = logger.normalize_log_line_timestamp(
+            logger.epoch_to_log_line_timestamp(epoch)
+        )
+        self._isolate_dir = os.path.join(root_dir, f"{self._mdns_name}_{time_stamp}")
+        os.makedirs(self._isolate_dir, exist_ok=True)
+
+        # Sockets need to be created in a different directory to be guaranteed
+        # to stay under the maximum socket path length of 104 characters.
+        # See https://unix.stackexchange.com/q/367008
+        self._sock_dir = tempfile.mkdtemp()
+        # On MacOS, the socket paths need to be just paths (not pre-created
+        # Python tempfiles, which are not socket files).
+        self._ssh_auth_sock_path = str(PurePath(self._sock_dir, "ssh_auth_sock"))
+        self._overnet_socket_path = str(PurePath(self._sock_dir, "overnet_socket"))
+
+        cmds = [
+            ["config", "set", "log.dir", os.path.join(self._isolate_dir, "ffx_logs")],
+            ["config", "set", "log.level", "debug"],
+            ["config", "set", "target.default", self._mdns_name],
+            # Use user-specific and device-specific locations for sockets.
+            # Avoids user permission errors in a multi-user test environment.
+            # Avoids daemon upgrades when running tests in parallel in a CI
+            # environment.
+            ["config", "set", "ssh.auth-sock", self._ssh_auth_sock_path],
+            ["config", "set", "overnet.socket", self._overnet_socket_path],
+            # Alias to disable metrics, device discovery, device auto connection, etc.
+            ["config", "set", "ffx.isolated", "true"],
+            # Control the daemon's lifecycle directly
+            ["config", "set", "daemon.autostart", "false"],
+        ]
+
+        if not self._ip:
+            cmds.append(["config", "set", "discovery.mdns.enabled", "true"])
+
+        # ffx looks for the private key in several default locations. For
+        # testbeds which have the private key in another location, set it now.
+        if self._ssh_private_key_path:
+            cmds.append(
+                ["config", "set", "ssh.priv", f'["{self._ssh_private_key_path}"]']
+            )
+
+        if self._subtools_search_path:
+            cmds.append(
+                [
+                    "config",
+                    "set",
+                    "ffx.subtool-search-paths",
+                    self._subtools_search_path,
+                ]
+            )
+
+        for cmd in cmds:
+            self._exec(cmd, FFX_CONFIG_TIMEOUT_SEC)
+
+        self._daemon_log = open(
+            os.path.join(self._isolate_dir, "daemon.log"), "a+", encoding="utf-8"
+        )
+
+        # Start the daemon
+        self._daemon = subprocess.Popen(
+            [self._binary_path, "--isolate-dir", self._isolate_dir, "daemon", "start"],
+            stdout=self._daemon_log,
+        )
+
+        # Wait for overnet_socket to be created
+        @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1))
+        def wait_for_socket(path: str) -> None:
+            if not Path(path).is_socket():
+                raise FileNotFoundError(f"Socket not found: {path}")
+
+        wait_for_socket(self._overnet_socket_path)
+
+        if self._ip:
+            self._exec(
+                ["target", "add", self._ip, "--nowait"], FFX_TARGET_ADD_TIMEOUT_SEC
+            )
+
+        result = self._exec(["config", "get"], FFX_CONFIG_TIMEOUT_SEC)
+        self.log.debug(f'Config:\n{result.stdout.decode("utf-8")}')
+
+    def _stop_daemon(self) -> None:
+        if self._daemon:
+            self.run(
+                # TODO(b/332983529): Add the following arguments once ffx daemon
+                # stops correctly.
+                # ["-t", str(FFX_DAEMON_STOP_TIMEOUT_SEC * 1000)]
+                ["daemon", "stop"],
+                skip_reachability_check=True,
+            )
+            self._daemon.wait(timeout=FFX_DAEMON_STOP_TIMEOUT_SEC)
+            self._daemon = None
+
+        if self._daemon_log:
+            self._daemon_log.close()
+            self._daemon_log = None
+
+        if self._ssh_auth_sock_path:
+            Path(self._ssh_auth_sock_path).unlink(missing_ok=True)
+            self._ssh_auth_sock_path = None
+
+        if self._overnet_socket_path:
+            Path(self._overnet_socket_path).unlink(missing_ok=True)
+            self._overnet_socket_path = None
+
+        if self._sock_dir:
+            rmtree(self._sock_dir)
+            self._sock_dir = None
+
+        self._isolate_dir = None
+
+    def verify_reachable(self, timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT) -> None:
+        """Verify the target is reachable via RCS and various services.
+
+        Blocks until the device allows for an RCS connection. If the device
+        isn't reachable within a short time, logs a warning before waiting
+        longer.
+
+        Verifies the RCS connection by fetching information from the device,
+        which exercises several debug and informational FIDL services.
+
+        When called for the first time, the versions will be checked for
+        compatibility.
+
+        Args:
+            timeout_sec: Seconds to wait for reachability check
+
+        Raises:
+            FFXError: when an unknown error occurs
+            FFXTimeout: when the target is unreachable
+        """
+        last_err: Exception | None = None
+        timeout = time.perf_counter() + timeout_sec
+        while True:
+            try:
+                self.run(
+                    ["target", "wait"],
+                    timeout_sec=FFX_CONFIG_TIMEOUT_SEC,
+                    skip_reachability_check=True,
+                )
+                break
+            except FFXError as e:
+                if "took too long connecting to ascendd socket" in e.stderr:
+                    last_err = e
+                else:
+                    raise e
+            except FFXTimeout as e:
+                last_err = e
+
+            if time.perf_counter() > timeout:
+                raise FFXTimeout(
+                    f"Waited over {timeout_sec}s for ffx to become reachable"
+                ) from last_err
+
+        # Use a shorter timeout than default because device information
+        # gathering can hang for a long time if the device is not actually
+        # connectable.
+        try:
+            result = self.run(
+                ["target", "show"],
+                timeout_sec=15,
+                skip_reachability_check=True,
+                output_format=OutputFormat.JSON_PRETTY,
+            )
+        except Exception as e:
+            self.log.error(
+                f'Failed to reach target device. Try running "{self._binary_path}'
+                + ' doctor" to diagnose issues.'
+            )
+            raise e
+
+        self._has_been_reachable = True
+
+        if not self._has_logged_version:
+            self._has_logged_version = True
+            self.compare_version(result)
+
+    def compare_version(self, target_show_result: subprocess.CompletedProcess) -> None:
+        """Compares the version of Fuchsia with the version of ffx.
+
+        Args:
+            target_show_result: Result of the target show command with JSON
+                output mode enabled
+        """
+        result_raw = target_show_result.stdout
+        try:
+            result_json = json.loads(result_raw)
+            build_info = next(filter(lambda s: s.get("label") == "build", result_json))
+            version_info = next(
+                filter(lambda s: s.get("label") == "version", build_info["child"])
+            )
+            device_version = version_info.get("value")
+        except (AttributeError, json.JSONDecodeError) as e:
+            raise signals.TestAbortClass(
+                f'Failed to parse response of "ffx target show":\n{result_raw}'
+            ) from e
+
+        ffx_version = self.run(["version"]).stdout.decode("utf-8")
+
+        self.log.info(f"Device version: {device_version}, ffx version: {ffx_version}")
+        if device_version != ffx_version:
+            self.log.warning(
+                "ffx versions that differ from device versions may"
+                + " have compatibility issues. It is recommended to"
+                + " use versions within 6 weeks of each other."
+            )
diff --git a/packages/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py b/packages/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
new file mode 100644
index 0000000..6df2b9d
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import http
+import logging
+
+import antlion.controllers.fuchsia_lib.base_lib as base_lib
+
+HW_PWR_STATE_CONTROL_TIMEOUT = 5
+
+
+class FuchsiaHardwarePowerStatecontrolLib(base_lib.BaseLib):
+    def __init__(self, addr: str) -> None:
+        super().__init__(addr, "hardware_power_statecontrol")
+
+    def send_command(self, test_cmd, test_args, response_timeout=30):
+        """Wrap send_command to allow disconnects after sending the request."""
+        try:
+            response = super().send_command(test_cmd, test_args, response_timeout)
+        except (
+            TimeoutError,
+            http.client.RemoteDisconnected,
+            base_lib.DeviceOffline,
+        ) as e:
+            logging.warning(f"Error while sending power command: {e}")
+            return
+        return response
+
+    def suspendReboot(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT):
+        """Call Suspend Reboot.
+
+        Returns:
+            None if success.
+        """
+        test_cmd = "hardware_power_statecontrol_facade.SuspendReboot"
+        return self.send_command(test_cmd, {}, response_timeout=timeout)
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py b/packages/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py
similarity index 100%
rename from src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py
rename to packages/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py
diff --git a/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py b/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
new file mode 100644
index 0000000..3f902f1
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+from dataclasses import dataclass
+
+from honeydew.typing.wlan import CountryCode, QueryIfaceResponse, WlanMacRole
+from mobly import logger, signals
+
+from antlion import utils
+from antlion.controllers.fuchsia_lib.sl4f import SL4F
+from antlion.validation import MapValidator
+
+TIME_TO_SLEEP_BETWEEN_RETRIES = 1
+TIME_TO_WAIT_FOR_COUNTRY_CODE = 10
+
+
+@dataclass(frozen=True)
+class WlanInterfaces:
+    client: dict[str, QueryIfaceResponse]
+    ap: dict[str, QueryIfaceResponse]
+
+
+class WlanControllerError(signals.ControllerError):
+    pass
+
+
+class WlanController:
+    """Contains methods related to wlan core, to be used in FuchsiaDevice object"""
+
+    def __init__(self, sl4f: SL4F) -> None:
+        self.sl4f = sl4f
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[WlanController | {self.sl4f.address}]",
+            },
+        )
+
+    def get_interfaces_by_role(self) -> WlanInterfaces:
+        """Retrieves WLAN interface information."""
+
+        # Retrieve WLAN interface IDs
+        wlan_iface_ids = self.sl4f.wlan_lib.get_iface_id_list()
+        if len(wlan_iface_ids) < 1:
+            return WlanInterfaces(client={}, ap={})
+
+        # Use IDs to get WLAN interface info and mac addresses
+        wlan_ifaces_by_mac: dict[str, QueryIfaceResponse] = {}
+        for id in wlan_iface_ids:
+            result = self.sl4f.wlan_lib.query_iface(id)
+            mac = utils.mac_address_list_to_str(bytes(result.sta_addr))
+            wlan_ifaces_by_mac[mac] = result
+
+        # Use mac addresses to query the interfaces from the netstack view,
+        # which allows us to supplement the interface information with the name,
+        # netstack_id, etc.
+
+        # TODO(fxb/75909): This tedium is necessary to get the interface name
+        # because only netstack has that information. The bug linked here is
+        # to reconcile some of the information between the two perspectives, at
+        # which point we can eliminate step.
+        net_ifaces_response = self.sl4f.netstack_lib.netstackListInterfaces()
+        if net_ifaces_response.get("error"):
+            raise WlanControllerError(
+                f"Failed to get network interfaces list: {net_ifaces_response['error']}"
+            )
+        net_ifaces = net_ifaces_response["result"]
+
+        client: dict[str, QueryIfaceResponse] = {}
+        ap: dict[str, QueryIfaceResponse] = {}
+        for iface in net_ifaces:
+            iface = MapValidator(iface)
+            try:
+                # Some interfaces might not have a MAC
+                mac_raw = iface.list("mac").all(int)
+                iface_mac = utils.mac_address_list_to_str(bytes(mac_raw))
+            except Exception as e:
+                self.log.debug(f"Error {e} getting MAC for iface {iface}")
+                continue
+            if iface_mac in wlan_ifaces_by_mac:
+                result = wlan_ifaces_by_mac[iface_mac]
+                name = iface.get(str, "name")
+                match result.role:
+                    case WlanMacRole.CLIENT:
+                        client[name] = result
+                    case WlanMacRole.AP:
+                        ap[name] = result
+                    case _:
+                        raise ValueError(f'Unexpected WlanMacRole "{result.role}"')
+
+        return WlanInterfaces(client, ap)
+
+    def set_country_code(self, country_code: CountryCode) -> None:
+        """Sets country code through the regulatory region service and waits
+        for the code to be applied to WLAN PHY.
+
+        Args:
+            country_code: the 2 character country code to set
+
+        Raises:
+            EnvironmentError - failure to get/set regulatory region
+            ConnectionError - failure to query PHYs
+        """
+        self.log.info(f"Setting DUT country code to {country_code}")
+        self.sl4f.wlan_lib.set_region(country_code)
+
+        self.log.info(
+            f"Verifying DUT country code was correctly set to {country_code}."
+        )
+        phy_ids_response = self.sl4f.wlan_lib.get_phy_id_list()
+
+        end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE
+        while time.time() < end_time:
+            for id in phy_ids_response:
+                resp = self.sl4f.wlan_lib.get_country(id)
+                if resp == country_code:
+                    return
+                time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
+        else:
+            raise EnvironmentError(f"Failed to set DUT country code to {country_code}.")
diff --git a/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py b/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
new file mode 100644
index 0000000..1999d68
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
@@ -0,0 +1,384 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+from dataclasses import dataclass
+
+from honeydew.typing.wlan import (
+    ConnectionState,
+    DisconnectStatus,
+    NetworkConfig,
+    NetworkState,
+    WlanClientState,
+)
+from mobly import logger, signals
+
+from antlion.controllers.fuchsia_lib.sl4f import SL4F
+from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHProvider
+from antlion.controllers.fuchsia_lib.wlan_policy_lib import WlanPolicyError
+
+SESSION_MANAGER_TIMEOUT_SEC = 10
+FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT = 30
+DEFAULT_GET_UPDATE_TIMEOUT = 60
+
+
+class WlanPolicyControllerError(signals.ControllerError):
+    pass
+
+
+@dataclass
+class PreservedState:
+    saved_networks: list[NetworkConfig] | None
+    client_connections_state: WlanClientState | None
+
+
+@dataclass
+class ClientState:
+    state: str
+    networks: list[dict[str, object]]
+
+
+# TODO(http://b/309854439): Add a ClientStateWatcher and refactor tests to allow test
+# developers more control when update listeners are set and the client update state is
+# reset.
+class WlanPolicyController:
+    """Contains methods related to the wlan policy layer, to be used in the
+    FuchsiaDevice object.
+
+    Attributes:
+        sl4f: sl4f module for communicating to the WLAN policy controller.
+        ssh: transport to fuchsia device to stop component processes.
+    """
+
+    def __init__(self, sl4f: SL4F, ssh: FuchsiaSSHProvider) -> None:
+        """
+        Args:
+            sl4f: sl4f module for communicating to the WLAN policy controller.
+            ssh: transport to fuchsia device to stop component processes.
+        """
+        self.preserved_networks_and_client_state: PreservedState | None = None
+        self.policy_configured = False
+        self.sl4f = sl4f
+        self.ssh = ssh
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[WlanPolicyController | {self.ssh.config.host_name}]",
+            },
+        )
+
+    def configure_wlan(
+        self,
+        preserve_saved_networks: bool,
+        timeout_sec: int = FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT,
+    ) -> None:
+        """Sets up wlan policy layer.
+
+        Args:
+            preserve_saved_networks: whether to clear existing saved
+                networks and client state, to be restored at test close.
+            timeout_sec: time to wait for device to configure WLAN.
+        """
+
+        # We need to stop session manager to free control of
+        # fuchsia.wlan.policy.ClientController, which can only be used by a
+        # single caller at a time. SL4F needs the ClientController to trigger
+        # WLAN policy state changes. On eng builds the session_manager can be
+        # restarted after being stopped during reboot so we attempt killing the
+        # session manager process for 10 seconds.
+        # See https://cs.opensource.google/fuchsia/fuchsia/+/main:sdk/fidl/fuchsia.wlan.policy/client_provider.fidl
+        if "cast_agent.cm" in self.ssh.run("ps").stdout:
+            session_manager_expiration = time.time() + SESSION_MANAGER_TIMEOUT_SEC
+            while time.time() < session_manager_expiration:
+                self.ssh.stop_component("session_manager", is_cfv2_component=True)
+
+        # Acquire control of policy layer
+        self.sl4f.wlan_policy_lib.create_client_controller()
+        self.log.info("ACTS tests now have control of the WLAN policy layer.")
+
+        if preserve_saved_networks and not self.preserved_networks_and_client_state:
+            self.preserved_networks_and_client_state = (
+                self.remove_and_preserve_networks_and_client_state()
+            )
+
+        self.sl4f.wlan_policy_lib.start_client_connections()
+        self.policy_configured = True
+
+    def _deconfigure_wlan(self) -> None:
+        self.sl4f.wlan_policy_lib.stop_client_connections()
+        self.policy_configured = False
+
+    def clean_up(self) -> None:
+        if self.preserved_networks_and_client_state is not None:
+            # It is possible for policy to have been configured before, but
+            # deconfigured before test end. In this case, it must be set up
+            # before restoring networks
+            if not self.policy_configured:
+                self.configure_wlan(False)
+
+        self.restore_preserved_networks_and_client_state()
+
+    def _find_network(
+        self, ssid: str, networks: list[NetworkState]
+    ) -> NetworkState | None:
+        """Helper method to find network in list of network states.
+
+        Args:
+            ssid: The network name to look for.
+            networks: The list of network states to look in.
+
+        Returns:
+            Network state of target ssid or None if not found in networks.
+        """
+        for network in networks:
+            if network.network_identifier.ssid == ssid:
+                return network
+        return None
+
+    def wait_for_network_state(
+        self,
+        ssid: str,
+        expected_states: ConnectionState | set[ConnectionState],
+        expected_status: DisconnectStatus | None = None,
+        timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT,
+    ) -> ConnectionState:
+        """Waits until the device returns with expected network state.
+
+        Args:
+            ssid: The network name to check the state of.
+            expected_states: The network state or states we are expecting to see.
+            expected_status: The disconnect status of the network. Only relevant when
+                expected_state is FAILED or DISCONNECTED.
+            timeout_sec: The number of seconds to wait for a update showing connection.
+
+        Returns:
+            Current network state if network converges on one of the expected states.
+
+        Raises:
+            TypeError: If DisconnectStatus provided with a CONNECTING or CONNECTED
+                state.
+            WlanPolicyControllerError: If no network is found before timeout or fails to
+                converge to one of the expected states.
+        """
+
+        if not isinstance(expected_states, set):
+            expected_states = {expected_states}
+
+        # Raise only when a disconnect status was supplied alongside purely
+        # CONNECTING/CONNECTED expectations. `issubset` already covers the
+        # exact-equality case; the previous `== ... or ... and ...` form let
+        # `and` bind tighter than `or`, raising even when expected_status
+        # was None.
+        if expected_states.issubset(
+            {ConnectionState.CONNECTING, ConnectionState.CONNECTED}
+        ) and expected_status is not None:
+            raise TypeError(
+                "Disconnect status not valid for CONNECTING or CONNECTED states."
+            )
+
+        self.sl4f.wlan_policy_lib.set_new_update_listener()
+
+        end_time = time.time() + timeout_sec
+        while time.time() < end_time:
+            time_left = max(1, int(end_time - time.time()))
+            try:
+                client = self.sl4f.wlan_policy_lib.get_update(timeout=time_left)
+            except WlanPolicyError as e:
+                # WlanPolicyError can be thrown if the SL4F command was not successfully
+                # sent, if the command timed out, or if the command returned with an
+                # error code in the 'error' field. We retry here to handle the cases
+                # in negative testing where we expect to recieve an 'error'.
+                self.log.debug(f"Last error: {e}")
+                time.sleep(1)
+                continue
+
+            # If we don't find the network initially, wait and retry.
+            network = self._find_network(ssid, client.networks)
+            if network is None:
+                self.log.debug(
+                    f"{ssid} not found in client networks: {client.networks}"
+                )
+                time.sleep(1)
+                continue
+
+            if network.connection_state in expected_states:
+                # Check optional disconnect status matches.
+                if expected_status:
+                    if network.disconnect_status is not expected_status:
+                        raise WlanPolicyControllerError(
+                            f"Disconnect status is not {expected_status}"
+                        )
+            elif network.connection_state is ConnectionState.CONNECTING:
+                self.log.debug(f"Network {ssid} still attempting to connect.")
+                time.sleep(1)
+                continue
+            else:
+                raise WlanPolicyControllerError(
+                    f'Expected network "{ssid}" to be in state {expected_states}, '
+                    f"got {network.connection_state}"
+                )
+
+            # Successfully converged on expected state and status
+            return network.connection_state
+        else:
+            if network is None:
+                raise WlanPolicyControllerError(
+                    f"Timed out trying to find ssid: {ssid}"
+                )
+            raise WlanPolicyControllerError(
+                f'Timed out waiting for "{ssid}" to reach state {expected_states} and '
+                f"status {expected_status}"
+            )
+
+    def wait_for_client_state(
+        self,
+        expected_state: WlanClientState,
+        timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT,
+    ) -> None:
+        """Waits until the client converges to expected state.
+
+        Args:
+            expected_state: The client state we are waiting to see.
+            timeout_sec: Duration to wait for the desired_state.
+
+        Raises:
+            WlanPolicyControllerError: If client still has not converged to expected
+                state at end of timeout.
+        """
+        self.sl4f.wlan_policy_lib.set_new_update_listener()
+
+        last_err: WlanPolicyError | None = None
+        end_time = time.time() + timeout_sec
+        while time.time() < end_time:
+            time_left = max(1, int(end_time - time.time()))
+            try:
+                client = self.sl4f.wlan_policy_lib.get_update(timeout=time_left)
+            except WlanPolicyError as e:
+                # WlanPolicyError can be thrown if the SL4F command was not successfully
+                # sent, if the command timed out, or if the command returned with an
+                # error code in the 'error' field. We retry here to handle the cases
+                # in negative testing where we expect to recieve an 'error'.
+                last_err = e
+                time.sleep(1)
+                continue
+            if client.state is not expected_state:
+                # Continue getting updates.
+                time.sleep(1)
+                continue
+            else:
+                return
+        else:
+            self.log.error(
+                f"Client state did not converge to the expected state: {expected_state}"
+                f" Waited:{timeout_sec}s"
+            )
+            raise WlanPolicyControllerError from last_err
+
+    def wait_for_no_connections(
+        self, timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT
+    ) -> None:
+        """Waits to see that there are no connections to the device.
+
+        Args:
+            timeout_sec: The time to wait to see no connections.
+
+        Raises:
+            WlanPolicyControllerError: If client update has no networks or if client
+                still has connections at end of timeout.
+        """
+        self.sl4f.wlan_policy_lib.set_new_update_listener()
+
+        last_err: WlanPolicyError | None = None
+        end_time = time.time() + timeout_sec
+        while time.time() < end_time:
+            curr_connected_networks: list[NetworkState] = []
+            time_left = max(1, int(end_time - time.time()))
+            try:
+                client = self.sl4f.wlan_policy_lib.get_update(timeout=time_left)
+            except WlanPolicyError as e:
+                # WlanPolicyError can be thrown if the SL4F command was not successfully
+                # sent, if the command timed out, or if the command returned with an
+                # error code in the 'error' field. We retry here to handle the cases
+                # in negative testing where we expect to recieve an 'error'.
+                last_err = e
+                time.sleep(1)
+                continue
+
+            # Iterate through networks checking to see if any are still connected.
+            for network in client.networks:
+                if network.connection_state in {
+                    ConnectionState.CONNECTING,
+                    ConnectionState.CONNECTED,
+                }:
+                    curr_connected_networks.append(network)
+
+            if len(curr_connected_networks) != 0:
+                # Continue getting updates.
+                time.sleep(1)
+                continue
+            else:
+                return
+
+        self.log.error(f"Networks still connected. Waited: {timeout_sec}s")
+        raise WlanPolicyControllerError from last_err
+
+    def remove_and_preserve_networks_and_client_state(self) -> PreservedState:
+        """Preserves networks already saved on devices before removing them.
+
+        This method is used to set up a clean test environment. Records the state of
+        client connections before tests.
+
+        Returns:
+            PreservedState: State of the client containing NetworkConfigs and client
+                connection state.
+        """
+        client = self.sl4f.wlan_policy_lib.get_update()
+        networks = self.sl4f.wlan_policy_lib.get_saved_networks()
+        self.sl4f.wlan_policy_lib.remove_all_networks()
+        self.log.info("Saved networks cleared and preserved.")
+        return PreservedState(
+            saved_networks=networks, client_connections_state=client.state
+        )
+
+    def restore_preserved_networks_and_client_state(self) -> None:
+        """Restore preserved networks and client state onto device."""
+        if self.preserved_networks_and_client_state is None:
+            self.log.info("No preserved networks or client state to restore")
+            return
+
+        self.sl4f.wlan_policy_lib.remove_all_networks()
+
+        saved_networks = self.preserved_networks_and_client_state.saved_networks
+        if saved_networks is not None:
+            for network in saved_networks:
+                try:
+                    self.sl4f.wlan_policy_lib.save_network(
+                        network.ssid,
+                        network.security_type,
+                        network.credential_value,
+                    )
+                except WlanPolicyError:
+                    self.log.warn(f'Failed to restore network "{network.ssid}"')
+
+        client_state = self.preserved_networks_and_client_state.client_connections_state
+        if client_state is not None:
+            if client_state is WlanClientState.CONNECTIONS_ENABLED:
+                self.sl4f.wlan_policy_lib.start_client_connections()
+            else:
+                self.sl4f.wlan_policy_lib.stop_client_connections()
+
+        self.log.info("Preserved networks and client state restored.")
+        self.preserved_networks_and_client_state = None
diff --git a/packages/antlion/controllers/fuchsia_lib/logging_lib.py b/packages/antlion/controllers/fuchsia_lib/logging_lib.py
new file mode 100644
index 0000000..b2f280d
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/logging_lib.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+from antlion.controllers.fuchsia_lib.base_lib import BaseLib
+
+
class FuchsiaLoggingLib(BaseLib):
    """SL4F library for writing messages directly to the Fuchsia syslog."""

    def __init__(self, addr: str) -> None:
        super().__init__(addr, "logging")

    def _send_log(self, test_cmd: str, message):
        """Send a timestamped message to a logging facade command.

        Shared implementation for logE/logI/logW, which previously duplicated
        this body verbatim.

        Args:
            test_cmd: The SL4F logging facade method to invoke.
            message: The message to log.

        Returns:
            Dictionary, None if success, error if error.
        """
        test_args = {"message": f"[{datetime.datetime.now()}] {message}"}
        return self.send_command(test_cmd, test_args)

    def logE(self, message):
        """Log a message of level Error directly to the syslog.

        Args:
            message: The message to log.

        Returns:
            Dictionary, None if success, error if error.
        """
        return self._send_log("logging_facade.LogErr", message)

    def logI(self, message):
        """Log a message of level Info directly to the syslog.

        Args:
            message: The message to log.

        Returns:
            Dictionary, None if success, error if error.
        """
        return self._send_log("logging_facade.LogInfo", message)

    def logW(self, message):
        """Log a message of level Warning directly to the syslog.

        Args:
            message: The message to log.

        Returns:
            Dictionary, None if success, error if error.
        """
        return self._send_log("logging_facade.LogWarn", message)
diff --git a/src/antlion/controllers/fuchsia_lib/netstack/__init__.py b/packages/antlion/controllers/fuchsia_lib/netstack/__init__.py
similarity index 100%
rename from src/antlion/controllers/fuchsia_lib/netstack/__init__.py
rename to packages/antlion/controllers/fuchsia_lib/netstack/__init__.py
diff --git a/packages/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py b/packages/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
new file mode 100644
index 0000000..732b180
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion.controllers.fuchsia_lib.base_lib import BaseLib
+
+
class FuchsiaNetstackLib(BaseLib):
    """SL4F library wrapping the Fuchsia netstack facade."""

    def __init__(self, addr: str) -> None:
        super().__init__(addr, "netstack")

    def netstackListInterfaces(self):
        """ListInterfaces command

        Returns:
            List of interface paths
        """
        return self.send_command("netstack_facade.ListInterfaces", {})
diff --git a/packages/antlion/controllers/fuchsia_lib/package_server.py b/packages/antlion/controllers/fuchsia_lib/package_server.py
new file mode 100644
index 0000000..6c3883c
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/package_server.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import shutil
+import socket
+import subprocess
+import tarfile
+import tempfile
+from dataclasses import dataclass
+from datetime import datetime
+from typing import TextIO
+
+from mobly import logger, signals
+
+from antlion import context, utils
+from antlion.controllers.fuchsia_lib.ssh import SSHError, SSHProvider
+from antlion.net import wait_for_port
+
# Repository name devices use by default to resolve packages.
DEFAULT_FUCHSIA_REPO_NAME = "fuchsia.com"
# Seconds to wait for "pm serve" to exit after terminate() before killing it.
PM_SERVE_STOP_TIMEOUT_SEC = 5
+
+
class PackageServerError(signals.TestAbortClass):
    """Raised when the package server cannot be set up or used.

    Subclasses signals.TestAbortClass, so raising it aborts the test class.
    """

    pass
+
+
def random_port() -> int:
    """Return a TCP port number that was free at the time of the call.

    Binds an ephemeral port and reads the assigned number. The socket is
    closed before returning (the original implementation leaked it), so the
    port is only *likely* free afterward — a race with other processes is
    possible.

    Returns:
        A usable TCP port number.
    """
    # Context manager ensures the socket is closed, fixing the leak.
    with socket.socket() as s:
        s.bind(("", 0))
        return int(s.getsockname()[1])
+
+
@dataclass
class Route:
    """Represent a route in the routing table."""

    # Source address this route prefers ("prefsrc" in `ip -json route` output);
    # None when the route entry does not specify one.
    preferred_source: str | None
+
+
def find_routes_to(dest_ip: str) -> list[Route]:
    """Find the routes used to reach a destination.

    Look through the routing table for the routes that would be used without
    sending any packets. This is especially helpful for when the device is
    currently unreachable.

    Only natively supported on Linux. MacOS has iproute2mac, but it doesn't
    support JSON formatted output.

    TODO(http://b/238924195): Add support for MacOS.

    Args:
        dest_ip: IP address of the destination

    Raises:
        CalledProcessError: if the ip command returns a non-zero exit code
        JSONDecodeError: if the ip command doesn't return JSON

    Returns:
        Routes with destination to dest_ip.
    """
    # Build the argv explicitly rather than f-string + split(), which would
    # mis-tokenize any argument containing whitespace.
    resp = subprocess.run(
        ["ip", "-json", "route", "get", str(dest_ip)],
        capture_output=True,
        check=True,
    )
    routes = json.loads(resp.stdout)
    # "prefsrc" is optional in iproute2 output; .get() maps absence to None.
    return [Route(r.get("prefsrc")) for r in routes]
+
+
def find_host_ip(device_ip: str) -> str:
    """Find the host's source IP used to reach a device.

    Not all host interfaces can talk to a given device. This limitation can
    either be physical through hardware or virtual through routing tables.
    Look through the routing table without sending any packets then return the
    preferred source IP address.

    Args:
        device_ip: IP address of the device

    Raises:
        PackageServerError: if there are multiple or no routes to device_ip, or
            if the route doesn't contain "prefsrc"

    Returns:
        The host IP used to reach device_ip.
    """
    matching_routes = find_routes_to(device_ip)
    if len(matching_routes) != 1:
        raise PackageServerError(
            f"Expected only one route to {device_ip}, got {matching_routes}"
        )

    # Exactly one route; unpack it.
    (route,) = matching_routes
    if not route.preferred_source:
        raise PackageServerError(f'Route does not contain "prefsrc": {route}')
    return route.preferred_source
+
+
class PackageServer:
    """Package manager for Fuchsia; an interface to the "pm" CLI tool."""

    def __init__(self, packages_archive_path: str) -> None:
        """
        Args:
            packages_archive_path: Path to an archive containing the pm binary
                and amber-files.

        Raises:
            TestAbortClass: when the extracted repository metadata has expired.
        """
        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: "[pm]",
            },
        )

        self._server_log: TextIO | None = None
        self._server_proc: subprocess.Popen | None = None
        self._log_path: str | None = None

        self._tmp_dir = tempfile.mkdtemp(prefix="packages-")
        # Context manager closes the archive's file handle after extraction;
        # the previous code left it open until garbage collection.
        with tarfile.open(packages_archive_path, "r:gz") as tar:
            tar.extractall(self._tmp_dir)

        self._binary_path = os.path.join(self._tmp_dir, "pm")
        self._packages_path = os.path.join(self._tmp_dir, "amber-files")
        self._port = random_port()

        self._assert_repo_has_not_expired()

    def clean_up(self) -> None:
        """Stop the server if running and delete the extracted package files."""
        if self._server_proc:
            self.stop_server()
        if self._tmp_dir:
            shutil.rmtree(self._tmp_dir)

    def _assert_repo_has_not_expired(self) -> None:
        """Abort if the repository metadata has expired.

        Raises:
            TestAbortClass: when the timestamp.json file has expired
        """
        with open(f"{self._packages_path}/repository/timestamp.json", "r") as f:
            data = json.load(f)
            expiresAtRaw = data["signed"]["expires"]
            expiresAt = datetime.strptime(expiresAtRaw, "%Y-%m-%dT%H:%M:%SZ")
            if expiresAt <= datetime.now():
                raise signals.TestAbortClass(
                    f"{self._packages_path}/repository/timestamp.json has expired on {expiresAtRaw}"
                )

    def start(self) -> None:
        """Start the package server.

        Does not check for errors; view the log file for any errors.

        Raises:
            TimeoutError: when the server does not expose its port in time.
        """
        if self._server_proc:
            # warning() replaces the deprecated Logger.warn() alias.
            self.log.warning(
                "Skipping to start the server since it has already been started"
            )
            return

        pm_command = f"{self._binary_path} serve -c 2 -repo {self._packages_path} -l :{self._port}"

        root_dir = context.get_current_context().get_full_output_path()
        epoch = utils.get_current_epoch_time()
        time_stamp = logger.normalize_log_line_timestamp(
            logger.epoch_to_log_line_timestamp(epoch)
        )
        self._log_path = os.path.join(root_dir, f"pm_server.{time_stamp}.log")

        self._server_log = open(self._log_path, "a+")
        self._server_proc = subprocess.Popen(
            pm_command.split(),
            # Run in its own process group so signals sent to the test runner
            # do not propagate to the server.
            preexec_fn=os.setpgrp,
            stdout=self._server_log,
            stderr=subprocess.STDOUT,
        )
        try:
            wait_for_port("127.0.0.1", self._port)
        except TimeoutError as e:
            # Keep `logs` defined even if the log file is unavailable; the
            # previous code raised NameError in that case.
            logs = ""
            if self._server_log:
                self._server_log.close()
            if self._log_path:
                with open(self._log_path, "r") as f:
                    logs = f.read()
            raise TimeoutError(
                f"pm serve failed to expose port {self._port}. Logs:\n{logs}"
            ) from e

        self.log.info(f"Serving packages on port {self._port}")

    def configure_device(
        self, ssh: SSHProvider, repo_name=DEFAULT_FUCHSIA_REPO_NAME
    ) -> None:
        """Configure the device to use this package server.

        Args:
            ssh: Device SSH transport channel
            repo_name: Name of the repo to alias this package server
        """
        # Remove any existing repositories that may be stale.
        try:
            ssh.run(f"pkgctl repo rm fuchsia-pkg://{repo_name}")
        except SSHError as e:
            # NOT_FOUND means there was no stale repo, which is fine.
            if "NOT_FOUND" not in e.result.stderr:
                raise e

        # Configure the device with the new repository.
        host_ip = find_host_ip(ssh.config.host_name)
        repo_url = f"http://{host_ip}:{self._port}"
        ssh.run(f"pkgctl repo add url -f 2 -n {repo_name} {repo_url}/config.json")
        self.log.info(
            f'Added repo "{repo_name}" as {repo_url} on device {ssh.config.host_name}'
        )

    def stop_server(self) -> None:
        """Stop the package server."""
        if not self._server_proc:
            self.log.warning(
                "Skipping to stop the server since it hasn't been started yet"
            )
            return

        self._server_proc.terminate()
        try:
            self._server_proc.wait(timeout=PM_SERVE_STOP_TIMEOUT_SEC)
        except subprocess.TimeoutExpired:
            # Graceful shutdown took too long; force-kill the server.
            self.log.warning(
                f"Taking over {PM_SERVE_STOP_TIMEOUT_SEC}s to stop. Killing the server"
            )
            self._server_proc.kill()
            self._server_proc.wait(timeout=PM_SERVE_STOP_TIMEOUT_SEC)
        finally:
            if self._server_log:
                self._server_log.close()

        self._server_proc = None
        self._log_path = None
        self._server_log = None
diff --git a/packages/antlion/controllers/fuchsia_lib/sl4f.py b/packages/antlion/controllers/fuchsia_lib/sl4f.py
new file mode 100644
index 0000000..d43df5c
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/sl4f.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ipaddress
+import logging
+
+from honeydew.interfaces.device_classes.fuchsia_device import (
+    FuchsiaDevice as HdFuchsiaDevice,
+)
+from mobly import logger
+
+from antlion.controllers.fuchsia_lib.device_lib import DeviceLib
+from antlion.controllers.fuchsia_lib.hardware_power_statecontrol_lib import (
+    FuchsiaHardwarePowerStatecontrolLib,
+)
+from antlion.controllers.fuchsia_lib.logging_lib import FuchsiaLoggingLib
+from antlion.controllers.fuchsia_lib.netstack.netstack_lib import FuchsiaNetstackLib
+from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHProvider, SSHError
+from antlion.controllers.fuchsia_lib.wlan_ap_policy_lib import FuchsiaWlanApPolicyLib
+from antlion.controllers.fuchsia_lib.wlan_deprecated_configuration_lib import (
+    FuchsiaWlanDeprecatedConfigurationLib,
+)
+from antlion.controllers.fuchsia_lib.wlan_lib import FuchsiaWlanLib
+from antlion.controllers.fuchsia_lib.wlan_policy_lib import FuchsiaWlanPolicyLib
+from antlion.net import wait_for_port
+
# Port the SL4F HTTP server is reached on by default.
DEFAULT_SL4F_PORT = 80
# Device-side command that launches SL4F as a v2 component.
START_SL4F_V2_CMD = "start_sl4f"
+
+
class SL4F:
    """Module for Fuchsia devices to interact with the SL4F tool.

    Attributes:
        ssh: Transport to start and stop SL4F.
        address: http address for SL4F server including SL4F port.
        log: Logger for the device-specific instance of SL4F.
    """

    def __init__(
        self,
        ssh: FuchsiaSSHProvider,
        port: int = DEFAULT_SL4F_PORT,
        honeydew_fd: HdFuchsiaDevice | None = None,
    ) -> None:
        """
        Args:
            ssh: Transport to start and stop SL4F.
            port: Port for the SL4F server to listen on.
            honeydew_fd: Optional Honeydew device; when provided, SL4F
                reachability is delegated to Honeydew instead of being checked
                here.
        """
        self.honeydew_fd = honeydew_fd
        # IPv6 hosts must be bracketed in URLs; ip_address() only ever yields
        # version 4 or 6, so one of these branches always runs.
        ip = ipaddress.ip_address(ssh.config.host_name)
        if ip.version == 4:
            self.address = f"http://{ip}:{port}"
        elif ip.version == 6:
            self.address = f"http://[{ip}]:{port}"

        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4F | {self.address}]",
            },
        )

        try:
            # Restart SL4F to get a fresh server in CFv2 mode.
            ssh.stop_component("sl4f")
            # The previous code accessed `.stdout` on this result and discarded
            # it; the attribute access had no effect and has been removed.
            ssh.run(START_SL4F_V2_CMD)
        except SSHError:
            # TODO(fxbug.dev/42181764) Remove support to run SL4F in CFv1 mode
            # once ACTS no longer use images that comes with only CFv1 SL4F.
            # (warning() replaces the deprecated Logger.warn() alias.)
            self.log.warning(
                "Running SL4F in CFv1 mode, "
                "this is deprecated for images built after 5/9/2022, "
                "see https://fxbug.dev/42157029 for more info."
            )
            ssh.stop_component("sl4f")
            ssh.start_v1_component("sl4f")

        if not self.honeydew_fd:
            try:
                wait_for_port(ssh.config.host_name, port)
                self.log.info("SL4F server is reachable")
            except TimeoutError as e:
                raise TimeoutError("SL4F server is unreachable") from e
        else:
            # Honeydew has its own transport-specific logic
            self.log.info("Using Honeydew controller")

        self._init_libraries(self.honeydew_fd)

    def _init_libraries(self, honeydew_fd: HdFuchsiaDevice | None = None) -> None:
        """Instantiate one facade library per SL4F subsystem.

        Args:
            honeydew_fd: Optional Honeydew device forwarded to libraries that
                support it.
        """
        # Grab commands from DeviceLib
        self.device_lib = DeviceLib(self.address)

        # Grab commands from FuchsiaHardwarePowerStatecontrolLib
        self.hardware_power_statecontrol_lib = FuchsiaHardwarePowerStatecontrolLib(
            self.address
        )

        # Grab commands from FuchsiaLoggingLib
        self.logging_lib = FuchsiaLoggingLib(self.address)

        # Grab commands from FuchsiaNetstackLib
        self.netstack_lib = FuchsiaNetstackLib(self.address)

        # Grab commands from FuchsiaWlanApPolicyLib
        self.wlan_ap_policy_lib = FuchsiaWlanApPolicyLib(self.address)

        # Grabs command from FuchsiaWlanDeprecatedConfigurationLib
        self.wlan_deprecated_configuration_lib = FuchsiaWlanDeprecatedConfigurationLib(
            self.address
        )

        # Grab commands from FuchsiaWlanLib
        self.wlan_lib = FuchsiaWlanLib(self.address)

        # Grab commands from FuchsiaWlanPolicyLib
        self.wlan_policy_lib = FuchsiaWlanPolicyLib(self.address, honeydew_fd)
diff --git a/packages/antlion/controllers/fuchsia_lib/ssh.py b/packages/antlion/controllers/fuchsia_lib/ssh.py
new file mode 100644
index 0000000..1d1f421
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/ssh.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+
+from antlion.capabilities.ssh import SSHError, SSHProvider
+
# Default login user for SSH sessions to Fuchsia devices.
DEFAULT_SSH_USER: str = "fuchsia"
# Default host-side path to the Fuchsia SSH private key.
DEFAULT_SSH_PRIVATE_KEY: str = "~/.ssh/fuchsia_ed25519"
# The default package repository for all components.
FUCHSIA_PACKAGE_REPO_NAME = "fuchsia.com"
+
+
class FuchsiaSSHProvider(SSHProvider):
    """Device-specific provider for SSH clients."""

    def start_v1_component(
        self,
        component: str,
        timeout_sec: int = 5,
        repo: str = FUCHSIA_PACKAGE_REPO_NAME,
    ) -> None:
        """Start a CFv1 component in the background.

        Args:
            component: Name of the component without ".cmx".
            timeout_sec: Seconds to wait for the process to show up in 'ps'.
            repo: Default package repository for all components.

        Raises:
            TimeoutError: when the component doesn't launch within timeout_sec
        """
        # The "run -d" command will hang when executed without a pseudo-tty
        # allocated.
        self.run(
            f"run -d fuchsia-pkg://{repo}/{component}#meta/{component}.cmx",
            force_tty=True,
        )

        timeout = time.perf_counter() + timeout_sec
        while True:
            ps_cmd = self.run("ps")
            if f"{component}.cmx" in ps_cmd.stdout:
                return
            if time.perf_counter() > timeout:
                raise TimeoutError(
                    f'Failed to start "{component}.cmx" after {timeout_sec}s'
                )
            # Brief pause between polls; the previous tight loop issued
            # back-to-back "ps" commands over SSH with no delay.
            time.sleep(0.5)

    def stop_component(self, component: str, is_cfv2_component: bool = False) -> None:
        """Stop all instances of a CFv1 or CFv2 component.

        Args:
            component: Name of the component without suffix("cm" or "cmx").
            is_cfv2_component: Determines the component suffix to use.
        """
        suffix = "cm" if is_cfv2_component else "cmx"

        try:
            self.run(f"killall {component}.{suffix}")
            self.log.info(f"Stopped component: {component}.{suffix}")
        except SSHError as e:
            # "no tasks found" means the component was not running, which is
            # the desired end state; anything else is a real failure.
            if "no tasks found" in e.result.stderr:
                self.log.debug(f"Could not find component: {component}.{suffix}")
                return
            # Bare raise preserves the original traceback.
            raise
diff --git a/packages/antlion/controllers/fuchsia_lib/utils_lib.py b/packages/antlion/controllers/fuchsia_lib/utils_lib.py
new file mode 100644
index 0000000..90030f0
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/utils_lib.py
@@ -0,0 +1,247 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# TODO(http://b/259746643): Remove this file once we no longer rely on antlion for
+# flashing the device. This should be the responsibility of the person or software
+# dispatching antlion; removing flashing from antlion increases opportunities for
+# runtime optimization and increases device lifetime.
+
+import logging
+import os
+import tarfile
+import tempfile
+import time
+
+from antlion import utils
+from antlion.libs.proc import job
+from antlion.utils import get_fuchsia_mdns_ipv6_address
+
+MDNS_LOOKUP_RETRY_MAX = 3
+FASTBOOT_TIMEOUT = 30
+FLASH_TIMEOUT_SEC = 60 * 5  # 5 minutes
+AFTER_FLASH_BOOT_TIME = 30
+WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC = 360
+PROCESS_CHECK_WAIT_TIME_SEC = 30
+
+FUCHSIA_SDK_URL = "gs://fuchsia-sdk/development"
+FUCHSIA_RELEASE_TESTING_URL = "gs://fuchsia-release-testing/images"
+
+
+def flash(fuchsia_device, use_ssh=False, fuchsia_reconnect_after_reboot_time=5):
+    """A function to flash, not pave, a fuchsia_device
+
+    Args:
+        fuchsia_device: An ACTS fuchsia_device
+
+    Returns:
+        True if successful.
+    """
+    if not fuchsia_device.authorized_file:
+        raise ValueError(
+            "A ssh authorized_file must be present in the "
+            "ACTS config to flash fuchsia_devices."
+        )
+    # This is the product type from the fx set command.
+    # Do 'fx list-products' to see options in Fuchsia source tree.
+    if not fuchsia_device.product_type:
+        raise ValueError(
+            "A product type must be specified to flash " "fuchsia_devices."
+        )
+    # This is the board type from the fx set command.
+    # Do 'fx list-boards' to see options in Fuchsia source tree.
+    if not fuchsia_device.board_type:
+        raise ValueError("A board type must be specified to flash " "fuchsia_devices.")
+    if not fuchsia_device.build_number:
+        fuchsia_device.build_number = "LATEST"
+    if not fuchsia_device.mdns_name:
+        raise ValueError(
+            "Either fuchsia_device mdns_name must be specified or "
+            "ip must be the mDNS name to be able to flash."
+        )
+
+    file_to_download = None
+    image_archive_path = None
+    image_path = None
+
+    if not fuchsia_device.specific_image:
+        product_build = fuchsia_device.product_type
+        if fuchsia_device.build_type:
+            product_build = f"{product_build}_{fuchsia_device.build_type}"
+        if "LATEST" in fuchsia_device.build_number:
+            sdk_version = "sdk"
+            if "LATEST_F" in fuchsia_device.build_number:
+                f_branch = fuchsia_device.build_number.split("LATEST_F", 1)[1]
+                sdk_version = f"f{f_branch}_sdk"
+            file_to_download = (
+                f"{FUCHSIA_RELEASE_TESTING_URL}/"
+                f"{sdk_version}-{product_build}.{fuchsia_device.board_type}-release.tgz"
+            )
+        else:
+            # Must be a fully qualified build number (e.g. 5.20210721.4.1215)
+            file_to_download = (
+                f"{FUCHSIA_SDK_URL}/{fuchsia_device.build_number}/images/"
+                f"{product_build}.{fuchsia_device.board_type}-release.tgz"
+            )
+    elif "gs://" in fuchsia_device.specific_image:
+        file_to_download = fuchsia_device.specific_image
+    elif os.path.isdir(fuchsia_device.specific_image):
+        image_path = fuchsia_device.specific_image
+    elif tarfile.is_tarfile(fuchsia_device.specific_image):
+        image_archive_path = fuchsia_device.specific_image
+    else:
+        raise ValueError(f'Invalid specific_image "{fuchsia_device.specific_image}"')
+
+    if image_path:
+        reboot_to_bootloader(
+            fuchsia_device, use_ssh, fuchsia_reconnect_after_reboot_time
+        )
+        logging.info(
+            f'Flashing {fuchsia_device.mdns_name} with {image_path} using authorized keys "{fuchsia_device.authorized_file}".'
+        )
+        run_flash_script(fuchsia_device, image_path)
+    else:
+        suffix = fuchsia_device.board_type
+        with tempfile.TemporaryDirectory(suffix=suffix) as image_path:
+            if file_to_download:
+                logging.info(f"Downloading {file_to_download} to {image_path}")
+                job.run(f"gsutil cp {file_to_download} {image_path}")
+                image_archive_path = os.path.join(
+                    image_path, os.path.basename(file_to_download)
+                )
+
+            if image_archive_path:
+                # Use tar command instead of tarfile.extractall, as it takes too long.
+                job.run(
+                    f"tar xfvz {image_archive_path} -C {image_path}", timeout_sec=120
+                )
+
+            reboot_to_bootloader(
+                fuchsia_device, use_ssh, fuchsia_reconnect_after_reboot_time
+            )
+
+            logging.info(
+                f'Flashing {fuchsia_device.mdns_name} with {image_archive_path} using authorized keys "{fuchsia_device.authorized_file}".'
+            )
+            run_flash_script(fuchsia_device, image_path)
+    return True
+
+
+def reboot_to_bootloader(
+    fuchsia_device, use_ssh=False, fuchsia_reconnect_after_reboot_time=5
+):
+    import psutil  # type: ignore
+    import usbinfo  # type: ignore
+
+    from antlion.controllers.fuchsia_lib.ssh import SSHError
+
+    if use_ssh:
+        logging.info("Sending reboot command via SSH to " "get into bootloader.")
+        # Sending this command will put the device in fastboot
+        # but it does not guarantee the device will be in fastboot
+        # after this command.  There is no check so if there is an
+        # expectation of the device being in fastboot, then some
+        # other check needs to be done.
+        try:
+            fuchsia_device.ssh.run(
+                "dm rb", timeout_sec=fuchsia_reconnect_after_reboot_time
+            )
+        except SSHError as e:
+            if "closed by remote host" not in e.result.stderr:
+                raise e
+    else:
+        pass
+        # TODO: Add an elif branch for SL4F if reboot is implemented in SL4F.
+
+    time_counter = 0
+    while time_counter < FASTBOOT_TIMEOUT:
+        logging.info(
+            "Checking to see if fuchsia_device(%s) SN: %s is in "
+            "fastboot. (Attempt #%s Timeout: %s)"
+            % (
+                fuchsia_device.mdns_name,
+                fuchsia_device.serial_number,
+                str(time_counter + 1),
+                FASTBOOT_TIMEOUT,
+            )
+        )
+        for usb_device in usbinfo.usbinfo():
+            if (
+                usb_device["iSerialNumber"] == fuchsia_device.serial_number
+                and usb_device["iProduct"] == "USB_download_gadget"
+            ):
+                logging.info(
+                    "fuchsia_device(%s) SN: %s is in fastboot."
+                    % (fuchsia_device.mdns_name, fuchsia_device.serial_number)
+                )
+                time_counter = FASTBOOT_TIMEOUT
+        time_counter = time_counter + 1
+        if time_counter == FASTBOOT_TIMEOUT:
+            for fail_usb_device in usbinfo.usbinfo():
+                logging.debug(fail_usb_device)
+            raise TimeoutError(
+                "fuchsia_device(%s) SN: %s "
+                "never went into fastboot"
+                % (fuchsia_device.mdns_name, fuchsia_device.serial_number)
+            )
+        time.sleep(1)
+
+    end_time = time.time() + WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC
+    # Attempt to wait for existing flashing process to finish
+    while time.time() < end_time:
+        flash_process_found = False
+        for proc in psutil.process_iter():
+            if "bash" in proc.name() and "flash.sh" in proc.cmdline():
+                logging.info("Waiting for existing flash.sh process to complete.")
+                time.sleep(PROCESS_CHECK_WAIT_TIME_SEC)
+                flash_process_found = True
+        if not flash_process_found:
+            break
+
+
+def run_flash_script(fuchsia_device, flash_dir):
+    try:
+        flash_output = job.run(
+            f"bash {flash_dir}/flash.sh --ssh-key={fuchsia_device.authorized_file} -s {fuchsia_device.serial_number}",
+            timeout_sec=FLASH_TIMEOUT_SEC,
+        )
+        logging.debug(flash_output.stderr)
+    except job.TimeoutError as err:
+        raise TimeoutError(err)
+
+    logging.info(
+        "Waiting %s seconds for device"
+        " to come back up after flashing." % AFTER_FLASH_BOOT_TIME
+    )
+    time.sleep(AFTER_FLASH_BOOT_TIME)
+    logging.info("Updating device to new IP addresses.")
+    mdns_ip = None
+    for retry_counter in range(MDNS_LOOKUP_RETRY_MAX):
+        mdns_ip = get_fuchsia_mdns_ipv6_address(fuchsia_device.mdns_name)
+        if mdns_ip:
+            break
+        else:
+            time.sleep(1)
+    if mdns_ip and utils.is_valid_ipv6_address(mdns_ip):
+        logging.info(
+            "IP for fuchsia_device(%s) changed from %s to %s"
+            % (fuchsia_device.mdns_name, fuchsia_device.ip, mdns_ip)
+        )
+        fuchsia_device.ip = mdns_ip
+        fuchsia_device.address = "http://[{}]:{}".format(
+            fuchsia_device.ip, fuchsia_device.sl4f_port
+        )
+    else:
+        raise ValueError(f"Invalid IP: {fuchsia_device.mdns_name} after flashing.")
diff --git a/packages/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py b/packages/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
new file mode 100644
index 0000000..f801046
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import StrEnum, unique
+
+from antlion.controllers.ap_lib.hostapd_security import FuchsiaSecurityType
+from antlion.controllers.fuchsia_lib.base_lib import BaseLib
+
+
+@unique
+class ConnectivityMode(StrEnum):
+    """Connectivity operating mode for the AP.
+
+    Defined by the fuchsia.wlan.policy FIDL and mapped by the SL4F wlan_policy facade.
+
+    https://cs.opensource.google/fuchsia/fuchsia/+/48dd18fe663ad902ecb7f70a45ce19fa605e96b6:sdk/fidl/fuchsia.wlan.policy/access_point_provider.fidl;l=100
+    https://cs.opensource.google/fuchsia/fuchsia/+/48dd18fe663ad902ecb7f70a45ce19fa605e96b6:src/testing/sl4f/src/wlan_policy/commands.rs;l=200
+    """
+
+    LOCAL_ONLY = "local_only"
+    """Allows for connectivity between co-located devices; does not forward traffic to
+    other network connections."""
+
+    UNRESTRICTED = "unrestricted"
+    """Allows for full connectivity with traffic potentially being forwarded
+    to other network connections (e.g. tethering mode)."""
+
+
+@unique
+class OperatingBand(StrEnum):
+    """Operating band for wlan control request and status updates.
+
+    Defined by the fuchsia.wlan.policy FIDL and mapped by the SL4F wlan_policy facade.
+
+    https://cs.opensource.google/fuchsia/fuchsia/+/48dd18fe663ad902ecb7f70a45ce19fa605e96b6:sdk/fidl/fuchsia.wlan.policy/types.fidl;l=54
+    https://cs.opensource.google/fuchsia/fuchsia/+/48dd18fe663ad902ecb7f70a45ce19fa605e96b6:src/testing/sl4f/src/wlan_policy/commands.rs;l=183
+    """
+
+    ANY = "any"
+    """Allows for band switching depending on device operating mode and environment."""
+
+    ONLY_2G = "only_2_4_ghz"
+    """Restricted to 2.4 GHz bands only."""
+
+    ONLY_5G = "only_5_ghz"
+    """Restricted to 5 GHz bands only."""
+
+
+class FuchsiaWlanApPolicyLib(BaseLib):
+    def __init__(self, addr: str) -> None:
+        super().__init__(addr, "wlan_ap_policy")
+
+    def wlanStartAccessPoint(
+        self,
+        target_ssid: str,
+        security_type: FuchsiaSecurityType,
+        target_pwd: str | None,
+        connectivity_mode: ConnectivityMode,
+        operating_band: OperatingBand,
+    ):
+        """Start an Access Point.
+        Args:
+            target_ssid: the network to attempt a connection to
+            security_type: the security protocol of the network
+            target_pwd: Credential being saved with the network. None is equivalent to
+                empty string.
+            connectivity_mode: the connectivity mode to use
+            operating_band: The operating band to use
+
+        Returns:
+            boolean indicating if the action was successful
+        """
+
+        test_cmd = "wlan_ap_policy.start_access_point"
+
+        # The SoftAP API uses "open" security instead of None and "" password
+        # instead of None.
+        test_args = {
+            "target_ssid": target_ssid,
+            "security_type": str(security_type),
+            "target_pwd": target_pwd or "",
+            "connectivity_mode": str(connectivity_mode),
+            "operating_band": str(operating_band),
+        }
+
+        return self.send_command(test_cmd, test_args)
+
+    def wlanStopAccessPoint(
+        self,
+        target_ssid: str,
+        security_type: FuchsiaSecurityType,
+        target_pwd: str | None = None,
+    ):
+        """Stops an active Access Point.
+        Args:
+            target_ssid: the network to attempt a connection to
+            security_type: the security protocol of the network
+            target_pwd: credential being saved with the network. No password
+                is equivalent to empty string.
+
+        Returns:
+            boolean indicating if the action was successful
+        """
+
+        test_cmd = "wlan_ap_policy.stop_access_point"
+
+        test_args = {
+            "target_ssid": target_ssid,
+            "security_type": str(security_type),
+            "target_pwd": "" if target_pwd is None else target_pwd,
+        }
+
+        return self.send_command(test_cmd, test_args)
+
+    def wlanStopAllAccessPoint(self):
+        """Stops all Access Points
+
+        Returns:
+            boolean indicating if the actions were successful
+        """
+
+        test_cmd = "wlan_ap_policy.stop_all_access_points"
+
+        return self.send_command(test_cmd, {})
+
+    def wlanSetNewListener(self):
+        """Sets the update listener stream of the facade to a new stream so that updates will be
+        reset. Intended to be used between tests so that the behaviour of updates in a test is
+        independent from previous tests.
+        """
+        test_cmd = "wlan_ap_policy.set_new_update_listener"
+
+        return self.send_command(test_cmd, {})
+
+    def wlanGetUpdate(self, timeout=30):
+        """Gets a list of AP state updates. This call will return with an update immediately the
+        first time the update listener is initialized by setting a new listener or by creating
+        a client controller before setting a new listener. Subsequent calls will hang until
+        there is an update.
+        Returns:
+            A list of AP state updates. If there is no error, the result is a list with a
+            structure that matches the FIDL AccessPointState struct given for updates.
+        """
+        test_cmd = "wlan_ap_policy.get_update"
+
+        return self.send_command(test_cmd, {}, response_timeout=timeout)
diff --git a/packages/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py b/packages/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
new file mode 100644
index 0000000..df3f66e
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion.controllers.fuchsia_lib.base_lib import BaseLib
+
+
+class FuchsiaWlanDeprecatedConfigurationLib(BaseLib):
+    def __init__(self, addr: str) -> None:
+        super().__init__(addr, "wlan_deprecated")
+
+    def wlanSuggestAccessPointMacAddress(self, addr):
+        """Suggests a mac address to soft AP interface, to support
+        cast legacy behavior.
+
+        Args:
+            addr: string of mac address to suggest (e.g. '12:34:56:78:9a:bc')
+        """
+        test_cmd = "wlan_deprecated.suggest_ap_mac"
+        test_args = {"mac": addr}
+
+        return self.send_command(test_cmd, test_args)
diff --git a/packages/antlion/controllers/fuchsia_lib/wlan_lib.py b/packages/antlion/controllers/fuchsia_lib/wlan_lib.py
new file mode 100644
index 0000000..e37c20b
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/wlan_lib.py
@@ -0,0 +1,464 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from dataclasses import asdict
+from enum import StrEnum
+
+from honeydew import errors
+from honeydew.interfaces.device_classes.fuchsia_device import (
+    FuchsiaDevice as HdFuchsiaDevice,
+)
+from honeydew.typing.wlan import (
+    BssDescription,
+    BssType,
+    ChannelBandwidth,
+    ClientStatusConnected,
+    ClientStatusConnecting,
+    ClientStatusIdle,
+    ClientStatusResponse,
+    CountryCode,
+    Protection,
+    QueryIfaceResponse,
+    WlanChannel,
+    WlanMacRole,
+)
+from mobly import signals
+
+from antlion.controllers.fuchsia_lib.base_lib import BaseLib
+from antlion.validation import MapValidator
+
+STATUS_IDLE_KEY = "Idle"
+STATUS_CONNECTING_KEY = "Connecting"
+
+# We need to convert the string we receive from the wlan facade to an IntEnum
+# because serde gives us a string.
+string_to_int_enum_map: dict[str, Protection] = {
+    "Unknown": Protection.UNKNOWN,
+    "Open": Protection.OPEN,
+    "Wep": Protection.WEP,
+    "Wpa1": Protection.WPA1,
+    "Wpa1Wpa2PersonalTkipOnly": Protection.WPA1_WPA2_PERSONAL_TKIP_ONLY,
+    "Wpa2PersonalTkipOnly": Protection.WPA2_PERSONAL_TKIP_ONLY,
+    "Wpa1Wpa2Personal": Protection.WPA1_WPA2_PERSONAL,
+    "Wpa2Personal": Protection.WPA2_PERSONAL,
+    "Wpa2Wpa3Personal": Protection.WPA2_WPA3_PERSONAL,
+    "Wpa3Personal": Protection.WPA3_PERSONAL,
+    "Wpa2Enterprise": Protection.WPA2_ENTERPRISE,
+    "Wpa3Enterprise": Protection.WPA3_ENTERPRISE,
+}
+
+
+class WlanFailure(signals.TestFailure):
+    """Exception for SL4F commands executed by WLAN lib."""
+
+
+class Command(StrEnum):
+    """Sl4f Server Commands."""
+
+    SCAN_FOR_BSS_INFO = "wlan.scan_for_bss_info"
+    CONNECT = "wlan.connect"
+    DISCONNECT = "wlan.disconnect"
+    STATUS = "wlan.status"
+    GET_IFACE_ID_LIST = "wlan.get_iface_id_list"
+    GET_PHY_ID_LIST = "wlan.get_phy_id_list"
+    CREATE_IFACE = "wlan.create_iface"
+    DESTROY_IFACE = "wlan.destroy_iface"
+    GET_COUNTRY = "wlan_phy.get_country"
+    QUERY_IFACE = "wlan.query_iface"
+    SET_REGION = "location_regulatory_region_facade.set_region"
+
+
+class FuchsiaWlanLib(BaseLib):
+    def __init__(self, addr: str, honeydew_fd: HdFuchsiaDevice | None = None) -> None:
+        super().__init__(addr, "wlan")
+        self.honeydew_fd = honeydew_fd
+
+    def _check_response_error(
+        self, cmd: Command, response_json: dict[str, object]
+    ) -> object | None:
+        """Helper method to process errors from SL4F calls.
+
+        Args:
+            cmd: SL4F command sent.
+            response_json: Response from SL4F server.
+
+        Returns:
+            Response json or None if error.
+
+        Raises:
+            WlanFailure if the response_json has something in the 'error' field.
+        """
+        resp = MapValidator(response_json)
+        error = resp.get(str, "error", None)
+        if error:
+            # We sometimes expect to catch WlanFailure so we include a log here for
+            # when we do retries.
+            self.log.debug(f"SL4F call: {cmd} failed with Error: '{error}'.")
+            raise WlanFailure(f"SL4F call: {cmd} failed with Error: '{error}'.")
+        else:
+            return response_json.get("result")
+
+    def scan_for_bss_info(self) -> dict[str, list[BssDescription]]:
+        """Scans and returns BSS info
+
+        Returns:
+            A dict mapping each seen SSID to a list of BSS Description IE
+            blocks, one for each BSS observed in the network
+
+        Raises:
+            WlanFailure: Sl4f run command failed.
+        """
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan.scan_for_bss_info()
+            except errors.Sl4fError as e:
+                raise WlanFailure(
+                    f"SL4F call {Command.SCAN_FOR_BSS_INFO} failed."
+                ) from e
+        else:
+            resp = self.send_command(Command.SCAN_FOR_BSS_INFO)
+            result = self._check_response_error(Command.SCAN_FOR_BSS_INFO, resp)
+
+            if not isinstance(result, dict):
+                raise TypeError(f'Expected "result" to be dict, got {type(result)}')
+
+            ssid_bss_desc_map: dict[str, list[BssDescription]] = {}
+            for ssid_key, bss_list in result.items():
+                if not isinstance(bss_list, list):
+                    raise TypeError(
+                        f'Expected "bss_list" to be list, got {type(bss_list)}'
+                    )
+
+                # Create BssDescription type out of return values
+                bss_descriptions: list[BssDescription] = []
+                for bss in bss_list:
+                    bss_map = MapValidator(bss)
+                    bssid = bss_map.list("bssid").all(int)
+                    ies = bss_map.list("ies").all(int)
+                    channel_map = MapValidator(bss_map.get(dict, "channel"))
+
+                    wlan_channel = WlanChannel(
+                        primary=channel_map.get(int, "primary"),
+                        cbw=ChannelBandwidth(channel_map.get(str, "cbw")),
+                        secondary80=channel_map.get(int, "secondary80"),
+                    )
+
+                    bss_block = BssDescription(
+                        bssid=bssid,
+                        bss_type=BssType(bss_map.get(str, "bss_type")),
+                        beacon_period=bss_map.get(int, "beacon_period"),
+                        capability_info=bss_map.get(int, "capability_info"),
+                        ies=ies,
+                        channel=wlan_channel,
+                        rssi_dbm=bss_map.get(int, "rssi_dbm"),
+                        snr_db=bss_map.get(int, "snr_db"),
+                    )
+                    bss_descriptions.append(bss_block)
+
+                ssid_bss_desc_map[ssid_key] = bss_descriptions
+
+            return ssid_bss_desc_map
+
+    def connect(
+        self, target_ssid: str, target_pwd: str | None, target_bss_desc: BssDescription
+    ) -> bool:
+        """Triggers a network connection
+        Args:
+            target_ssid: The network to connect to.
+            target_pwd: The password for the network.
+            target_bss_desc: The basic service set for target network.
+
+        Returns:
+            boolean indicating if the connection was successful
+
+        Raises:
+            WlanFailure: Sl4f run command failed.
+        """
+        method_params = {
+            "target_ssid": target_ssid,
+            "target_pwd": target_pwd,
+            "target_bss_desc": asdict(target_bss_desc),
+        }
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan.connect(
+                    target_ssid, target_pwd, target_bss_desc
+                )
+            except errors.Sl4fError as e:
+                raise WlanFailure(f"SL4F call {Command.CONNECT} failed.") from e
+        else:
+            resp = self.send_command(Command.CONNECT, method_params)
+            result = self._check_response_error(Command.CONNECT, resp)
+
+            if not isinstance(result, bool):
+                raise TypeError(f'Expected "result" to be bool, got {type(result)}')
+
+            return result
+
+    def disconnect(self) -> None:
+        """Disconnect any current wifi connections
+
+        Raises:
+            WlanFailure: Sl4f run command failed.
+        """
+        if self.honeydew_fd:
+            try:
+                self.honeydew_fd.wlan.disconnect()
+            except errors.Sl4fError as e:
+                raise WlanFailure(f"SL4F call {Command.DISCONNECT} failed.") from e
+        else:
+            resp = self.send_command(Command.DISCONNECT)
+            self._check_response_error(Command.DISCONNECT, resp)
+
+    def create_iface(
+        self, phy_id: int, role: WlanMacRole, sta_addr: str | None = None
+    ) -> int:
+        """Create a new WLAN interface.
+
+        Args:
+            phy_id: The interface id.
+            role: The role of new interface.
+            sta_addr: MAC address for softAP interface only.
+
+        Returns:
+            Iface id of newly created interface.
+
+        Raises:
+            WlanFailure: Sl4f run command failed.
+        """
+        method_params = {
+            "phy_id": phy_id,
+            "role": role,
+            "sta_addr": sta_addr,
+        }
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan.create_iface(phy_id, role, sta_addr)
+            except errors.Sl4fError as e:
+                raise WlanFailure(f"SL4F call {Command.CREATE_IFACE} failed.") from e
+        else:
+            resp = self.send_command(Command.CREATE_IFACE, method_params)
+            result = self._check_response_error(Command.CREATE_IFACE, resp)
+
+            if not isinstance(result, int):
+                raise TypeError(f'Expected "result" to be int, got {type(result)}')
+
+            return result
+
+    def destroy_iface(self, iface_id: int) -> None:
+        """Destroy WLAN interface by ID.
+
+        Args:
+            iface_id: The interface to destroy.
+
+        Raises:
+            WlanFailure: Sl4f run command failed.
+        """
+        method_params = {"identifier": iface_id}
+        if self.honeydew_fd:
+            try:
+                self.honeydew_fd.wlan.destroy_iface(iface_id)
+            except errors.Sl4fError as e:
+                raise WlanFailure(f"SL4F call {Command.DESTROY_IFACE} failed.") from e
+        else:
+            resp = self.send_command(Command.DESTROY_IFACE, method_params)
+            self._check_response_error(Command.DESTROY_IFACE, resp)
+
+    def get_iface_id_list(self) -> list[int]:
+        """Get list of wlan iface IDs on device.
+
+        Returns:
+            A list of wlan iface IDs that are present on the device.
+
+        Raises:
+            WlanFailure: Sl4f run command failed.
+        """
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan.get_iface_id_list()
+            except errors.Sl4fError as e:
+                raise WlanFailure(
+                    f"SL4F call {Command.GET_IFACE_ID_LIST} failed."
+                ) from e
+        else:
+            resp = self.send_command(Command.GET_IFACE_ID_LIST)
+            result = self._check_response_error(Command.GET_IFACE_ID_LIST, resp)
+
+            if not isinstance(result, list):
+                raise TypeError(f'Expected "result" to be list, got {type(result)}')
+
+            return result
+
+    def get_phy_id_list(self) -> list[int]:
+        """Get list of phy ids on device.
+
+        Returns:
+            A list of phy ids that is present on the device.
+
+        Raises:
+            WlanFailure: Sl4f run command failed.
+        """
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan.get_phy_id_list()
+            except errors.Sl4fError as e:
+                raise WlanFailure(f"SL4F call {Command.GET_PHY_ID_LIST} failed.") from e
+        else:
+            resp = self.send_command(Command.GET_PHY_ID_LIST)
+            result = self._check_response_error(Command.GET_PHY_ID_LIST, resp)
+
+            if not isinstance(result, list):
+                raise TypeError(f'Expected "result" to be list, got {type(result)}')
+
+            return result
+
+    def status(self) -> ClientStatusResponse:
+        """Request connection status
+
+        Returns:
+            ClientStatusResponse state summary and
+            status of various networks connections.
+
+        Raises:
+            WlanFailure: Sl4f run command failed.
+        """
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan.status()
+            except errors.Sl4fError as e:
+                raise WlanFailure(f"SL4F call {Command.STATUS} failed.") from e
+        else:
+            resp = self.send_command(Command.STATUS)
+            result = self._check_response_error(Command.STATUS, resp)
+
+            if not isinstance(result, dict):
+                raise TypeError(f'Expected "result" to be dict, got {type(result)}')
+
+            result_map = MapValidator(result)
+            # Only one of these keys in result should be present.
+            if STATUS_IDLE_KEY in result:
+                return ClientStatusIdle()
+            elif STATUS_CONNECTING_KEY in result:
+                ssid = result.get("Connecting")
+                if not isinstance(ssid, list):
+                    raise TypeError(
+                        f'Expected "connecting" to be list, got "{type(ssid)}"'
+                    )
+                return ClientStatusConnecting(ssid=ssid)
+            else:
+                connected_map = MapValidator(result_map.get(dict, "Connected"))
+                channel_map = MapValidator(connected_map.get(dict, "channel"))
+                bssid = connected_map.list("bssid").all(int)
+                ssid = connected_map.list("ssid").all(int)
+                protection = connected_map.get(str, "protection")
+
+                channel = WlanChannel(
+                    primary=channel_map.get(int, "primary"),
+                    cbw=ChannelBandwidth(channel_map.get(str, "cbw")),
+                    secondary80=channel_map.get(int, "secondary80"),
+                )
+
+                return ClientStatusConnected(
+                    bssid=bssid,
+                    ssid=ssid,
+                    rssi_dbm=connected_map.get(int, "rssi_dbm"),
+                    snr_db=connected_map.get(int, "snr_db"),
+                    channel=channel,
+                    protection=Protection(string_to_int_enum_map.get(protection, 0)),
+                )
+
+    def get_country(self, phy_id: int) -> CountryCode:
+        """Reads the currently configured country for `phy_id`.
+
+        Args:
+            phy_id: unsigned 16-bit integer.
+
+        Returns:
+            The currently configured country code from phy_id.
+
+        Raises:
+            WlanFailure: Sl4f run command failed.
+        """
+        method_params = {"phy_id": phy_id}
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan.get_country(phy_id)
+            except errors.Sl4fError as e:
+                raise WlanFailure(f"SL4F call {Command.GET_COUNTRY} failed.") from e
+        else:
+            resp = self.send_command(Command.GET_COUNTRY, method_params)
+            result = self._check_response_error(Command.GET_COUNTRY, resp)
+
+            if not isinstance(result, list):
+                raise TypeError(f'Expected "result" to be list, got {type(result)}')
+
+            set_code = "".join([chr(ascii_char) for ascii_char in result])
+
+            return CountryCode(set_code)
+
+    def query_iface(self, iface_id: int) -> QueryIfaceResponse:
+        """Retrieves interface info for given wlan iface id.
+
+        Args:
+            iface_id: The iface_id to query
+
+        Returns:
+            QueryIfaceResults from the SL4F server
+
+        Raises:
+            WlanFailure: Sl4f run command failed.
+        """
+        method_params = {"iface_id": iface_id}
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan.query_iface(iface_id)
+            except errors.Sl4fError as e:
+                raise WlanFailure(f"SL4F call {Command.QUERY_IFACE} failed.") from e
+        else:
+            resp = self.send_command(Command.QUERY_IFACE, method_params)
+            result = self._check_response_error(Command.QUERY_IFACE, resp)
+
+            if not isinstance(result, dict):
+                raise TypeError(f'Expected "network" to be dict, got {type(result)}')
+
+            iface_results = MapValidator(result)
+            sta_addr = iface_results.list("sta_addr")
+
+            return QueryIfaceResponse(
+                role=WlanMacRole(iface_results.get(str, "role")),
+                id=iface_results.get(int, "id"),
+                phy_id=iface_results.get(int, "phy_id"),
+                phy_assigned_id=iface_results.get(int, "phy_assigned_id"),
+                sta_addr=sta_addr.all(int),
+            )
+
+    def set_region(self, region_code: CountryCode) -> None:
+        """Set regulatory region.
+
+        Args:
+            region_code: CountryCode which is a 2-byte ASCII string.
+
+        Raises:
+            WlanFailure: Sl4f run command failed.
+        """
+        method_params = {"region": region_code.value}
+        if self.honeydew_fd:
+            try:
+                self.honeydew_fd.wlan.set_region(region_code)
+            except errors.Sl4fError as e:
+                raise WlanFailure(f"SL4F call {Command.SET_REGION} failed.") from e
+        else:
+            resp = self.send_command(Command.SET_REGION, method_params)
+            self._check_response_error(Command.SET_REGION, resp)
diff --git a/packages/antlion/controllers/fuchsia_lib/wlan_policy_lib.py b/packages/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
new file mode 100644
index 0000000..ff16637
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
@@ -0,0 +1,415 @@
+# Lint as: python3
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#   This class provides a pipeline between Python tests and the WLAN policy facade.
+
+from enum import StrEnum
+
+from honeydew import errors
+from honeydew.interfaces.device_classes.fuchsia_device import (
+    FuchsiaDevice as HdFuchsiaDevice,
+)
+from honeydew.typing.wlan import (
+    ClientStateSummary,
+    ConnectionState,
+    DisconnectStatus,
+    NetworkConfig,
+    NetworkIdentifier,
+    NetworkState,
+    RequestStatus,
+    SecurityType,
+    WlanClientState,
+)
+
+from antlion.controllers.ap_lib.hostapd_security import FuchsiaSecurityType
+from antlion.controllers.fuchsia_lib.base_lib import BaseLib
+from antlion.validation import MapValidator
+
+DEFAULT_UPDATE_TIMEOUT_SEC: float = 30.0
+
+
+class WlanPolicyError(Exception):
+    """Raised when an SL4F command executed by the WLAN Policy facade fails."""
+
+
+class Command(StrEnum):
+    """SL4F server method names for the WLAN Policy facade."""
+
+    CONNECT = "wlan_policy.connect"
+    CREATE_CLIENT_CONTROLLER = "wlan_policy.create_client_controller"
+    GET_SAVED_NETWORKS = "wlan_policy.get_saved_networks"
+    GET_UPDATE = "wlan_policy.get_update"
+    REMOVE_ALL_NETWORKS = "wlan_policy.remove_all_networks"
+    REMOVE_NETWORK = "wlan_policy.remove_network"
+    SAVE_NETWORK = "wlan_policy.save_network"
+    SCAN_FOR_NETWORKS = "wlan_policy.scan_for_networks"
+    SET_NEW_UPDATE_LISTENER = "wlan_policy.set_new_update_listener"
+    START_CLIENT_CONNECTIONS = "wlan_policy.start_client_connections"
+    STOP_CLIENT_CONNECTIONS = "wlan_policy.stop_client_connections"
+
+
+class FuchsiaWlanPolicyLib(BaseLib):
+    def __init__(self, addr: str, honeydew_fd: HdFuchsiaDevice | None = None) -> None:
+        super().__init__(addr, "wlan_policy")
+        self.honeydew_fd = honeydew_fd
+
+    def _check_response_error(
+        self, cmd: Command, response_json: dict[str, object]
+    ) -> object | None:
+        """Helper method to process errors from SL4F calls.
+
+        Args:
+            cmd: SL4F command sent.
+            response_json: Response from SL4F server.
+
+        Returns:
+            Response json or None if error.
+
+        Raises:
+            WlanPolicyError if the response_json has something in the 'error' field.
+        """
+        resp = MapValidator(response_json)
+        error = resp.get(str, "error", None)
+        if error:
+            # We sometimes expect to catch WlanPolicyError so we include a log here for
+            # when we do retries.
+            self.log.debug(f"SL4F call: {cmd} failed with Error: '{error}'.")
+            raise WlanPolicyError(f"SL4F call: {cmd} failed with Error: '{error}'.")
+        else:
+            return response_json.get("result")
+
+    def connect(
+        self, target_ssid: str, security_type: SecurityType | FuchsiaSecurityType
+    ) -> RequestStatus:
+        """Triggers connection to a network.
+
+        Args:
+            target_ssid: The network to connect to. Must have been previously
+                saved in order for a successful connection to happen.
+            security_type: The security protocol of the network.
+
+        Returns:
+            A RequestStatus response to the connect request.
+
+        Raises:
+            WlanPolicyError: Sl4f run command failed.
+            TypeError: Return value not a string.
+        """
+        # TODO(b/308807691): Change other uses of FuchsiaSecurityType to Honeydew's
+        # SecurityType
+        hd_security_type = SecurityType(security_type.value)
+        method_params = {
+            "target_ssid": target_ssid,
+            "security_type": str(hd_security_type),
+        }
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan_policy.connect(
+                    target_ssid, hd_security_type
+                )
+            except errors.Sl4fError as e:
+                raise WlanPolicyError from e
+        else:
+            resp = self.send_command(Command.CONNECT, method_params)
+            result = self._check_response_error(Command.CONNECT, resp)
+
+            if not isinstance(result, str):
+                raise TypeError(f'Expected "result" to be str, got {type(result)}')
+
+            return RequestStatus(result)
+
+    def create_client_controller(self) -> None:
+        """Initializes the client controller.
+
+        Raises:
+            WlanPolicyError: Sl4f run command failed.
+        """
+        if self.honeydew_fd:
+            try:
+                self.honeydew_fd.wlan_policy.create_client_controller()
+            except errors.Sl4fError as e:
+                raise WlanPolicyError from e
+        else:
+            resp = self.send_command(Command.CREATE_CLIENT_CONTROLLER)
+            self._check_response_error(Command.CREATE_CLIENT_CONTROLLER, resp)
+
+    def get_saved_networks(self) -> list[NetworkConfig]:
+        """Gets networks saved on device.
+
+        Returns:
+            A list of NetworkConfigs.
+
+        Raises:
+            WlanPolicyError: Sl4f run command failed.
+            TypeError: Return values not correct types.
+        """
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan_policy.get_saved_networks()
+            except errors.Sl4fError as e:
+                raise WlanPolicyError from e
+        else:
+            resp = self.send_command(Command.GET_SAVED_NETWORKS)
+            result = self._check_response_error(Command.GET_SAVED_NETWORKS, resp)
+
+            if not isinstance(result, list):
+                raise TypeError(f'Expected "result" to be list, got {type(result)}')
+
+            networks: list[NetworkConfig] = []
+            for n in result:
+                if not isinstance(n, dict):
+                    raise TypeError(f'Expected "network" to be dict, got {type(n)}')
+
+                network = MapValidator(n)
+                security_type = network.get(str, "security_type", "None")
+                networks.append(
+                    NetworkConfig(
+                        ssid=network.get(str, "ssid"),
+                        security_type=SecurityType(security_type.lower()),
+                        credential_type=network.get(str, "credential_type"),
+                        credential_value=network.get(str, "credential_value"),
+                    )
+                )
+            return networks
+
+    def get_update(
+        self, timeout: float = DEFAULT_UPDATE_TIMEOUT_SEC
+    ) -> ClientStateSummary:
+        """Gets one client listener update.
+
+        This call will return with an update immediately the
+        first time the update listener is initialized by setting a new listener
+        or by creating a client controller before setting a new listener.
+        Subsequent calls will hang until there is a change since the last
+        update call.
+
+        Args:
+            timeout: Timeout in seconds to wait for the get_update command to
+                return.
+
+        Returns:
+            An update of connection status. If there is no error, the result is
+            a WlanPolicyUpdate with a structure that matches the FIDL
+            ClientStateSummary struct given for updates.
+
+        Raises:
+            WlanPolicyError: Sl4f run command failed.
+            TypeError: Return values not correct types.
+        """
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan_policy.get_update(response_timeout=timeout)
+            except errors.Sl4fError as e:
+                raise WlanPolicyError from e
+        else:
+            resp = self.send_command(Command.GET_UPDATE, response_timeout=timeout)
+            result_raw = self._check_response_error(Command.GET_UPDATE, resp)
+
+            if not isinstance(result_raw, dict):
+                raise TypeError(f'Expected "result" to be dict, got {type(result_raw)}')
+
+            result = MapValidator(result_raw)
+            networks = result.get(list, "networks", [])
+
+            network_states: list[NetworkState] = []
+            for n in networks:
+                network = MapValidator(n)
+                state = network.get(str, "state")
+                status = network.get(str, "status", None)
+                if status is None:
+                    status = DisconnectStatus.CONNECTION_STOPPED
+
+                id = MapValidator(network.get(dict, "id"))
+                ssid = id.get(str, "ssid")
+                security_type = id.get(str, "type_")
+
+                network_states.append(
+                    NetworkState(
+                        network_identifier=NetworkIdentifier(
+                            ssid=ssid,
+                            security_type=SecurityType(security_type.lower()),
+                        ),
+                        connection_state=ConnectionState(state),
+                        disconnect_status=DisconnectStatus(status),
+                    )
+                )
+            client_state = result.get(str, "state")
+            return ClientStateSummary(
+                state=WlanClientState(client_state), networks=network_states
+            )
+
+    def remove_all_networks(self) -> None:
+        """Deletes all saved networks on the device.
+
+        Raises:
+            WlanPolicyError: Sl4f run command failed.
+        """
+        if self.honeydew_fd:
+            try:
+                self.honeydew_fd.wlan_policy.remove_all_networks()
+            except errors.Sl4fError as e:
+                raise WlanPolicyError from e
+        else:
+            resp = self.send_command(Command.REMOVE_ALL_NETWORKS)
+            self._check_response_error(Command.REMOVE_ALL_NETWORKS, resp)
+
+    def remove_network(
+        self,
+        target_ssid: str,
+        security_type: SecurityType,
+        target_pwd: str | None = None,
+    ) -> None:
+        """Removes or "forgets" a network from saved networks.
+
+        Args:
+            target_ssid: The network to remove.
+            security_type: The security protocol of the network.
+            target_pwd: The credential being saved with the network. No password
+                is equivalent to an empty string.
+
+        Raises:
+            WlanPolicyError: Sl4f run command failed.
+        """
+        if not target_pwd:
+            target_pwd = ""
+
+        method_params = {
+            "target_ssid": target_ssid,
+            "security_type": str(security_type),
+            "target_pwd": target_pwd,
+        }
+        if self.honeydew_fd:
+            try:
+                self.honeydew_fd.wlan_policy.remove_network(
+                    target_ssid, security_type, target_pwd
+                )
+            except errors.Sl4fError as e:
+                raise WlanPolicyError from e
+        else:
+            resp = self.send_command(Command.REMOVE_NETWORK, method_params)
+            self._check_response_error(Command.REMOVE_NETWORK, resp)
+
+    def save_network(
+        self,
+        target_ssid: str,
+        security_type: SecurityType | FuchsiaSecurityType,
+        target_pwd: str | None = None,
+    ) -> None:
+        """Saves a network to the device.
+
+        Args:
+            target_ssid: The network to save.
+            security_type: The security protocol of the network.
+            target_pwd: The credential being saved with the network. No password
+                is equivalent to an empty string.
+
+        Raises:
+            WlanPolicyError: Sl4f run command failed.
+        """
+        # TODO(b/308807691): Change other uses of FuchsiaSecurityType to Honeydew's
+        # SecurityType
+        hd_security_type = SecurityType(security_type.value)
+        if not target_pwd:
+            target_pwd = ""
+
+        method_params = {
+            "target_ssid": target_ssid,
+            "security_type": str(hd_security_type.value),
+            "target_pwd": target_pwd,
+        }
+        if self.honeydew_fd:
+            try:
+                self.honeydew_fd.wlan_policy.save_network(
+                    target_ssid, hd_security_type, target_pwd
+                )
+            except errors.Sl4fError as e:
+                raise WlanPolicyError from e
+        else:
+            resp = self.send_command(Command.SAVE_NETWORK, method_params)
+            self._check_response_error(Command.SAVE_NETWORK, resp)
+
+    def scan_for_networks(self) -> list[str]:
+        """Scans for networks.
+
+        Returns:
+            A list of network SSIDs that can be connected to.
+
+        Raises:
+            WlanPolicyError: Sl4f run command failed.
+            TypeError: Return value not a list.
+        """
+        if self.honeydew_fd:
+            try:
+                return self.honeydew_fd.wlan_policy.scan_for_network()
+            except errors.Sl4fError as e:
+                raise WlanPolicyError from e
+        else:
+            resp = self.send_command(Command.SCAN_FOR_NETWORKS)
+            result = self._check_response_error(Command.SCAN_FOR_NETWORKS, resp)
+
+            if not isinstance(result, list):
+                raise TypeError(f'Expected "result" to be list, got {type(result)}')
+
+            return result
+
+    def set_new_update_listener(self) -> None:
+        """Sets the update listener stream of the facade to a new stream.
+        This causes updates to be reset. Intended to be used between tests so
+        that the behaviour of updates in a test is independent from previous
+        tests.
+
+        Raises:
+            WlanPolicyError: Sl4f run command failed.
+        """
+        if self.honeydew_fd:
+            try:
+                self.honeydew_fd.wlan_policy.set_new_update_listener()
+            except errors.Sl4fError as e:
+                raise WlanPolicyError from e
+        else:
+            resp = self.send_command(Command.SET_NEW_UPDATE_LISTENER)
+            self._check_response_error(Command.SET_NEW_UPDATE_LISTENER, resp)
+
+    def start_client_connections(self) -> None:
+        """Enables device to initiate connections to networks.
+
+        Raises:
+            WlanPolicyError: Sl4f run command failed.
+        """
+        if self.honeydew_fd:
+            try:
+                self.honeydew_fd.wlan_policy.start_client_connections()
+            except errors.Sl4fError as e:
+                raise WlanPolicyError from e
+        else:
+            resp = self.send_command(Command.START_CLIENT_CONNECTIONS)
+            self._check_response_error(Command.START_CLIENT_CONNECTIONS, resp)
+
+    def stop_client_connections(self) -> None:
+        """Disables device for initiating connections to networks.
+
+        Raises:
+            WlanPolicyError: Sl4f run command failed.
+        """
+        if self.honeydew_fd:
+            try:
+                self.honeydew_fd.wlan_policy.stop_client_connections()
+            except errors.Sl4fError as e:
+                raise WlanPolicyError from e
+        else:
+            resp = self.send_command(Command.STOP_CLIENT_CONNECTIONS)
+            self._check_response_error(Command.STOP_CLIENT_CONNECTIONS, resp)
diff --git a/packages/antlion/controllers/iperf_client.py b/packages/antlion/controllers/iperf_client.py
new file mode 100644
index 0000000..4731d14
--- /dev/null
+++ b/packages/antlion/controllers/iperf_client.py
@@ -0,0 +1,335 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+import os
+import socket
+import subprocess
+import threading
+from abc import ABC, abstractmethod
+
+from antlion import context
+from antlion.capabilities.ssh import SSHConfig
+from antlion.controllers.adb_lib.error import AdbCommandError
+from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.fuchsia_lib.ssh import SSHProvider
+from antlion.libs.proc import job
+from antlion.types import ControllerConfig
+from antlion.validation import MapValidator
+
+# Registry names Mobly/ACTS use to look up iperf client configs.
+MOBLY_CONTROLLER_CONFIG_NAME = "IPerfClient"
+ACTS_CONTROLLER_REFERENCE_NAME = "iperf_clients"
+
+
+class IPerfError(Exception):
+    """Raised when an iPerf invocation fails or reports an error."""
+
+
+def create(configs: list[ControllerConfig]) -> list[IPerfClientBase]:
+    """Factory method for iperf clients.
+
+    For each config that contains an "ssh_config" entry, an IPerfClientOverSsh
+    is created to run iperf on that remote machine; otherwise a local
+    IPerfClient is created.
+
+    Args:
+        configs: Config parameters for the iperf clients.
+
+    Returns:
+        One iperf client per config entry.
+    """
+    results: list[IPerfClientBase] = []
+    for config in configs:
+        c = MapValidator(config)
+        if "ssh_config" in config:
+            results.append(
+                IPerfClientOverSsh(
+                    SSHProvider(SSHConfig.from_config(c.get(dict, "ssh_config"))),
+                    test_interface=c.get(str, "test_interface"),
+                )
+            )
+        else:
+            results.append(IPerfClient())
+    return results
+
+
+def get_info(iperf_clients) -> None:
+    """Placeholder for info about iperf clients.
+
+    iperf clients expose no queryable controller info.
+
+    Returns:
+        None
+    """
+    return None
+
+
+def destroy(_) -> None:
+    """Tears down iperf clients; they hold no resources, so nothing to do."""
+    # No cleanup needed.
+    pass
+
+
+class RouteNotFound(ConnectionError):
+    """Raised when no route to the iperf server can be found."""
+
+
+class IPerfClientBase(ABC):
+    """The Base class for all IPerfClients.
+
+    This base class is responsible for synchronizing the logging to prevent
+    multiple IPerfClients from writing results to the same file, as well
+    as providing the interface for IPerfClient objects.
+    """
+
+    # Keeps track of the number of IPerfClient logs to prevent file name
+    # collisions.
+    __log_file_counter = 0
+
+    # Guards __log_file_counter and output-directory creation across threads.
+    __log_file_lock = threading.Lock()
+
+    @property
+    @abstractmethod
+    def test_interface(self) -> str | None:
+        """Find the test interface.
+
+        Returns:
+            Name of the interface used to communicate with server_ap, or None if
+            not set.
+        """
+        ...
+
+    @staticmethod
+    def _get_full_file_path(tag: str = "") -> str:
+        """Returns the full file path for the IPerfClient log file.
+
+        Note: If the directory for the file path does not exist, it will be
+        created.
+
+        Args:
+            tag: The tag passed in to the server run.
+        """
+        current_context = context.get_current_context()
+        full_out_dir = os.path.join(
+            current_context.get_full_output_path(), "iperf_client_files"
+        )
+
+        with IPerfClientBase.__log_file_lock:
+            os.makedirs(full_out_dir, exist_ok=True)
+            # Empty/None tags are filtered out so the file name stays compact.
+            tags = ["IPerfClient", tag, IPerfClientBase.__log_file_counter]
+            out_file_name = "%s.log" % (
+                ",".join([str(x) for x in tags if x != "" and x is not None])
+            )
+            IPerfClientBase.__log_file_counter += 1
+
+        return os.path.join(full_out_dir, out_file_name)
+
+    # Not marked @abstractmethod; subclasses are still expected to override.
+    def start(
+        self,
+        ip: str,
+        iperf_args: str,
+        tag: str,
+        timeout: int = 3600,
+        iperf_binary: str | None = None,
+    ) -> str:
+        """Starts iperf client, and waits for completion.
+
+        Args:
+            ip: iperf server ip address.
+            iperf_args: A string representing arguments to start iperf
+                client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
+            tag: A string to further identify iperf results file
+            timeout: the maximum amount of time the iperf client can run.
+            iperf_binary: Location of iperf3 binary. If none, it is assumed
+                that the binary is in the path.
+
+        Returns:
+            full_out_path: iperf result path.
+        """
+        raise NotImplementedError("start() must be implemented.")
+
+
+class IPerfClient(IPerfClientBase):
+    """Class that handles iperf3 client operations."""
+
+    @property
+    def test_interface(self) -> str | None:
+        return None
+
+    def start(
+        self,
+        ip: str,
+        iperf_args: str,
+        tag: str,
+        timeout: int = 3600,
+        iperf_binary: str | None = None,
+    ):
+        """Starts iperf client, and waits for completion.
+
+        Args:
+            ip: iperf server ip address.
+            iperf_args: A string representing arguments to start iperf
+            client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
+            tag: tag to further identify iperf results file
+            timeout: unused.
+            iperf_binary: Location of iperf3 binary. If none, it is assumed the
+                the binary is in the path.
+
+        Returns:
+            full_out_path: iperf result path.
+        """
+        if not iperf_binary:
+            logging.debug(
+                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+            )
+            iperf_binary = "iperf3"
+        else:
+            logging.debug(f"Using iperf3 binary located at {iperf_binary}")
+        iperf_cmd = [str(iperf_binary), "-c", ip] + iperf_args.split(" ")
+        full_out_path = self._get_full_file_path(tag)
+
+        with open(full_out_path, "w") as out_file:
+            subprocess.call(iperf_cmd, stdout=out_file)
+
+        return full_out_path
+
+
+class IPerfClientOverSsh(IPerfClientBase):
+    """Class that handles iperf3 client operations on remote machines."""
+
+    def __init__(
+        self,
+        ssh_provider: SSHProvider,
+        test_interface: str | None = None,
+    ):
+        # SSH connection used to run iperf on the remote machine.
+        self._ssh_provider = ssh_provider
+        # Interface used to reach the iperf server; None when unspecified.
+        self._test_interface = test_interface
+
+    @property
+    def test_interface(self) -> str | None:
+        return self._test_interface
+
+    def start(
+        self,
+        ip: str,
+        iperf_args: str,
+        tag: str,
+        timeout: int = 3600,
+        iperf_binary: str | None = None,
+    ) -> str:
+        """Starts iperf client, and waits for completion.
+
+        Args:
+            ip: iperf server ip address.
+            iperf_args: A string representing arguments to start iperf
+            client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
+            tag: tag to further identify iperf results file
+            timeout: the maximum amount of time to allow the iperf client to run
+            iperf_binary: Location of iperf3 binary. If none, it is assumed
+                that the binary is in the path.
+
+        Returns:
+            full_out_path: iperf result path.
+        """
+        if not iperf_binary:
+            logging.debug(
+                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+            )
+            iperf_binary = "iperf3"
+        else:
+            logging.debug(f"Using iperf3 binary located at {iperf_binary}")
+        iperf_cmd = f"{iperf_binary} -c {ip} {iperf_args}"
+        full_out_path = self._get_full_file_path(tag)
+
+        try:
+            iperf_process = self._ssh_provider.run(iperf_cmd, timeout_sec=timeout)
+            iperf_output = iperf_process.stdout
+            with open(full_out_path, "w") as out_file:
+                out_file.write(iperf_output)
+        except socket.timeout:
+            # NOTE(review): raised without "from err" chaining; consider
+            # preserving the original traceback.
+            raise TimeoutError(
+                "Socket timeout. Timed out waiting for iperf " "client to finish."
+            )
+        except Exception as err:
+            # NOTE(review): broad catch — any other failure is logged and
+            # swallowed, and full_out_path may not exist on disk. Confirm
+            # callers tolerate a missing result file.
+            logging.exception(f"iperf run failed: {err}")
+
+        return full_out_path
+
+
+class IPerfClientOverAdb(IPerfClientBase):
+    """Class that handles iperf3 operations over ADB devices."""
+
+    def __init__(
+        self, android_device: AndroidDevice, test_interface: str | None = None
+    ):
+        """Creates a new IPerfClientOverAdb object.
+
+        Args:
+            android_device_or_serial: Either an AndroidDevice object, or the
+                serial that corresponds to the AndroidDevice. Note that the
+                serial must be present in an AndroidDevice entry in the ACTS
+                config.
+            test_interface: The network interface that will be used to send
+                traffic to the iperf server.
+        """
+        self._android_device = android_device
+        self._test_interface = test_interface
+
+    @property
+    def test_interface(self) -> str | None:
+        return self._test_interface
+
+    def start(
+        self,
+        ip: str,
+        iperf_args: str,
+        tag: str,
+        timeout: int = 3600,
+        iperf_binary: str | None = None,
+    ) -> str:
+        """Starts iperf client, and waits for completion.
+
+        Args:
+            ip: iperf server ip address.
+            iperf_args: A string representing arguments to start iperf
+            client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
+            tag: tag to further identify iperf results file
+            timeout: the maximum amount of time to allow the iperf client to run
+            iperf_binary: Location of iperf3 binary. If none, it is assumed the
+                the binary is in the path.
+
+        Returns:
+            The iperf result file path.
+        """
+        clean_out = ""
+        try:
+            if not iperf_binary:
+                logging.debug(
+                    "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+                )
+                iperf_binary = "iperf3"
+            else:
+                logging.debug(f"Using iperf3 binary located at {iperf_binary}")
+            iperf_cmd = f"{iperf_binary} -c {ip} {iperf_args}"
+            out = self._android_device.adb.shell(str(iperf_cmd), timeout=timeout)
+            clean_out = out.split("\n")
+            if "error" in clean_out[0].lower():
+                raise IPerfError(clean_out)
+        except (job.TimeoutError, AdbCommandError):
+            logging.warning("TimeoutError: Iperf measurement failed.")
+
+        full_out_path = self._get_full_file_path(tag)
+        with open(full_out_path, "w") as out_file:
+            out_file.write("\n".join(clean_out))
+
+        return full_out_path
diff --git a/packages/antlion/controllers/iperf_server.py b/packages/antlion/controllers/iperf_server.py
new file mode 100755
index 0000000..1cbebf1
--- /dev/null
+++ b/packages/antlion/controllers/iperf_server.py
@@ -0,0 +1,709 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import json
+import logging
+import math
+import os
+import shlex
+import subprocess
+import threading
+import time
+
+from mobly import logger
+
+from antlion import context, utils
+from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.utils_lib.commands.command import require
+from antlion.controllers.utils_lib.commands.journalctl import LinuxJournalctlCommand
+from antlion.controllers.utils_lib.ssh import connection, settings
+from antlion.libs.proc import job
+from antlion.validation import MapValidator
+
MOBLY_CONTROLLER_CONFIG_NAME = "IPerfServer"
ACTS_CONTROLLER_REFERENCE_NAME = "iperf_servers"
# Unit-conversion factors used by IPerfResult._get_reporting_speed; iperf3
# reports throughput in bits per second.
KILOBITS = 1024
MEGABITS = KILOBITS * 1024
GIGABITS = MEGABITS * 1024
BITS_IN_BYTE = 8
+
+
def create(
    configs: list[object],
) -> list[IPerfServer | IPerfServerOverAdb | IPerfServerOverSsh]:
    """Factory method for iperf servers.

    Builds one server controller per config entry. A bare port number yields
    a local IPerfServer; a dict with an "AndroidDevice" and a "port" runs the
    server over adb; a dict with an "ssh_config" and a "port" runs it on a
    remote host over ssh.

    Args:
        configs: config parameters for the iperf server

    Raises:
        ValueError: if an entry matches none of the supported shapes.
    """
    servers: list[IPerfServer | IPerfServerOverAdb | IPerfServerOverSsh] = []
    for entry in configs:
        if isinstance(entry, (str, int)) and str(entry).isdigit():
            servers.append(IPerfServer(int(entry)))
            continue
        if isinstance(entry, dict) and "port" in entry:
            if "AndroidDevice" in entry:
                servers.append(IPerfServerOverAdb(entry["AndroidDevice"], entry["port"]))
                continue
            if "ssh_config" in entry:
                validator = MapValidator(entry)
                servers.append(
                    IPerfServerOverSsh(
                        settings.from_config(entry["ssh_config"]),
                        entry["port"],
                        test_interface=validator.get(str, "test_interface", None),
                        use_killall=validator.get(bool, "use_killall", False),
                    )
                )
                continue
        raise ValueError(
            f"Config entry {entry} in {configs} is not a valid IPerfServer config."
        )
    return servers
+
+
def get_info(iperf_servers):
    """Placeholder for retrieving info about iperf servers.

    Args:
        iperf_servers: list of iperf server controllers (unused).

    Returns:
        None, as no info is currently collected.
    """
    del iperf_servers  # Unused; kept for the controller-module interface.
    return None
+
+
def destroy(iperf_server_list):
    """Stops every iperf server, logging (never raising) any failure.

    Args:
        iperf_server_list: iperf server controllers to tear down.
    """
    for server in iperf_server_list:
        try:
            server.stop()
        except Exception:
            # Best-effort cleanup: one broken server must not block the rest.
            logging.exception(f"Unable to properly clean up {server}.")
+
+
class IPerfResult(object):
    """Parses an iperf3 JSON log and exposes its throughput statistics."""

    def __init__(self, result_path, reporting_speed_units="Mbytes"):
        """Loads iperf result from file.

        Loads iperf result from JSON formatted server log. File can be accessed
        before or after server is stopped. Note that only the first JSON object
        will be loaded and this function is not intended to be used with files
        containing multiple iperf client runs.
        """
        # if result_path isn't a path, treat it as JSON
        self.reporting_speed_units = reporting_speed_units
        if not os.path.exists(result_path):
            self.result = json.loads(result_path)
        else:
            try:
                with open(result_path, "r") as f:
                    iperf_output = f.readlines()
                    # Keep only up to (and including) the first closing brace
                    # line, i.e. the first JSON object in the log.
                    if "}\n" in iperf_output:
                        iperf_output = iperf_output[: iperf_output.index("}\n") + 1]
                    iperf_string = "".join(iperf_output)
                    # iperf3 can emit bare "nan" values, which json.loads
                    # cannot parse.
                    iperf_string = iperf_string.replace("nan", "0")
                    self.result = json.loads(iperf_string)
            except ValueError:
                with open(result_path, "r") as f:
                    # Possibly a result from interrupted iperf run,
                    # skip first line and try again.
                    lines = f.readlines()[1:]
                    self.result = json.loads("".join(lines))

    def _has_data(self) -> bool:
        """Checks if the iperf result has valid throughput data.

        Returns:
            True if the result contains throughput data. False otherwise.
        """
        return ("end" in self.result) and (
            "sum_received" in self.result["end"] or "sum" in self.result["end"]
        )

    def _get_reporting_speed(
        self, network_speed_in_bits_per_second: int | float
    ) -> float:
        """Sets the units for the network speed reporting based on how the
        object was initiated.  Defaults to Megabytes per second.  Currently
        supported, bits per second (bits), kilobits per second (kbits), megabits
        per second (mbits), gigabits per second (gbits), bytes per second
        (bytes), kilobits per second (kbytes), megabits per second (mbytes),
        gigabytes per second (gbytes).

        Args:
            network_speed_in_bits_per_second: The network speed from iperf in
                bits per second.

        Returns:
            The value of the throughput in the appropriate units.
        """
        speed_divisor = 1
        # A "...bytes" suffix requests a bits -> bytes conversion.
        if self.reporting_speed_units[1:].lower() == "bytes":
            speed_divisor = speed_divisor * BITS_IN_BYTE
        # The first letter selects the magnitude: k / m / g.
        if self.reporting_speed_units[0:1].lower() == "k":
            speed_divisor = speed_divisor * KILOBITS
        if self.reporting_speed_units[0:1].lower() == "m":
            speed_divisor = speed_divisor * MEGABITS
        if self.reporting_speed_units[0:1].lower() == "g":
            speed_divisor = speed_divisor * GIGABITS
        return network_speed_in_bits_per_second / speed_divisor

    def get_json(self):
        """Returns the raw json output from iPerf."""
        return self.result

    @property
    def error(self):
        # iperf3 puts a top-level "error" key in its JSON on failure.
        return self.result.get("error", None)

    @property
    def avg_rate(self):
        """Average UDP rate in MB/s over the entire run.

        This is the average UDP rate observed at the terminal the iperf result
        is pulled from. According to iperf3 documentation this is calculated
        based on bytes sent and thus is not a good representation of the
        quality of the link. If the result is not from a success run, this
        property is None.
        """
        if not self._has_data() or "sum" not in self.result["end"]:
            return None
        bps = self.result["end"]["sum"]["bits_per_second"]
        return self._get_reporting_speed(bps)

    @property
    def avg_receive_rate(self):
        """Average receiving rate in MB/s over the entire run.

        This data may not exist if iperf was interrupted. If the result is not
        from a success run, this property is None.
        """
        if not self._has_data() or "sum_received" not in self.result["end"]:
            return None
        bps = self.result["end"]["sum_received"]["bits_per_second"]
        return self._get_reporting_speed(bps)

    @property
    def avg_send_rate(self):
        """Average sending rate in MB/s over the entire run.

        This data may not exist if iperf was interrupted. If the result is not
        from a success run, this property is None.
        """
        if not self._has_data() or "sum_sent" not in self.result["end"]:
            return None
        bps = self.result["end"]["sum_sent"]["bits_per_second"]
        return self._get_reporting_speed(bps)

    @property
    def instantaneous_rates(self):
        """Instantaneous received rate in MB/s over entire run.

        This data may not exist if iperf was interrupted. If the result is not
        from a success run, this property is None.
        """
        if not self._has_data():
            return None
        intervals = [
            self._get_reporting_speed(interval["sum"]["bits_per_second"])
            for interval in self.result["intervals"]
        ]
        return intervals

    @property
    def std_deviation(self):
        """Standard deviation of rates in MB/s over entire run.

        This data may not exist if iperf was interrupted. If the result is not
        from a success run, this property is None.
        """
        return self.get_std_deviation(0)

    def get_std_deviation(self, iperf_ignored_interval):
        """Standard deviation of rates in MB/s over entire run.

        This data may not exist if iperf was interrupted. If the result is not
        from a success run, this property is None. A configurable number of
        beginning (and the single last) intervals are ignored in the
        calculation as they are inaccurate (e.g. the last is from a very small
        interval)

        Args:
            iperf_ignored_interval: number of iperf interval to ignored in
            calculating standard deviation

        Returns:
            The standard deviation.
        """
        if not self._has_data():
            return None
        instantaneous_rates = self.instantaneous_rates[iperf_ignored_interval:-1]
        avg_rate = math.fsum(instantaneous_rates) / len(instantaneous_rates)
        # Sample standard deviation (n - 1 denominator).
        sqd_deviations = [(rate - avg_rate) ** 2 for rate in instantaneous_rates]
        std_dev = math.sqrt(math.fsum(sqd_deviations) / (len(sqd_deviations) - 1))
        return std_dev
+
+
class IPerfServerBase(object):
    """Common interface and log-file bookkeeping for iperf server controllers."""

    # Keeps track of the number of IPerfServer logs to prevent file name
    # collisions.
    __log_file_counter = 0

    __log_file_lock = threading.Lock()

    def __init__(self, port: int):
        self._port = port
        # TODO(markdr): We shouldn't be storing the log files in an array like
        # this. Nobody should be reading this property either. Instead, the
        # IPerfResult should be returned in stop() with all the necessary info.
        # See aosp/1012824 for a WIP implementation.
        self.log_files: list[str] = []

    @property
    def port(self) -> int:
        raise NotImplementedError("port must be specified.")

    @property
    def started(self) -> bool:
        raise NotImplementedError("started must be specified.")

    def start(self, extra_args: str = "", tag: str = "") -> None:
        """Starts an iperf3 server.

        Args:
            extra_args: Extra arguments to start iperf server with.
            tag: Appended to log file name to identify logs from different
                iperf runs.
        """
        raise NotImplementedError("start() must be specified.")

    def stop(self) -> str:
        """Stops the iperf server.

        Returns:
            The name of the log file generated from the terminated session.
        """
        raise NotImplementedError("stop() must be specified.")

    def _get_full_file_path(self, tag: str | None = None) -> str:
        """Returns the full file path for the IPerfServer log file.

        Note: If the directory for the file path does not exist, it will be
        created.

        Args:
            tag: The tag passed in to the server run.
        """
        out_dir = self.log_path

        # Hold the class-wide lock so concurrent servers never receive the
        # same counter value (and hence the same file name).
        with IPerfServerBase.__log_file_lock:
            tags = [tag, IPerfServerBase.__log_file_counter]
            out_file_name = "IPerfServer,%s.log" % (
                ",".join([str(x) for x in tags if x != "" and x is not None])
            )
            IPerfServerBase.__log_file_counter += 1

        file_path = os.path.join(out_dir, out_file_name)
        self.log_files.append(file_path)
        return file_path

    @property
    def log_path(self) -> str:
        # Logs live under the current test context's output directory, in one
        # subdirectory per server port.
        current_context = context.get_current_context()
        full_out_dir = os.path.join(
            current_context.get_full_output_path(), f"IPerfServer{self.port}"
        )

        # Ensure the directory exists.
        os.makedirs(full_out_dir, exist_ok=True)

        return full_out_dir
+
+
+def _get_port_from_ss_output(ss_output, pid):
+    pid = str(pid)
+    lines = ss_output.split("\n")
+    for line in lines:
+        if pid in line:
+            # Expected format:
+            # tcp LISTEN  0 5 *:<PORT>  *:* users:(("cmd",pid=<PID>,fd=3))
+            return line.split()[4].split(":")[-1]
+    else:
+        raise ProcessLookupError("Could not find started iperf3 process.")
+
+
class IPerfServer(IPerfServerBase):
    """Class that handles iperf server commands on localhost."""

    def __init__(self, port: int = 5201) -> None:
        super().__init__(port)
        # Port requested by the caller; the actually bound port is discovered
        # from `ss` output after launch and stored in self._port.
        self._hinted_port = port
        self._current_log_file: str | None = None
        self._iperf_process: subprocess.Popen | None = None
        self._last_opened_file = None

    @property
    def port(self) -> int:
        return self._port

    @property
    def started(self) -> bool:
        return self._iperf_process is not None

    def start(self, extra_args: str = "", tag: str = "") -> None:
        """Starts iperf server on local machine.

        Args:
            extra_args: A string representing extra arguments to start iperf
                server with.
            tag: Appended to log file name to identify logs from different
                iperf runs.
        """
        # No-op if a server is already running.
        if self._iperf_process is not None:
            return

        self._current_log_file = self._get_full_file_path(tag)

        # Run an iperf3 server on the hinted port with JSON output.
        command = ["iperf3", "-s", "-p", str(self._hinted_port), "-J"]

        command.extend(shlex.split(extra_args))

        if self._last_opened_file:
            self._last_opened_file.close()
        self._last_opened_file = open(self._current_log_file, "w")
        self._iperf_process = subprocess.Popen(
            command, stdout=self._last_opened_file, stderr=subprocess.DEVNULL
        )
        # The server may need a moment to bind; poll `ss` up to 3 times to
        # learn the port it actually listens on.
        for attempts_left in reversed(range(3)):
            try:
                self._port = int(
                    _get_port_from_ss_output(
                        job.run("ss -l -p -n | grep iperf").stdout,
                        self._iperf_process.pid,
                    )
                )
                break
            except ProcessLookupError:
                if attempts_left == 0:
                    raise
                logging.debug("iperf3 process not started yet.")
                time.sleep(0.01)

    def stop(self) -> str | None:
        """Stops the iperf server.

        Returns:
            The name of the log file generated from the terminated session,
            or None if the server was not running.
        """
        if self._iperf_process is None:
            return

        if self._last_opened_file:
            self._last_opened_file.close()
            self._last_opened_file = None

        self._iperf_process.terminate()
        self._iperf_process = None

        return self._current_log_file

    def __del__(self) -> None:
        # Best-effort cleanup so a leaked iperf3 process does not outlive us.
        self.stop()
+
+
class IPerfServerOverSsh(IPerfServerBase):
    """Class that handles iperf3 operations on remote machines."""

    def __init__(
        self,
        ssh_settings: settings.SshSettings,
        port: int,
        test_interface: str | None = None,
        use_killall: bool = False,
    ):
        super().__init__(port)
        self.hostname = ssh_settings.hostname
        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[IPerfServer | {self.hostname}]",
            },
        )
        self._ssh_settings = ssh_settings
        self._ssh_session: connection.SshConnection | None = connection.SshConnection(
            ssh_settings
        )
        self._journalctl = require(LinuxJournalctlCommand(self._ssh_session))

        # PID of the remote iperf3 process; None while the server is stopped.
        self._iperf_pid = None
        self._current_tag = None
        # NOTE(review): use_killall is typed bool but evaluated through its
        # string form; any truthy bool becomes "true" here.
        self._use_killall = str(use_killall).lower() == "true"

        if test_interface:
            self.test_interface = test_interface
        else:
            # A test interface can only be found if an ip address is specified.
            # A fully qualified hostname will get_interface_based_on_ip to fail,
            # raising RuntimeError.
            self.test_interface = utils.get_interface_based_on_ip(
                self._ssh_session, self.hostname
            )

    @property
    def port(self) -> int:
        return self._port

    @property
    def started(self) -> bool:
        return self._iperf_pid is not None

    def _get_remote_log_path(self) -> str:
        # Temporary log location on the remote host; copied locally on stop().
        return f"/tmp/iperf_server_port{self.port}.log"

    def get_interface_ip_addresses(self, interface: str) -> dict[str, list[str]]:
        """Gets all of the ip addresses, ipv4 and ipv6, associated with a
           particular interface name.

        Args:
            interface: The interface name on the device, ie eth0

        Returns:
            A list of dictionaries of the various IP addresses. See
            utils.get_interface_ip_addresses.
        """
        if not self._ssh_session:
            self._start_ssh()

        return utils.get_interface_ip_addresses(self._ssh_session, interface)

    def renew_test_interface_ip_address(self) -> None:
        """Renews the test interface's IPv4 address.

        Necessary for changing DHCP scopes during a test.
        """
        if not self._ssh_session:
            self._start_ssh()
        utils.renew_linux_ip_address(self._ssh_session, self.test_interface)

    def get_addr(
        self, addr_type: str = "ipv4_private", timeout_sec: int | None = None
    ) -> str:
        """Wait until a type of IP address on the test interface is available
        then return it.
        """
        if not self._ssh_session:
            self._start_ssh()
        return utils.get_addr(
            self._ssh_session, self.test_interface, addr_type, timeout_sec
        )

    def _cleanup_iperf_port(self) -> None:
        """Checks and kills zombie iperf servers occupying intended port."""
        iperf_check_cmd = (
            "netstat -tulpn | grep LISTEN | grep iperf3" " | grep :{}"
        ).format(self.port)
        iperf_check = self._ssh_session.run(iperf_check_cmd, ignore_status=True)
        iperf_check = iperf_check.stdout
        if iperf_check:
            logging.debug(f"Killing zombie server on port {self.port}")
            # netstat's last column is "<pid>/<program name>".
            iperf_pid = iperf_check.split(" ")[-1].split("/")[0]
            self._ssh_session.run(f"kill -9 {str(iperf_pid)}")

    def start(
        self,
        extra_args: str = "",
        tag: str = "",
        iperf_binary: str | None = None,
    ) -> None:
        """Starts iperf server on specified machine and port.

        Args:
            extra_args: Extra arguments to start iperf server with.
            tag: Appended to log file name to identify logs from different
                iperf runs.
            iperf_binary: Location of iperf3 binary. If none, it is assumed
                the binary is in the path.
        """
        if self.started:
            return

        if not self._ssh_session:
            self._start_ssh()
        self._cleanup_iperf_port()
        if not iperf_binary:
            logging.debug(
                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
            )
            iperf_binary = "iperf3"
        else:
            logging.debug(f"Using iperf3 binary located at {iperf_binary}")
        iperf_command = f"{iperf_binary} -s -J -p {self.port}"

        cmd = f"{iperf_command} {extra_args} > {self._get_remote_log_path()}"

        job_result = self._ssh_session.run_async(cmd)
        self._iperf_pid = job_result.stdout
        self._current_tag = tag

    def stop(self) -> str | None:
        """Stops the iperf server.

        Returns:
            The name of the log file generated from the terminated session,
            or None if the server was not running.
        """
        if not self.started:
            return

        if self._use_killall:
            self._ssh_session.run("killall iperf3", ignore_status=True)
        else:
            self._ssh_session.run_async(f"kill -9 {str(self._iperf_pid)}")

        # Copy the remote log into the local output directory, then delete it
        # from the remote host.
        iperf_result = self._ssh_session.run(f"cat {self._get_remote_log_path()}")

        log_file = self._get_full_file_path(self._current_tag)
        with open(log_file, "w") as f:
            f.write(iperf_result.stdout)

        self._ssh_session.run_async(f"rm {self._get_remote_log_path()}")
        self._iperf_pid = None
        return log_file

    def _start_ssh(self) -> None:
        """Starts an ssh session to the iperf server."""
        if not self._ssh_session:
            self._ssh_session = connection.SshConnection(self._ssh_settings)
            self._journalctl.set_runner(self._ssh_session)

    def close_ssh(self) -> None:
        """Closes the ssh session to the iperf server, if one exists, preventing
        connection reset errors when rebooting server device.
        """
        if self.started:
            self.stop()
        if self._ssh_session:
            self._ssh_session.close()
            self._ssh_session = None

    def get_systemd_journal(self) -> str:
        """Returns the remote host's journalctl output, reconnecting over ssh
        first (and disconnecting after) if the session is currently closed."""
        if self._ssh_session:
            return self._journalctl.logs()

        self._start_ssh()
        logs = self._journalctl.logs()
        self.close_ssh()  # return to closed state
        return logs
+
+
class IPerfServerOverAdb(IPerfServerBase):
    """Class that handles iperf3 operations over ADB devices."""

    def __init__(self, android_device: AndroidDevice, port: int):
        """Creates a new IPerfServerOverAdb object.

        Args:
            android_device: Android device to run iperf server.
            port: The port number to open the iperf server on.
        """
        super().__init__(port)
        self._android_device = android_device
        self._port = port

        self._iperf_process = None
        self._current_tag = ""

    @property
    def port(self) -> int:
        return self._port

    @property
    def started(self) -> bool:
        return self._iperf_process is not None

    def _get_device_log_path(self) -> str:
        # Log location on the Android device; removed in stop().
        return f"~/data/iperf_server_port{self.port}.log"

    def start(
        self, extra_args: str = "", tag: str = "", iperf_binary: str | None = None
    ) -> None:
        """Starts iperf server on an ADB device.

        Args:
            extra_args: A string representing extra arguments to start iperf
                server with.
            tag: Appended to log file name to identify logs from different
                iperf runs.
            iperf_binary: Location of iperf3 binary. If none, it is assumed
                the binary is in the path.
        """
        if self._iperf_process is not None:
            return

        if not iperf_binary:
            logging.debug(
                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
            )
            iperf_binary = "iperf3"
        else:
            logging.debug(f"Using iperf3 binary located at {iperf_binary}")
        iperf_command = f"{iperf_binary} -s -J -p {self.port}"

        self._iperf_process = self._android_device.adb.shell_nb(
            f"{iperf_command} {extra_args} > {self._get_device_log_path()} 2>&1"
        )

        # Poll until the newest iperf3 process shows up in pgrep so it can be
        # tracked (and killed) later.
        self._iperf_process_adb_pid = ""
        while len(self._iperf_process_adb_pid) == 0:
            self._iperf_process_adb_pid = self._android_device.adb.shell(
                "pgrep iperf3 -n"
            )

        self._current_tag = tag

    def stop(self) -> str | None:
        """Stops the iperf server.

        Returns:
            The name of the log file generated from the terminated session,
            or None if the server was not running.
        """
        if self._iperf_process is None:
            return

        job.run(f"kill -9 {self._iperf_process.pid}")

        # TODO(markdr): update with definitive kill method
        while True:
            iperf_process_list = self._android_device.adb.shell("pgrep iperf3")
            if iperf_process_list.find(self._iperf_process_adb_pid) == -1:
                break
            else:
                self._android_device.adb.shell(f"kill -9 {self._iperf_process_adb_pid}")

        # Pull the log from the device into the local output directory, then
        # remove it from the device.
        iperf_result = self._android_device.adb.shell(
            f"cat {self._get_device_log_path()}"
        )

        log_file = self._get_full_file_path(self._current_tag)
        with open(log_file, "w", encoding="utf-8") as f:
            f.write(iperf_result)

        self._android_device.adb.shell(f"rm {self._get_device_log_path()}")

        self._iperf_process = None
        return log_file
diff --git a/packages/antlion/controllers/openwrt_ap.py b/packages/antlion/controllers/openwrt_ap.py
new file mode 100644
index 0000000..4ca0129
--- /dev/null
+++ b/packages/antlion/controllers/openwrt_ap.py
@@ -0,0 +1,499 @@
+"""Controller for Open WRT access point."""
+
+import logging
+import random
+import re
+import time
+from typing import Literal
+
+import yaml
+from mobly import logger, signals
+
+from antlion.controllers.openwrt_lib import (
+    network_settings,
+    wireless_config,
+    wireless_settings_applier,
+)
+from antlion.controllers.openwrt_lib.openwrt_constants import SYSTEM_INFO_CMD
+from antlion.controllers.openwrt_lib.openwrt_constants import (
+    OpenWrtModelMap as modelmap,
+)
+from antlion.controllers.openwrt_lib.openwrt_constants import OpenWrtWifiSetting
+from antlion.controllers.utils_lib.ssh import connection, settings
+
MOBLY_CONTROLLER_CONFIG_NAME = "OpenWrtAP"
ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
# Security-mode identifiers used in wireless configs.
OWE_SECURITY = "owe"
SAE_SECURITY = "sae"
SAEMIXED_SECURITY = "sae-mixed"
ENABLE_RADIO = "0"
# NOTE(review): presumably the hostapd ieee80211w value for "PMF required"
# — confirm against the wireless settings code.
PMF_ENABLED = 2
# Seconds to wait for WiFi state transitions in start_ap()/stop_ap().
WAIT_TIME = 20
# Radio interfaces assumed when the HW model is not in OpenWrtModelMap.
DEFAULT_RADIOS = ("radio0", "radio1")
+
+
def create(configs):
    """Creates ap controllers from a json config.

    Creates an ap controller from either a list, or a single element. The
    element can either be just the hostname or a dictionary containing the
    hostname and username of the AP to connect to over SSH.

    Args:
      configs: The json configs that represent this controller. Each entry
        must carry an "ssh_config" key with SSH login information, e.g.:

        "OpenWrtAP": [
          {
            "ssh_config": {
              "user" : "root",
              "host" : "192.168.1.1"
            }
          }
        ]

    Returns:
      A list of OpenWrtAP objects, one per config entry.
    """
    access_points = []
    for config in configs:
        access_points.append(OpenWrtAP(config))
    return access_points
+
+
def destroy(aps):
    """Tears down each AccessPoint and its SSH connection.

    Args:
      aps: The list of AccessPoints to destroy.
    """
    for access_point in aps:
        access_point.close()
        access_point.close_ssh()
+
+
def get_info(aps):
    """Get information on a list of access points.

    Args:
      aps: A list of AccessPoints.

    Returns:
      A list of all aps hostname.
    """
    hostnames = []
    for ap in aps:
        hostnames.append(ap.ssh_settings.hostname)
    return hostnames
+
+
# Maps a band ("2g" or "5g") to a {SSID: BSSID} dictionary.
BSSIDMap = dict[Literal["2g", "5g"], dict[str, str]]
+
+
+class OpenWrtAP(object):
+    """An AccessPoint controller.
+
+    Attributes:
+      ssh: The ssh connection to the AP.
+      ssh_settings: The ssh settings being used by the ssh connection.
+      log: Logging object for AccessPoint.
+      wireless_setting: object holding wireless configuration.
+      network_setting: Object for network configuration.
+      model: OpenWrt HW model.
+      radios: Fit interface for test.
+    """
+
    def __init__(self, config):
        """Initialize AP.

        Args:
          config: dict with an "ssh_config" entry holding SSH login info.
        """
        self.ssh_settings = settings.from_config(config["ssh_config"])
        self.ssh = connection.SshConnection(self.ssh_settings)
        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[OpenWrtAP|{self.ssh_settings.hostname}]",
            },
        )
        # Populated by configure_ap(); None until the AP has been configured.
        self.wireless_setting: (
            wireless_settings_applier.WirelessSettingsApplier | None
        ) = None
        self.network_setting = network_settings.NetworkSettings(
            self.ssh, self.ssh_settings, self.log
        )
        # Pick the radio interfaces for this hardware model; fall back to the
        # common (radio0, radio1) pair when the model is not in the map.
        self.model = self.get_model_name()
        if self.model in modelmap.__dict__:
            self.radios = modelmap.__dict__[self.model]
        else:
            self.radios = DEFAULT_RADIOS
+
    def configure_ap(
        self,
        wireless_configs: list[wireless_config.WirelessConfig],
        channel_2g: int,
        channel_5g: int,
    ):
        """Configure AP with the required settings.

        Each test class inherits WifiBaseTest. Based on the test, we may need to
        configure PSK, WEP, OPEN, ENT networks on 2G and 5G bands in any
        combination. We call WifiBaseTest methods get_psk_network(),
        get_open_network(), get_wep_network() and get_ent_network() to create
        dictionaries which contains this information. 'wireless_configs' is a
        list of such dictionaries. Example below configures 2 WiFi networks -
        1 PSK 2G and 1 Open 5G on one AP. configure_ap() is called from
        WifiBaseTest to configure the APs.

        wireless_configs = [
          {
            '2g': {
              'SSID': '2g_AkqXWPK4',
              'security': 'psk2',
              'password': 'YgYuXqDO9H',
              'hiddenSSID': False
            },
          },
          {
            '5g': {
              'SSID': '5g_8IcMR1Sg',
              'security': 'none',
              'hiddenSSID': False
            },
          }
        ]

        Args:
          wireless_configs: list of network settings for 2G and 5G bands.
          channel_2g: channel for 2G band.
          channel_5g: channel for 5G band.
        """
        # NOTE(review): radios[1] and radios[0] are passed in that order —
        # presumably (2G radio, 5G radio), matching the 5g/2g split used in
        # get_bssids_for_wifi_networks(); confirm against the applier.
        self.wireless_setting = wireless_settings_applier.WirelessSettingsApplier(
            self.ssh,
            wireless_configs,
            channel_2g,
            channel_5g,
            self.radios[1],
            self.radios[0],
        )
        self.wireless_setting.apply_wireless_settings()
+
+    def start_ap(self):
+        """Starts the AP with the settings in /etc/config/wireless."""
+        self.ssh.run("wifi up")
+        curr_time = time.time()
+        while time.time() < curr_time + WAIT_TIME:
+            if self.get_wifi_status():
+                return
+            time.sleep(3)
+        if not self.get_wifi_status():
+            raise ValueError("Failed to turn on WiFi on the AP.")
+
+    def stop_ap(self):
+        """Stops the AP."""
+        self.ssh.run("wifi down")
+        curr_time = time.time()
+        while time.time() < curr_time + WAIT_TIME:
+            if not self.get_wifi_status():
+                return
+            time.sleep(3)
+        if self.get_wifi_status():
+            raise ValueError("Failed to turn off WiFi on the AP.")
+
+    def get_bssids_for_wifi_networks(self) -> BSSIDMap:
+        """Get BSSIDs for wifi networks configured.
+
+        Returns:
+          Dictionary of SSID - BSSID map for both bands.
+        """
+        bssid_map: BSSIDMap = {"2g": {}, "5g": {}}
+        for radio in self.radios:
+            ssid_ifname_map = self.get_ifnames_for_ssids(radio)
+            if radio == self.radios[0]:
+                for ssid, ifname in ssid_ifname_map.items():
+                    bssid_map["5g"][ssid] = self.get_bssid(ifname)
+            elif radio == self.radios[1]:
+                for ssid, ifname in ssid_ifname_map.items():
+                    bssid_map["2g"][ssid] = self.get_bssid(ifname)
+        return bssid_map
+
+    def get_ifnames_for_ssids(self, radio) -> dict[str, str]:
+        """Get interfaces for wifi networks.
+
+        Args:
+          radio: 2g or 5g radio get the bssids from.
+
+        Returns:
+          dictionary of ssid - ifname mappings.
+        """
+        ssid_ifname_map: dict[str, str] = {}
+        str_output = self.ssh.run(f"wifi status {radio}").stdout
+        wifi_status = yaml.load(
+            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
+        )
+        wifi_status = wifi_status[radio]
+        if wifi_status["up"]:
+            interfaces = wifi_status["interfaces"]
+            for config in interfaces:
+                ssid = config["config"]["ssid"]
+                ifname = config["ifname"]
+                ssid_ifname_map[ssid] = ifname
+        return ssid_ifname_map
+
+    def get_bssid(self, ifname):
+        """Get MAC address from an interface.
+
+        Args:
+          ifname: interface name of the corresponding MAC.
+
+        Returns:
+          BSSID of the interface.
+        """
+        ifconfig = self.ssh.run(f"ifconfig {ifname}").stdout
+        mac_addr = ifconfig.split("\n")[0].split()[-1]
+        return mac_addr
+
+    def set_wpa_encryption(self, encryption):
+        """Set different encryptions to wpa or wpa2.
+
+        Args:
+          encryption: ccmp, tkip, or ccmp+tkip.
+        """
+        str_output = self.ssh.run("wifi status").stdout
+        wifi_status = yaml.load(
+            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
+        )
+
+        # Counting how many interface are enabled.
+        total_interface = 0
+        for radio in self.radios:
+            num_interface = len(wifi_status[radio]["interfaces"])
+            total_interface += num_interface
+
+        # Iterates every interface to get and set wpa encryption.
+        default_extra_interface = 2
+        for i in range(total_interface + default_extra_interface):
+            origin_encryption = self.ssh.run(
+                f"uci get wireless.@wifi-iface[{i}].encryption"
+            ).stdout
+            origin_psk_pattern = re.match(r"psk\b", origin_encryption)
+            target_psk_pattern = re.match(r"psk\b", encryption)
+            origin_psk2_pattern = re.match(r"psk2\b", origin_encryption)
+            target_psk2_pattern = re.match(r"psk2\b", encryption)
+
+            if origin_psk_pattern == target_psk_pattern:
+                self.ssh.run(
+                    f"uci set wireless.@wifi-iface[{i}].encryption={encryption}"
+                )
+
+            if origin_psk2_pattern == target_psk2_pattern:
+                self.ssh.run(
+                    f"uci set wireless.@wifi-iface[{i}].encryption={encryption}"
+                )
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def set_password(self, pwd_5g=None, pwd_2g=None):
+        """Set password for individual interface.
+
+        Args:
+            pwd_5g: 8 ~ 63 chars, ascii letters and digits password for 5g network.
+            pwd_2g: 8 ~ 63 chars, ascii letters and digits password for 2g network.
+        """
+        if pwd_5g:
+            if len(pwd_5g) < 8 or len(pwd_5g) > 63:
+                self.log.error("Password must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            elif not re.match("^[A-Za-z0-9]*$", pwd_5g):
+                self.log.error("Password must only contains ascii letters and digits")
+            else:
+                self.ssh.run(f"uci set wireless.@wifi-iface[{3}].key={pwd_5g}")
+                self.log.info(f"Set 5G password to :{pwd_5g}")
+
+        if pwd_2g:
+            if len(pwd_2g) < 8 or len(pwd_2g) > 63:
+                self.log.error("Password must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            elif not re.match("^[A-Za-z0-9]*$", pwd_2g):
+                self.log.error("Password must only contains ascii letters and digits")
+            else:
+                self.ssh.run(f"uci set wireless.@wifi-iface[{2}].key={pwd_2g}")
+                self.log.info(f"Set 2G password to :{pwd_2g}")
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def set_ssid(self, ssid_5g=None, ssid_2g=None):
+        """Set SSID for individual interface.
+
+        Args:
+            ssid_5g: 8 ~ 63 chars for 5g network.
+            ssid_2g: 8 ~ 63 chars for 2g network.
+        """
+        if ssid_5g:
+            if len(ssid_5g) < 8 or len(ssid_5g) > 63:
+                self.log.error("SSID must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            else:
+                self.ssh.run(f"uci set wireless.@wifi-iface[{3}].ssid={ssid_5g}")
+                self.log.info(f"Set 5G SSID to :{ssid_5g}")
+
+        if ssid_2g:
+            if len(ssid_2g) < 8 or len(ssid_2g) > 63:
+                self.log.error("SSID must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            else:
+                self.ssh.run(f"uci set wireless.@wifi-iface[{2}].ssid={ssid_2g}")
+                self.log.info(f"Set 2G SSID to :{ssid_2g}")
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def generate_mobility_domain(self):
+        """Generate 4-character hexadecimal ID.
+
+        Returns:
+          String; a 4-character hexadecimal ID.
+        """
+        md = f"{random.getrandbits(16):04x}"
+        self.log.info(f"Mobility Domain ID: {md}")
+        return md
+
    def enable_80211r(self, iface, md):
        """Enable 802.11r (fast BSS transition) for one single radio.

        Args:
          iface: index number of wifi-iface.
                  2: radio1 (2g)
                  3: radio0 (5g)
          md: mobility domain. a 4-character hexadecimal ID.
        Raises:
          TestSkip if 2g or 5g radio is not up or 802.11r is not enabled.
        """
        str_output = self.ssh.run("wifi status").stdout
        # "wifi status" output parses as YAML once tabs/newlines are removed.
        wifi_status = yaml.load(
            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
        )
        # Check if the radio is up.
        if iface == OpenWrtWifiSetting.IFACE_2G:
            if wifi_status[self.radios[1]]["up"]:
                self.log.info("2g network is ENABLED")
            else:
                raise signals.TestSkip("2g network is NOT ENABLED")
        elif iface == OpenWrtWifiSetting.IFACE_5G:
            if wifi_status[self.radios[0]]["up"]:
                self.log.info("5g network is ENABLED")
            else:
                raise signals.TestSkip("5g network is NOT ENABLED")

        # Setup 802.11r: enable FT, derive PMK-R0/R1 locally from the PSK,
        # and assign the shared mobility domain.
        self.ssh.run(f"uci set wireless.@wifi-iface[{iface}].ieee80211r='1'")
        self.ssh.run(f"uci set wireless.@wifi-iface[{iface}].ft_psk_generate_local='1'")
        self.ssh.run(f"uci set wireless.@wifi-iface[{iface}].mobility_domain='{md}'")
        self.ssh.run("uci commit wireless")
        self.ssh.run("wifi")

        # Check if 802.11r is enabled.
        # NOTE(review): assumes ssh.run strips the trailing newline from
        # stdout; otherwise this would read "1\n" and never match — confirm.
        result = self.ssh.run(
            f"uci get wireless.@wifi-iface[{iface}].ieee80211r"
        ).stdout
        if result == "1":
            self.log.info("802.11r is ENABLED")
        else:
            raise signals.TestSkip("802.11r is NOT ENABLED")
+
+    def get_wifi_network(self, security=None, band=None):
+        """Return first match wifi interface's config.
+
+        Args:
+          security: psk2 or none
+          band: '2g' or '5g'
+
+        Returns:
+          A dict contains match wifi interface's config.
+        """
+        if not self.wireless_setting:
+            raise RuntimeError("The AP has not been configured yet; run configure_ap()")
+
+        for wifi_iface in self.wireless_setting.wireless_configs:
+            match_list = []
+            wifi_network = wifi_iface.__dict__
+            if security:
+                match_list.append(security == wifi_network["security"])
+            if band:
+                match_list.append(band == wifi_network["band"])
+
+            if all(match_list):
+                wifi_network["SSID"] = wifi_network["ssid"]
+                if not wifi_network["password"]:
+                    del wifi_network["password"]
+                return wifi_network
+        return None
+
+    def get_wifi_status(self):
+        """Check if radios are up. Default are 2G and 5G bands.
+
+        Returns:
+          True if both radios are up. False if not.
+        """
+        status = True
+        for radio in self.radios:
+            try:
+                str_output = self.ssh.run(f"wifi status {radio}").stdout
+                wifi_status = yaml.load(
+                    str_output.replace("\t", "").replace("\n", ""),
+                    Loader=yaml.SafeLoader,
+                )
+                status = wifi_status[radio]["up"] and status
+            except:
+                self.log.info("Failed to make ssh connection to the OpenWrt")
+                return False
+        return status
+
+    def verify_wifi_status(self, timeout=20):
+        """Ensure wifi interfaces are ready.
+
+        Args:
+          timeout: An integer that is the number of times to try
+                   wait for interface ready.
+        Returns:
+          True if both radios are up. False if not.
+        """
+        start_time = time.time()
+        end_time = start_time + timeout
+        while time.time() < end_time:
+            if self.get_wifi_status():
+                return True
+            time.sleep(1)
+        return False
+
+    def get_model_name(self):
+        """Get Openwrt model name.
+
+        Returns:
+          A string include device brand and model. e.g. NETGEAR_R8000
+        """
+        out = self.ssh.run(SYSTEM_INFO_CMD).stdout.split("\n")
+        for line in out:
+            if "board_name" in line:
+                model = line.split()[1].strip('",').split(",")
+                return "_".join(map(lambda i: i.upper(), model))
+        self.log.info("Failed to retrieve OpenWrt model information.")
+        return None
+
    def close(self):
        """Reset wireless and network settings to default and stop AP."""
        # Only clean up subsystems that were actually configured.
        if self.network_setting.config:
            self.network_setting.cleanup_network_settings()
        if self.wireless_setting:
            self.wireless_setting.cleanup_wireless_settings()
+
    def close_ssh(self):
        """Close the SSH connection to the AP."""
        self.ssh.close()
+
    def reboot(self):
        """Reboot the OpenWrt device (the ssh session is left as-is)."""
        self.ssh.run("reboot")
diff --git a/src/antlion/controllers/openwrt_lib/OWNERS b/packages/antlion/controllers/openwrt_lib/OWNERS
similarity index 100%
rename from src/antlion/controllers/openwrt_lib/OWNERS
rename to packages/antlion/controllers/openwrt_lib/OWNERS
diff --git a/src/antlion/controllers/openwrt_lib/__init__.py b/packages/antlion/controllers/openwrt_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/openwrt_lib/__init__.py
rename to packages/antlion/controllers/openwrt_lib/__init__.py
diff --git a/packages/antlion/controllers/openwrt_lib/network_const.py b/packages/antlion/controllers/openwrt_lib/network_const.py
new file mode 100644
index 0000000..7375ff7
--- /dev/null
+++ b/packages/antlion/controllers/openwrt_lib/network_const.py
@@ -0,0 +1,289 @@
LOCALHOST = "192.168.1.1"

# params for ipsec.conf
IPSEC_CONF = {
    "config setup": {
        # repr() wraps the value in single quotes, as the generated
        # ipsec.conf expects. (The original called .__repr__() directly on
        # an implicitly concatenated literal, which obscured this.)
        "charondebug": repr(
            "chd 2,ike 2,knl 2,net 2,esp 2,dmn 2,mgr 2,lib 1,cfg 2,enc 1"
        ),
        "uniqueids": "never",
    },
    "conn %default": {"ike": "aes128-sha-modp1024", "esp": "aes128-sha1"},
}
+
# IKEv1 L2TP/IPsec, pre-shared-key auth on both ends (transport mode).
IPSEC_L2TP_PSK = {
    "conn L2TP_PSK": {
        "keyexchange": "ikev1",
        "type": "transport",
        "left": LOCALHOST,
        "leftprotoport": "17/1701",
        "leftauth": "psk",
        "right": "%any",
        "rightprotoport": "17/%any",
        "rightsubnet": "0.0.0.0/0",
        "rightauth": "psk",
        "auto": "add",
    }
}

# IKEv1 L2TP/IPsec, certificate (pubkey) auth on both ends (transport mode).
IPSEC_L2TP_RSA = {
    "conn L2TP_RSA": {
        "keyexchange": "ikev1",
        "type": "transport",
        "left": LOCALHOST,
        "leftprotoport": "17/1701",
        "leftauth": "pubkey",
        "leftcert": "serverCert.der",
        "right": "%any",
        "rightprotoport": "17/%any",
        "rightsubnet": "0.0.0.0/0",
        "rightauth": "pubkey",
        "auto": "add",
    }
}

# IKEv1 hybrid: server uses a certificate, client adds XAuth credentials.
IPSEC_HYBRID_RSA = {
    "conn HYBRID_RSA": {
        "keyexchange": "ikev1",
        "left": LOCALHOST,
        "leftsubnet": "0.0.0.0/0",
        "leftauth": "pubkey",
        "leftcert": "serverCert.der",
        "leftsendcert": "always",
        "right": "%any",
        "rightsubnet": "0.0.0.0/0",
        "rightauth": "pubkey",
        "rightauth2": "xauth",
        "xauth": "server",
        "auto": "add",
    }
}

# IKEv1 XAuth on top of pre-shared-key auth.
IPSEC_XAUTH_PSK = {
    "conn XAUTH_PSK": {
        "keyexchange": "ikev1",
        "left": LOCALHOST,
        "leftsubnet": "0.0.0.0/0",
        "leftauth": "psk",
        "right": "%any",
        "rightsubnet": "0.0.0.0/0",
        "rightauth": "psk",
        "rightauth2": "xauth",
        "auto": "add",
    }
}

# IKEv1 XAuth with a server certificate.
IPSEC_XAUTH_RSA = {
    "conn XAUTH_RSA": {
        "keyexchange": "ikev1",
        "left": LOCALHOST,
        "leftsubnet": "0.0.0.0/0",
        "leftcert": "serverCert.der",
        "leftsendcert": "always",
        "right": "%any",
        "rightsubnet": "0.0.0.0/0",
        "rightauth": "xauth",
        "xauth": "server",
        "auto": "add",
    }
}
+
# IKEv2, server certificate + client EAP-MSCHAPv2; server identified by IP.
IPSEC_IKEV2_MSCHAPV2 = {
    "conn IKEV2_MSCHAPV2": {
        "keyexchange": "ikev2",
        "left": LOCALHOST,
        "leftid": LOCALHOST,
        "leftcert": "serverCert.der",
        "leftsubnet": "0.0.0.0/0",
        "leftauth": "pubkey",
        "leftsendcert": "always",
        "right": "%any",
        "rightid": "vpntest",
        "rightauth": "eap-mschapv2",
        "auto": "add",
    }
}

# IKEv2 with pre-shared-key auth; server identified by IP.
IPSEC_IKEV2_PSK = {
    "conn IKEV2_PSK": {
        "keyexchange": "ikev2",
        "left": LOCALHOST,
        "leftid": LOCALHOST,
        "leftauth": "psk",
        "leftsubnet": "0.0.0.0/0",
        "right": "%any",
        "rightid": "vpntest",
        "rightauth": "psk",
        "auto": "add",
    }
}

# IKEv2 with certificates on both ends; server identified by IP.
IPSEC_IKEV2_RSA = {
    "conn IKEV2_RSA": {
        "keyexchange": "ikev2",
        "left": LOCALHOST,
        "leftid": LOCALHOST,
        "leftcert": "serverCert.der",
        "leftsubnet": "0.0.0.0/0",
        "leftauth": "pubkey",
        "leftsendcert": "always",
        "right": "%any",
        "rightid": f"vpntest@{LOCALHOST}",
        "rightauth": "pubkey",
        "rightcert": "clientCert.pem",
        "auto": "add",
    }
}

# Same as IPSEC_IKEV2_MSCHAPV2 but the server is identified by DNS name.
IPSEC_IKEV2_MSCHAPV2_HOSTNAME = {
    "conn IKEV2_MSCHAPV2_HOSTNAME": {
        "keyexchange": "ikev2",
        "left": LOCALHOST,
        "leftid": "strongswan-vpn-server.android-iperf.com",
        "leftcert": "serverCert.der",
        "leftsubnet": "0.0.0.0/0",
        "leftauth": "pubkey",
        "leftsendcert": "always",
        "right": "%any",
        "rightid": "vpntest",
        "rightauth": "eap-mschapv2",
        "auto": "add",
    }
}

# Same as IPSEC_IKEV2_PSK but the server is identified by DNS name.
IPSEC_IKEV2_PSK_HOSTNAME = {
    "conn IKEV2_PSK_HOSTNAME": {
        "keyexchange": "ikev2",
        "left": LOCALHOST,
        "leftid": "strongswan-vpn-server.android-iperf.com",
        "leftauth": "psk",
        "leftsubnet": "0.0.0.0/0",
        "right": "%any",
        "rightid": "vpntest",
        "rightauth": "psk",
        "auto": "add",
    }
}

# Same as IPSEC_IKEV2_RSA but the server is identified by DNS name.
IPSEC_IKEV2_RSA_HOSTNAME = {
    "conn IKEV2_RSA_HOSTNAME": {
        "keyexchange": "ikev2",
        "left": LOCALHOST,
        "leftid": "strongswan-vpn-server.android-iperf.com",
        "leftcert": "serverCert.der",
        "leftsubnet": "0.0.0.0/0",
        "leftauth": "pubkey",
        "leftsendcert": "always",
        "right": "%any",
        "rightid": "vpntest@strongswan-vpn-server.android-iperf.com",
        "rightauth": "pubkey",
        "rightcert": "clientCert.pem",
        "auto": "add",
    }
}
+
# params for xl2tpd

# Lines written to the [global] section of xl2tpd.conf.
XL2TPD_CONF_GLOBAL = (
    "[global]",
    "ipsec saref = no",
    "debug tunnel = no",
    "debug avp = no",
    "debug network = no",
    "debug state = no",
    "access control = no",
    "rand source = dev",
    "port = 1701",
)

# Lines written to the default LNS section of xl2tpd.conf.
XL2TPD_CONF_INS = (
    "[lns default]",
    "require authentication = yes",
    "pass peer = yes",
    "ppp debug = no",
    "length bit = yes",
    "refuse pap = yes",
    "refuse chap = yes",
)

# ppp options handed to xl2tpd (options.xl2tpd).
XL2TPD_OPTION = (
    "require-mschap-v2",
    "refuse-mschap",
    "ms-dns 8.8.8.8",
    "ms-dns 8.8.4.4",
    "asyncmap 0",
    "auth",
    "crtscts",
    "idle 1800",
    "mtu 1410",
    "mru 1410",
    "connect-delay 5000",
    "lock",
    "hide-password",
    "local",
    "debug",
    "modem",
    "proxyarp",
    "lcp-echo-interval 30",
    "lcp-echo-failure 4",
    "nomppe",
)

# iptable rules for vpn_pptp: allow all traffic on ppp interfaces.
FIREWALL_RULES_FOR_PPTP = (
    "iptables -A input_rule -i ppp+ -j ACCEPT",
    "iptables -A output_rule -o ppp+ -j ACCEPT",
    "iptables -A forwarding_rule -i ppp+ -j ACCEPT",
)

# iptable rules for vpn_l2tp: accept ESP and IKE/NAT-T (500/4500) plus
# IPsec-protected L2TP (1701).
FIREWALL_RULES_FOR_L2TP = (
    "iptables -I INPUT  -m policy --dir in --pol ipsec --proto esp -j ACCEPT",
    "iptables -I FORWARD  -m policy --dir in --pol ipsec --proto esp -j ACCEPT",
    "iptables -I FORWARD  -m policy --dir out --pol ipsec --proto esp -j ACCEPT",
    "iptables -I OUTPUT   -m policy --dir out --pol ipsec --proto esp -j ACCEPT",
    "iptables -t nat -I POSTROUTING -m policy --pol ipsec --dir out -j ACCEPT",
    "iptables -A INPUT -p esp -j ACCEPT",
    "iptables -A INPUT -i eth0.2 -p udp --dport 500 -j ACCEPT",
    "iptables -A INPUT -i eth0.2 -p tcp --dport 500 -j ACCEPT",
    "iptables -A INPUT -i eth0.2 -p udp --dport 4500 -j ACCEPT",
    "iptables -A INPUT -p udp --dport 500 -j ACCEPT",
    "iptables -A INPUT -p udp --dport 4500 -j ACCEPT",
    "iptables -A INPUT -p udp -m policy --dir in --pol ipsec -m udp --dport 1701 -j ACCEPT",
)

# Drop outgoing DNS responses (v4 and v6) to simulate a broken resolver.
FIREWALL_RULES_DISABLE_DNS_RESPONSE = (
    "iptables -I OUTPUT -p udp --sport 53 -j DROP",
    "iptables -I OUTPUT -p tcp --sport 53 -j DROP",
    "ip6tables -I OUTPUT -p udp --sport 53 -j DROP",
    "ip6tables -I OUTPUT -p tcp --sport 53 -j DROP",
)
+
+
# Object for vpn profile
class VpnL2tp(object):
    """Profile describing an L2TP VPN server for tests.

    Attributes:
        hostname: vpn server domain name
        address: vpn server address
        username: vpn user account
        password: vpn user password
        psk_secret: psk for ipsec
        name: vpn server name for register in OpenWrt
    """

    def __init__(
        self,
        vpn_server_hostname,
        vpn_server_address,
        vpn_username,
        vpn_password,
        psk_secret,
        server_name,
    ):
        # Server identity.
        self.hostname = vpn_server_hostname
        self.address = vpn_server_address
        self.name = server_name
        # Client credentials.
        self.username = vpn_username
        self.password = vpn_password
        self.psk_secret = psk_secret
diff --git a/packages/antlion/controllers/openwrt_lib/network_settings.py b/packages/antlion/controllers/openwrt_lib/network_settings.py
new file mode 100644
index 0000000..8a8494c
--- /dev/null
+++ b/packages/antlion/controllers/openwrt_lib/network_settings.py
@@ -0,0 +1,1154 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import time
+
+from mobly import signals
+
+from antlion import utils
+from antlion.controllers.openwrt_lib import network_const
+
# init.d service names managed through ServiceManager.
SERVICE_DNSMASQ = "dnsmasq"
SERVICE_STUNNEL = "stunnel"
SERVICE_NETWORK = "network"
SERVICE_PPTPD = "pptpd"
SERVICE_FIREWALL = "firewall"
SERVICE_IPSEC = "ipsec"
SERVICE_XL2TPD = "xl2tpd"
SERVICE_ODHCPD = "odhcpd"
SERVICE_OPENNDS = "opennds"
SERVICE_UHTTPD = "uhttpd"
# Space-separated opkg package lists per feature.
PPTP_PACKAGE = "pptpd kmod-nf-nathelper-extra"
L2TP_PACKAGE = "strongswan-full openssl-util xl2tpd"
NAT6_PACKAGE = "ip6tables kmod-ipt-nat6"
CAPTIVE_PORTAL_PACKAGE = "opennds php7-cli php7-mod-openssl php7-cgi php7"
MDNS_PACKAGE = "avahi-utils avahi-daemon-service-http avahi-daemon-service-ssh libavahi-client avahi-dbus-daemon"
# Config file locations on the AP.
STUNNEL_CONFIG_PATH = "/etc/stunnel/DoTServer.conf"
HISTORY_CONFIG_PATH = "/etc/dirty_configs"
PPTPD_OPTION_PATH = "/etc/ppp/options.pptpd"
XL2TPD_CONFIG_PATH = "/etc/xl2tpd/xl2tpd.conf"
XL2TPD_OPTION_CONFIG_PATH = "/etc/ppp/options.xl2tpd"
FIREWALL_CUSTOM_OPTION_PATH = "/etc/firewall.user"
PPP_CHAP_SECRET_PATH = "/etc/ppp/chap-secrets"
IKEV2_VPN_CERT_KEYS_PATH = "/var/ikev2_cert.sh"
TCPDUMP_DIR = "/tmp/tcpdump/"
LOCALHOST = "192.168.1.1"
# Seconds allowed for a single "opkg install".
DEFAULT_PACKAGE_INSTALL_TIMEOUT = 200
+
+
+class NetworkSettings(object):
+    """Class for network settings.
+
+    Attributes:
+        ssh: ssh connection object.
+        ssh_settings: ssh settings for AccessPoint.
+        service_manager: Object manage service configuration.
+        user: username for ssh.
+        ip: ip address for AccessPoint.
+        log: Logging object for AccessPoint.
+        config: A list to store changes on network settings.
+        firewall_rules_list: A list of firewall rule name list.
+        l2tp: profile for vpn l2tp server.
+    """
+
    def __init__(self, ssh, ssh_settings, logger):
        """Initialize network settings.

        Args:
            ssh: ssh connection object.
            ssh_settings: ssh settings for AccessPoint.
            logger: Logging object for AccessPoint.
        """
        self.ssh = ssh
        self.service_manager = ServiceManager(ssh)
        self.ssh_settings = ssh_settings
        self.user = self.ssh_settings.username
        self.ip = self.ssh_settings.hostname
        self.log = logger
        # Keys recording which setup_* changes are active; they are written
        # to HISTORY_CONFIG_PATH by commit_changes() so that an aborted test
        # run can be cleaned up by cleanup_network_settings() on the next
        # construction.
        self.config = set()
        self.firewall_rules_list = []
        self.update_firewall_rules_list()
        self.cleanup_network_settings()
        self.clear_tcpdump()
+
    def cleanup_network_settings(self):
        """Reset all recorded changes on the Access Point.

        Reads the change history from HISTORY_CONFIG_PATH (left over from a
        previous or aborted run), undoes each recorded change, and removes
        the history file once it is empty.
        """

        # Detect if any changes that is not clean up.
        if self.file_exists(HISTORY_CONFIG_PATH):
            out = self.ssh.run(f"cat {HISTORY_CONFIG_PATH}").stdout
            if out:
                self.config = set(out.split("\n"))

        if self.config:
            # Iterate over a copy: the remove_* helpers discard entries from
            # self.config while we loop.
            temp = self.config.copy()
            for change in temp:
                change_list = change.split()

                command = change_list[0]
                # args is currently unused; per-command arguments are
                # re-read from change_list below where needed.
                args = change_list[1:]
                if command == "setup_dns_server":
                    self.remove_dns_server()
                elif command == "setup_vpn_pptp_server":
                    self.remove_vpn_pptp_server()
                elif command == "setup_vpn_l2tp_server":
                    self.remove_vpn_l2tp_server()
                elif command == "disable_ipv6":
                    self.enable_ipv6()
                elif command == "setup_ipv6_bridge":
                    self.remove_ipv6_bridge()
                elif command == "default_dns":
                    addr_list = str(change_list[1])
                    self.del_default_dns(addr_list)
                elif command == "default_v6_dns":
                    addr_list = str(change_list[1])
                    self.del_default_v6_dns(addr_list)
                elif command == "ipv6_prefer_option":
                    self.remove_ipv6_prefer_option()
                elif command == "block_dns_response":
                    self.unblock_dns_response()
                elif command == "setup_mdns":
                    self.remove_mdns()
                elif command == "add_dhcp_rapid_commit":
                    self.remove_dhcp_rapid_commit()
                elif command == "setup_captive_portal":
                    # Older history entries may lack the port argument;
                    # fall back to the default FAS port.
                    try:
                        fas_port = int(change_list[1])
                    except IndexError:
                        fas_port = 1000
                    # NOTE(review): "cpative" spelling kept as-is —
                    # presumably matches the method defined elsewhere in
                    # this class; confirm before renaming.
                    self.remove_cpative_portal(fas_port)
                else:
                    raise TypeError(f'Unknown command "{change}"')

            self.config = set()

        # Drop the history file once nothing is left to clean up.
        if self.file_exists(HISTORY_CONFIG_PATH):
            out = self.ssh.run(f"cat {HISTORY_CONFIG_PATH}").stdout
            if not out:
                self.ssh.run(f"rm {HISTORY_CONFIG_PATH}")
+
    def commit_changes(self):
        """Apply changes on the Access Point.

        Commits pending uci changes, restarts the services that were marked
        dirty, and persists the current change set to HISTORY_CONFIG_PATH so
        cleanup can recover after an aborted run.
        """
        self.ssh.run("uci commit")
        self.service_manager.restart_services()
        self.create_config_file("\n".join(self.config), HISTORY_CONFIG_PATH)
+
+    def package_install(self, package_list):
+        """Install packages on OpenWrtAP via opkg If not installed.
+
+        Args:
+            package_list: package list to install.
+                          e.g. "pptpd kmod-mppe kmod-nf-nathelper-extra"
+        """
+        self.ssh.run("opkg update")
+        for package_name in package_list.split(" "):
+            if not self._package_installed(package_name):
+                self.ssh.run(
+                    f"opkg install {package_name}",
+                    timeout=DEFAULT_PACKAGE_INSTALL_TIMEOUT,
+                )
+                self.log.info(f"Package: {package_name} installed.")
+            else:
+                self.log.info(f"Package: {package_name} skipped (already installed).")
+
+    def package_remove(self, package_list):
+        """Remove packages on OpenWrtAP via opkg If existed.
+
+        Args:
+            package_list: package list to remove.
+        """
+        for package_name in package_list.split(" "):
+            if self._package_installed(package_name):
+                self.ssh.run(f"opkg remove {package_name}")
+                self.log.info(f"Package: {package_name} removed.")
+            else:
+                self.log.info(f"No exist package {package_name} found.")
+
+    def _package_installed(self, package_name):
+        """Check if target package installed on OpenWrtAP.
+
+        Args:
+            package_name: package name want to check.
+
+        Returns:
+            True if installed.
+        """
+        if self.ssh.run(f"opkg list-installed {package_name}").stdout:
+            return True
+        return False
+
    def file_exists(self, abs_file_path):
        """Check if target file exist on specific path on OpenWrt.

        Args:
            abs_file_path: Absolute path for the file.

        Returns:
            True if Existed.
        """
        path, file_name = abs_file_path.rsplit("/", 1)
        # NOTE(review): `ls | grep` matches substrings, so "/etc/foo" is
        # reported as existing when only "/etc/foo.bak" exists — consider
        # anchoring the match (e.g. grep -x) or using `test -e`.
        if self.ssh.run(f"ls {path} | grep {file_name}", ignore_status=True).stdout:
            return True
        return False
+
+    def path_exists(self, abs_path):
+        """Check if dir exist on OpenWrt.
+
+        Args:
+            abs_path: absolutely path for create folder.
+        """
+        try:
+            self.ssh.run(f"ls {abs_path}")
+        except:
+            return False
+        return True
+
+    def create_folder(self, abs_path):
+        """If dir not exist, create it.
+
+        Args:
+            abs_path: absolutely path for create folder.
+        """
+        if not self.path_exists(abs_path):
+            self.ssh.run(f"mkdir {abs_path}")
+        else:
+            self.log.info(f"{abs_path} already existed.")
+
+    def count(self, config, key):
+        """Count in uci config.
+
+        Args:
+            config: config or section to research
+            key: keywords to  e.g. rule, domain
+        Returns:
+            Numbers of the count.
+        """
+        count = self.ssh.run(
+            f"uci show {config} | grep ={key}", ignore_status=True
+        ).stdout
+        return len(count.split("\n"))
+
    def create_config_file(self, config, file_path):
        """Create config file. Overwrite if file already exist.

        Args:
            config: A string of content of config.
            file_path: Config's abs_path.
        """
        # NOTE(review): content is interpolated into a double-quoted shell
        # string and passed through `echo -e`, so quotes, backslashes and
        # `$` in config would be interpreted — callers must pass trusted,
        # shell-safe content.
        self.ssh.run(f'echo -e "{config}" > {file_path}')
+
+    def replace_config_option(self, old_option, new_option, file_path):
+        """Replace config option if pattern match.
+
+        If find match pattern with old_option, then replace it with new_option.
+        Else add new_option to the file.
+
+        Args:
+            old_option: the regexp pattern to replace.
+            new_option: the option to add.
+            file_path: Config's abs_path.
+        """
+        config = self.ssh.run(f"cat {file_path}").stdout
+        config, count = re.subn(old_option, new_option, config)
+        if not count:
+            config = f"{config}\n{new_option}"
+        self.create_config_file(config, file_path)
+
+    def remove_config_option(self, option, file_path):
+        """Remove option from config file.
+
+        Args:
+            option: Option to remove. Support regular expression.
+            file_path: Config's abs_path.
+        Returns:
+            Boolean for find option to remove.
+        """
+        config = self.ssh.run(f"cat {file_path}").stdout.split("\n")
+        for line in config:
+            count = re.subn(option, "", line)[1]
+            if count > 0:
+                config.remove(line)
+                self.create_config_file("\n".join(config), file_path)
+                return True
+        self.log.warning("No match option to remove.")
+        return False
+
    def setup_dns_server(self, domain_name):
        """Setup a local DNS (and DNS-over-TLS) server on OpenWrtAP.

        Args:
            domain_name: Local dns domain name.
        """
        # Record the change so cleanup_network_settings() can undo it.
        self.config.add("setup_dns_server")
        self.log.info(f"Setup DNS server with domain name {domain_name}")
        self.ssh.run(f"uci set dhcp.@dnsmasq[0].local='/{domain_name}/'")
        self.ssh.run(f"uci set dhcp.@dnsmasq[0].domain='{domain_name}'")
        # Resolve the new domain to the AP itself.
        self.add_resource_record(domain_name, self.ip)
        self.service_manager.need_restart(SERVICE_DNSMASQ)
        self.commit_changes()

        # Check stunnel package is installed
        self.package_install("stunnel")
        self.service_manager.stop(SERVICE_STUNNEL)
        self.service_manager.disable(SERVICE_STUNNEL)

        # Enable stunnel manually with our own DoT config (the init.d
        # service stays disabled).
        self.create_stunnel_config()
        self.ssh.run("stunnel /etc/stunnel/DoTServer.conf")
+
    def remove_dns_server(self):
        """Remove the local DNS server setup on OpenWrtAP."""
        # Stop the manually launched stunnel instance, if running.
        if self.file_exists("/var/run/stunnel.pid"):
            self.ssh.run("kill $(cat /var/run/stunnel.pid)")
        # Restore the default dnsmasq domain settings.
        self.ssh.run("uci set dhcp.@dnsmasq[0].local='/lan/'")
        self.ssh.run("uci set dhcp.@dnsmasq[0].domain='lan'")
        self.clear_resource_record()
        self.service_manager.need_restart(SERVICE_DNSMASQ)
        # Clear the history entry and persist the cleanup.
        self.config.discard("setup_dns_server")
        self.commit_changes()
+
    def add_resource_record(self, domain_name, domain_ip):
        """Add a dnsmasq A record (dhcp 'domain' section).

        Appends a new anonymous `domain` section and fills it in; `[-1]`
        addresses the section just added.

        Args:
            domain_name: A string for domain name.
            domain_ip: A string for domain ip.
        """
        self.ssh.run("uci add dhcp domain")
        self.ssh.run(f"uci set dhcp.@domain[-1].name='{domain_name}'")
        self.ssh.run(f"uci set dhcp.@domain[-1].ip='{domain_ip}'")
        self.service_manager.need_restart(SERVICE_DNSMASQ)
+
    def del_resource_record(self):
        """Delete the last resource record (most recently added dhcp
        'domain' section)."""
        self.ssh.run("uci delete dhcp.@domain[-1]")
        self.service_manager.need_restart(SERVICE_DNSMASQ)
+
    def clear_resource_record(self):
        """Delete the all resource record.

        `uci show dhcp | grep =domain` prints one line per record, so the
        last record is deleted once per output line.
        """
        # NOTE(review): assumes ssh.run().stdout has no trailing newline; a
        # trailing empty element from split("\n") would trigger one extra
        # delete — confirm ssh.run strips output.
        rr = self.ssh.run("uci show dhcp | grep =domain", ignore_status=True).stdout
        if rr:
            for _ in rr.split("\n"):
                self.del_resource_record()
        self.service_manager.need_restart(SERVICE_DNSMASQ)
+
+    def create_stunnel_config(self):
+        """Create config for stunnel service."""
+        stunnel_config = [
+            "pid = /var/run/stunnel.pid",
+            "[dns]",
+            "accept = 853",
+            "connect = 127.0.0.1:53",
+            "cert = /etc/stunnel/fullchain.pem",
+            "key = /etc/stunnel/privkey.pem",
+        ]
+        config_string = "\n".join(stunnel_config)
+        self.create_config_file(config_string, STUNNEL_CONFIG_PATH)
+
    def setup_vpn_pptp_server(self, local_ip, user, password):
        """Setup pptp vpn server on OpenWrt.

        Installs the pptpd package, configures the server address and
        account, opens the firewall, and schedules the affected services
        for restart.

        Args:
            local_ip: local pptp server ip address.
            user: username for pptp user.
            password: password for pptp user.
        """
        #  Install pptp service
        self.package_install(PPTP_PACKAGE)

        self.config.add("setup_vpn_pptp_server")
        # Edit /etc/config/pptpd & /etc/ppp/options.pptpd
        self.setup_pptpd(local_ip, user, password)
        # Edit /etc/config/firewall & /etc/firewall.user
        self.setup_firewall_rules_for_pptp()
        # Enable service
        self.service_manager.enable(SERVICE_PPTPD)
        self.service_manager.need_restart(SERVICE_PPTPD)
        self.service_manager.need_restart(SERVICE_FIREWALL)
        self.commit_changes()
+
    def remove_vpn_pptp_server(self):
        """Remove pptp vpn server on OpenWrt.

        Reverses setup_vpn_pptp_server: restores configs and firewall,
        disables the service, then removes the package and its config files.
        """
        # Edit /etc/config/pptpd
        self.restore_pptpd()
        # Edit /etc/config/firewall & /etc/firewall.user
        self.restore_firewall_rules_for_pptp()
        # Disable service
        self.service_manager.disable(SERVICE_PPTPD)
        self.service_manager.need_restart(SERVICE_PPTPD)
        self.service_manager.need_restart(SERVICE_FIREWALL)
        self.config.discard("setup_vpn_pptp_server")
        self.commit_changes()

        # Remove leftover config files so a later reinstall starts clean.
        self.package_remove(PPTP_PACKAGE)
        self.ssh.run("rm /etc/ppp/options.pptpd")
        self.ssh.run("rm /etc/config/pptpd")
+
+    def setup_pptpd(self, local_ip, username, password, ms_dns="8.8.8.8"):
+        """Setup pptpd config for ip addr and account.
+
+        Args:
+            local_ip: vpn server address
+            username: pptp vpn username
+            password: pptp vpn password
+            ms_dns: DNS server
+        """
+        # Calculate remote ip address
+        # e.g. local_ip = 10.10.10.9
+        # remote_ip = 10.10.10.10 -250
+        remote_ip = local_ip.split(".")
+        remote_ip.append(str(int(remote_ip.pop(-1)) + 1))
+        remote_ip = ".".join(remote_ip)
+        # Enable pptp service and set ip addr
+        self.ssh.run("uci set pptpd.pptpd.enabled=1")
+        self.ssh.run(f"uci set pptpd.pptpd.localip='{local_ip}'")
+        self.ssh.run(f"uci set pptpd.pptpd.remoteip='{remote_ip}-250'")
+
+        # Setup pptp service account
+        self.ssh.run(f"uci set pptpd.@login[0].username='{username}'")
+        self.ssh.run(f"uci set pptpd.@login[0].password='{password}'")
+        self.service_manager.need_restart(SERVICE_PPTPD)
+
+        self.replace_config_option(
+            r"#*ms-dns \d+.\d+.\d+.\d+", f"ms-dns {ms_dns}", PPTPD_OPTION_PATH
+        )
+        self.replace_config_option("(#no)*proxyarp", "proxyarp", PPTPD_OPTION_PATH)
+
    def restore_pptpd(self):
        """Disable pptpd and drop its account from chap-secrets.

        The regexp matches the "<user> pptp-server <password> *" line that
        the pptpd account setup writes.
        """
        self.ssh.run("uci set pptpd.pptpd.enabled=0")
        self.remove_config_option(r"\S+ pptp-server \S+ \*", PPP_CHAP_SECRET_PATH)
        self.service_manager.need_restart(SERVICE_PPTPD)
+
    def setup_vpn_l2tp_server(
        self,
        vpn_server_hostname,
        vpn_server_address,
        vpn_username,
        vpn_password,
        psk_secret,
        server_name,
        country,
        org,
    ):
        """Setup l2tp vpn server on OpenWrt.

        Stores the parameters on self.l2tp (consumed by the setup_* helpers
        below and by remove_vpn_l2tp_server), writes every config file the
        strongswan/xl2tpd stack needs, and schedules service restarts.

        Args:
            vpn_server_hostname: vpn server domain name
            vpn_server_address: vpn server addr
            vpn_username: vpn account
            vpn_password: vpn password
            psk_secret: psk for ipsec
            server_name: vpn server name for register in OpenWrt
            country: country code for generate cert keys.
            org: Organization name for generate cert keys.
        """
        self.l2tp = network_const.VpnL2tp(
            vpn_server_hostname,
            vpn_server_address,
            vpn_username,
            vpn_password,
            psk_secret,
            server_name,
        )

        self.package_install(L2TP_PACKAGE)
        self.config.add("setup_vpn_l2tp_server")

        # /etc/strongswan.conf: Strongswan configuration file
        self.setup_strongswan()
        # /etc/ipsec.conf /etc/ipsec.secrets
        self.setup_ipsec()
        # /etc/xl2tpd/xl2tpd.conf & /etc/ppp/options.xl2tpd
        self.setup_xl2tpd()
        # /etc/ppp/chap-secrets
        self.setup_ppp_secret()
        # /etc/config/firewall & /etc/firewall.user
        self.setup_firewall_rules_for_l2tp()
        # setup vpn server local ip
        self.setup_vpn_local_ip()
        # generate cert and key for rsa
        # ikev2 additionally needs the hostname resolvable to localhost on
        # the AP itself for the SAN checks to pass.
        if self.l2tp.name == "ikev2-server":
            self.generate_ikev2_vpn_cert_keys(country, org)
            self.add_resource_record(self.l2tp.hostname, LOCALHOST)
        else:
            self.generate_vpn_cert_keys(country, org)
        # restart service
        self.service_manager.need_restart(SERVICE_IPSEC)
        self.service_manager.need_restart(SERVICE_XL2TPD)
        self.service_manager.need_restart(SERVICE_FIREWALL)
        self.commit_changes()
+
+    def remove_vpn_l2tp_server(self):
+        """Remove l2tp vpn server on OpenWrt."""
+        self.config.discard("setup_vpn_l2tp_server")
+        self.restore_firewall_rules_for_l2tp()
+        self.remove_vpn_local_ip()
+        if self.l2tp.name == "ikev2-server":
+            self.clear_resource_record()
+        self.service_manager.need_restart(SERVICE_IPSEC)
+        self.service_manager.need_restart(SERVICE_XL2TPD)
+        self.service_manager.need_restart(SERVICE_FIREWALL)
+        self.commit_changes()
+        self.package_remove(L2TP_PACKAGE)
+        if hasattr(self, "l2tp"):
+            delattr(self, "l2tp")
+
    def setup_strongswan(self, dns="8.8.8.8"):
        """Setup strongswan config.

        Args:
            dns: DNS server address handed to IKE clients (charon dns1).
        """
        config = [
            "charon {",
            "   load_modular = yes",
            "   plugins {",
            "       include strongswan.d/charon/*.conf",
            "   }",
            f"   dns1={dns}",
            "}",
        ]
        self.create_config_file("\n".join(config), "/etc/strongswan.conf")
+
    def setup_ipsec(self):
        """Setup ipsec config.

        Writes /etc/ipsec.conf from the connection templates in
        network_const and /etc/ipsec.secrets with the PSK/RSA/XAUTH
        credentials taken from self.l2tp.
        """

        config: list[str] = []

        def load_ipsec_config(data, rightsourceip=False):
            # `data` is a dict of {section_name: {option: value}}; emit the
            # section header, its indented options, and optionally a
            # rightsourceip pool derived from the server's /24.
            for i in data.keys():
                config.append(i)
                for j in data[i].keys():
                    config.append(f"\t {j}={data[i][j]}")
                if rightsourceip:
                    config.append(
                        f"\t rightsourceip={self.l2tp.address.rsplit('.', 1)[0]}.16/26"
                    )
                config.append("")

        load_ipsec_config(network_const.IPSEC_IKEV2_MSCHAPV2, True)
        load_ipsec_config(network_const.IPSEC_IKEV2_PSK, True)
        load_ipsec_config(network_const.IPSEC_IKEV2_RSA, True)
        load_ipsec_config(network_const.IPSEC_IKEV2_MSCHAPV2_HOSTNAME, True)
        load_ipsec_config(network_const.IPSEC_IKEV2_PSK_HOSTNAME, True)
        load_ipsec_config(network_const.IPSEC_IKEV2_RSA_HOSTNAME, True)
        load_ipsec_config(network_const.IPSEC_CONF)
        load_ipsec_config(network_const.IPSEC_L2TP_PSK)
        load_ipsec_config(network_const.IPSEC_L2TP_RSA)
        load_ipsec_config(network_const.IPSEC_HYBRID_RSA, True)
        load_ipsec_config(network_const.IPSEC_XAUTH_PSK, True)
        load_ipsec_config(network_const.IPSEC_XAUTH_RSA, True)
        self.create_config_file("\n".join(config), "/etc/ipsec.conf")

        # NOTE(review): the raw strings write literal backslash-quote
        # sequences (\") into ipsec.secrets — presumably unescaped by a
        # shell layer inside create_config_file; confirm the file ends up
        # with plain double quotes.
        ipsec_secret = []
        ipsec_secret.append(r": PSK \"%s\"" % self.l2tp.psk_secret)
        ipsec_secret.append(r": RSA \"%s\"" % "serverKey.der")
        ipsec_secret.append(
            r"%s : XAUTH \"%s\"" % (self.l2tp.username, self.l2tp.password)
        )
        self.create_config_file("\n".join(ipsec_secret), "/etc/ipsec.secrets")
+
+    def setup_xl2tpd(self, ip_range=20):
+        """Setup xl2tpd config."""
+        net_id, host_id = self.l2tp.address.rsplit(".", 1)
+        xl2tpd_conf = list(network_const.XL2TPD_CONF_GLOBAL)
+        xl2tpd_conf.append(f"auth file = {PPP_CHAP_SECRET_PATH}")
+        xl2tpd_conf.extend(network_const.XL2TPD_CONF_INS)
+        xl2tpd_conf.append(
+            f"ip range = {net_id}.{host_id}-{net_id}.{str(int(host_id) + ip_range)}"
+        )
+        xl2tpd_conf.append(f"local ip = {self.l2tp.address}")
+        xl2tpd_conf.append(f"name = {self.l2tp.name}")
+        xl2tpd_conf.append(f"pppoptfile = {XL2TPD_OPTION_CONFIG_PATH}")
+
+        self.create_config_file("\n".join(xl2tpd_conf), XL2TPD_CONFIG_PATH)
+        xl2tpd_option = list(network_const.XL2TPD_OPTION)
+        xl2tpd_option.append(f"name {self.l2tp.name}")
+        self.create_config_file("\n".join(xl2tpd_option), XL2TPD_OPTION_CONFIG_PATH)
+
    def setup_ppp_secret(self):
        """Register the l2tp account in the ppp chap-secrets file.

        Replaces an existing "<user> <server> <password> *" entry for this
        server name, or appends one if none matches.
        """
        self.replace_config_option(
            r"\S+ %s \S+ \*" % self.l2tp.name,
            f"{self.l2tp.username} {self.l2tp.name} {self.l2tp.password} *",
            PPP_CHAP_SECRET_PATH,
        )
+
    def generate_vpn_cert_keys(self, country, org):
        """Generate cert and keys for vpn server.

        Builds a CA, a server certificate (SAN = this AP's address), and a
        client certificate, converts them to PEM, bundles the client
        credentials into a PKCS#12 file served from /www/downloads/, and
        installs everything under /etc/ipsec.d/.

        Args:
            country: country code embedded in the certificate DNs.
            org: organization name embedded in the certificate DNs.
        """
        rsa = "--type rsa"
        lifetime = "--lifetime 365"
        size = "--size 4096"

        # CA key + self-signed CA certificate.
        self.ssh.run(f"ipsec pki --gen {rsa} {size} --outform der > caKey.der")
        self.ssh.run(
            "ipsec pki --self --ca %s --in caKey.der %s --dn "
            '"C=%s, O=%s, CN=%s" --outform der > caCert.der'
            % (lifetime, rsa, country, org, self.l2tp.hostname)
        )
        # Server key + CA-issued server certificate.
        self.ssh.run(f"ipsec pki --gen {size} {rsa} --outform der > serverKey.der")
        self.ssh.run(
            "ipsec pki --pub --in serverKey.der %s | ipsec pki "
            "--issue %s --cacert caCert.der --cakey caKey.der "
            '--dn "C=%s, O=%s, CN=%s" --san %s --flag serverAuth'
            " --flag ikeIntermediate --outform der > serverCert.der"
            % (rsa, lifetime, country, org, self.l2tp.hostname, LOCALHOST)
        )
        # Client key + CA-issued client certificate.
        self.ssh.run(f"ipsec pki --gen {size} {rsa} --outform der > clientKey.der")
        self.ssh.run(
            "ipsec pki --pub --in clientKey.der %s | ipsec pki "
            "--issue %s --cacert caCert.der --cakey caKey.der "
            '--dn "C=%s, O=%s, CN=%s@%s" --outform der > '
            "clientCert.der"
            % (rsa, lifetime, country, org, self.l2tp.username, self.l2tp.hostname)
        )

        # Convert DER to PEM for the openssl pkcs12 step below.
        self.ssh.run(
            "openssl rsa -inform DER -in clientKey.der"
            " -out clientKey.pem -outform PEM"
        )
        self.ssh.run(
            "openssl x509 -inform DER -in clientCert.der"
            " -out clientCert.pem -outform PEM"
        )
        self.ssh.run(
            "openssl x509 -inform DER -in caCert.der" " -out caCert.pem -outform PEM"
        )
        # Bundle client cert + key + CA into a passwordless PKCS#12 file.
        self.ssh.run(
            "openssl pkcs12 -in clientCert.pem -inkey  clientKey.pem"
            " -certfile caCert.pem -export -out clientPkcs.p12 -passout pass:"
        )

        # Install into the strongswan directory layout and publish the
        # client bundle over HTTP.
        self.ssh.run("mv caCert.pem /etc/ipsec.d/cacerts/")
        self.ssh.run("mv *Cert* /etc/ipsec.d/certs/")
        self.ssh.run("mv *Key* /etc/ipsec.d/private/")
        if not self.path_exists("/www/downloads/"):
            self.ssh.run("mkdir /www/downloads/")
        self.ssh.run("mv clientPkcs.p12 /www/downloads/")
        self.ssh.run("chmod 664 /www/downloads/clientPkcs.p12")
+
    def generate_ikev2_vpn_cert_keys(self, country, org):
        """Generate CA/server/client certificates for the ikev2 server.

        Unlike generate_vpn_cert_keys, the commands are written to a shell
        script on the AP and executed there in one go; the quotes therefore
        need an extra level of backslash escaping.

        Args:
            country: country code embedded in the certificate DNs.
            org: organization name embedded in the certificate DNs.
        """
        rsa = "--type rsa"
        lifetime = "--lifetime 365"
        size = "--size 4096"

        if not self.path_exists("/www/downloads/"):
            self.ssh.run("mkdir /www/downloads/")

        # One shell command per list element; see the non-ikev2 variant for
        # the meaning of each pki/openssl step.
        ikev2_vpn_cert_keys = [
            f"ipsec pki --gen {rsa} {size} --outform der > caKey.der",
            "ipsec pki --self --ca %s --in caKey.der %s --dn "
            '"C=%s, O=%s, CN=%s" --outform der > caCert.der'
            % (lifetime, rsa, country, org, self.l2tp.hostname),
            f"ipsec pki --gen {size} {rsa} --outform der > serverKey.der",
            "ipsec pki --pub --in serverKey.der %s | ipsec pki --issue %s "
            r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s\" "
            "--san %s --san %s --flag serverAuth --flag ikeIntermediate "
            "--outform der > serverCert.der"
            % (
                rsa,
                lifetime,
                country,
                org,
                self.l2tp.hostname,
                LOCALHOST,
                self.l2tp.hostname,
            ),
            f"ipsec pki --gen {size} {rsa} --outform der > clientKey.der",
            "ipsec pki --pub --in clientKey.der %s | ipsec pki --issue %s "
            r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s@%s\" "
            r"--san \"%s\" --san \"%s@%s\" --san \"%s@%s\" --outform der "
            "> clientCert.der"
            % (
                rsa,
                lifetime,
                country,
                org,
                self.l2tp.username,
                self.l2tp.hostname,
                self.l2tp.username,
                self.l2tp.username,
                LOCALHOST,
                self.l2tp.username,
                self.l2tp.hostname,
            ),
            "openssl rsa -inform DER -in clientKey.der "
            "-out clientKey.pem -outform PEM",
            "openssl x509 -inform DER -in clientCert.der "
            "-out clientCert.pem -outform PEM",
            "openssl x509 -inform DER -in caCert.der " "-out caCert.pem -outform PEM",
            "openssl pkcs12 -in clientCert.pem -inkey  clientKey.pem "
            "-certfile caCert.pem -export -out clientPkcs.p12 -passout pass:",
            "mv caCert.pem /etc/ipsec.d/cacerts/",
            "mv *Cert* /etc/ipsec.d/certs/",
            "mv *Key* /etc/ipsec.d/private/",
            "mv clientPkcs.p12 /www/downloads/",
            "chmod 664 /www/downloads/clientPkcs.p12",
        ]
        file_string = "\n".join(ikev2_vpn_cert_keys)
        self.create_config_file(file_string, IKEV2_VPN_CERT_KEYS_PATH)

        # Execute the generated script on the AP.
        self.ssh.run(f"chmod +x {IKEV2_VPN_CERT_KEYS_PATH}")
        self.ssh.run(f"{IKEV2_VPN_CERT_KEYS_PATH}")
+
+    def update_firewall_rules_list(self):
+        """Update rule list in /etc/config/firewall."""
+        new_rules_list = []
+        for i in range(self.count("firewall", "rule")):
+            rule = self.ssh.run(f"uci get firewall.@rule[{i}].name").stdout
+            new_rules_list.append(rule)
+        self.firewall_rules_list = new_rules_list
+
    def setup_firewall_rules_for_pptp(self):
        """Setup firewall for vpn pptp server.

        Adds (idempotently, keyed by rule name) uci rules accepting PPTP
        control traffic on tcp/1723 and GRE (protocol 47) from wan, plus the
        custom iptables rules PPTP needs.
        """
        self.update_firewall_rules_list()
        if "pptpd" not in self.firewall_rules_list:
            self.ssh.run("uci add firewall rule")
            self.ssh.run("uci set firewall.@rule[-1].name='pptpd'")
            self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
            self.ssh.run("uci set firewall.@rule[-1].proto='tcp'")
            self.ssh.run("uci set firewall.@rule[-1].dest_port='1723'")
            self.ssh.run("uci set firewall.@rule[-1].family='ipv4'")
            self.ssh.run("uci set firewall.@rule[-1].src='wan'")

        if "GRP" not in self.firewall_rules_list:
            self.ssh.run("uci add firewall rule")
            self.ssh.run("uci set firewall.@rule[-1].name='GRP'")
            self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
            self.ssh.run("uci set firewall.@rule[-1].src='wan'")
            # Protocol 47 = GRE, used for the PPTP data channel.
            self.ssh.run("uci set firewall.@rule[-1].proto='47'")

        iptable_rules = list(network_const.FIREWALL_RULES_FOR_PPTP)
        self.add_custom_firewall_rules(iptable_rules)
        self.service_manager.need_restart(SERVICE_FIREWALL)
+
    def restore_firewall_rules_for_pptp(self):
        """Restore firewall for vpn pptp server.

        The rule list is re-read before each deletion because removing a
        rule shifts the uci indices of the remaining ones.
        """
        self.update_firewall_rules_list()
        if "pptpd" in self.firewall_rules_list:
            self.ssh.run(
                f"uci del firewall.@rule[{self.firewall_rules_list.index('pptpd')}]"
            )
        self.update_firewall_rules_list()
        if "GRP" in self.firewall_rules_list:
            self.ssh.run(
                f"uci del firewall.@rule[{self.firewall_rules_list.index('GRP')}]"
            )
        self.remove_custom_firewall_rules()
        self.service_manager.need_restart(SERVICE_FIREWALL)
+
    def setup_firewall_rules_for_l2tp(self):
        """Setup firewall for vpn l2tp server.

        Adds (idempotently, keyed by rule name) uci rules accepting ESP,
        IPsec NAT traversal (udp/4500), and AH from wan, plus custom
        iptables rules that forward and masquerade the VPN client subnet.
        """
        self.update_firewall_rules_list()
        if "ipsec esp" not in self.firewall_rules_list:
            self.ssh.run("uci add firewall rule")
            self.ssh.run("uci set firewall.@rule[-1].name='ipsec esp'")
            self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
            self.ssh.run("uci set firewall.@rule[-1].proto='esp'")
            self.ssh.run("uci set firewall.@rule[-1].src='wan'")

        if "ipsec nat-t" not in self.firewall_rules_list:
            self.ssh.run("uci add firewall rule")
            self.ssh.run("uci set firewall.@rule[-1].name='ipsec nat-t'")
            self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
            self.ssh.run("uci set firewall.@rule[-1].src='wan'")
            self.ssh.run("uci set firewall.@rule[-1].proto='udp'")
            self.ssh.run("uci set firewall.@rule[-1].dest_port='4500'")

        if "auth header" not in self.firewall_rules_list:
            self.ssh.run("uci add firewall rule")
            self.ssh.run("uci set firewall.@rule[-1].name='auth header'")
            self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
            self.ssh.run("uci set firewall.@rule[-1].src='wan'")
            self.ssh.run("uci set firewall.@rule[-1].proto='ah'")

        # Forward and NAT the VPN client /24 derived from the server address.
        net_id = self.l2tp.address.rsplit(".", 1)[0]
        iptable_rules = list(network_const.FIREWALL_RULES_FOR_L2TP)
        iptable_rules.append(f"iptables -A FORWARD -s {net_id}.0/24  -j ACCEPT")
        iptable_rules.append(
            f"iptables -t nat -A POSTROUTING -s {net_id}.0/24 -o eth0.2 -j MASQUERADE"
        )

        self.add_custom_firewall_rules(iptable_rules)
        self.service_manager.need_restart(SERVICE_FIREWALL)
+
+    def restore_firewall_rules_for_l2tp(self):
+        """Restore firewall for vpn l2tp server."""
+        self.update_firewall_rules_list()
+        if "ipsec esp" in self.firewall_rules_list:
+            self.ssh.run(
+                f"uci del firewall.@rule[{self.firewall_rules_list.index('ipsec esp')}]"
+            )
+        self.update_firewall_rules_list()
+        if "ipsec nat-t" in self.firewall_rules_list:
+            self.ssh.run(
+                "uci del firewall.@rule[%s]"
+                % self.firewall_rules_list.index("ipsec nat-t")
+            )
+        self.update_firewall_rules_list()
+        if "auth header" in self.firewall_rules_list:
+            self.ssh.run(
+                "uci del firewall.@rule[%s]"
+                % self.firewall_rules_list.index("auth header")
+            )
+        self.remove_custom_firewall_rules()
+        self.service_manager.need_restart(SERVICE_FIREWALL)
+
    def add_custom_firewall_rules(self, rules):
        """Backup current custom rules and replace with arguments.

        The original /etc/firewall.user is moved aside (only once — a
        pre-existing backup is never overwritten) so
        remove_custom_firewall_rules() can restore it.

        Args:
            rules: A list of iptable rules to apply.
        """
        backup_file_path = f"{FIREWALL_CUSTOM_OPTION_PATH}.backup"
        if not self.file_exists(backup_file_path):
            self.ssh.run(f"mv {FIREWALL_CUSTOM_OPTION_PATH} {backup_file_path}")
        for rule in rules:
            # NOTE(review): the rule is passed to echo unquoted — assumes no
            # rule contains shell metacharacters beyond spaces; verify for
            # new rule sources.
            self.ssh.run(f"echo {rule} >> {FIREWALL_CUSTOM_OPTION_PATH}")
+
    def remove_custom_firewall_rules(self):
        """Clean up and recover custom firewall rules.

        Restores the backup made by add_custom_firewall_rules(); when no
        backup exists the custom rules file is simply emptied.
        """
        backup_file_path = f"{FIREWALL_CUSTOM_OPTION_PATH}.backup"
        if self.file_exists(backup_file_path):
            self.ssh.run(f"mv {backup_file_path} {FIREWALL_CUSTOM_OPTION_PATH}")
        else:
            self.log.debug(f"Did not find {backup_file_path}")
            self.ssh.run(f"echo  > {FIREWALL_CUSTOM_OPTION_PATH}")
+
    def disable_pptp_service(self):
        """Disable pptp service by removing the pptpd package."""
        self.package_remove(PPTP_PACKAGE)
+
+    def setup_vpn_local_ip(self):
+        """Setup VPN Server local ip on OpenWrt for client ping verify."""
+        self.ssh.run("uci set network.lan2=interface")
+        self.ssh.run("uci set network.lan2.type=bridge")
+        self.ssh.run("uci set network.lan2.ifname=eth1.2")
+        self.ssh.run("uci set network.lan2.proto=static")
+        self.ssh.run(f'uci set network.lan2.ipaddr="{self.l2tp.address}"')
+        self.ssh.run("uci set network.lan2.netmask=255.255.255.0")
+        self.ssh.run("uci set network.lan2=interface")
+        self.service_manager.reload(SERVICE_NETWORK)
+        self.commit_changes()
+
    def remove_vpn_local_ip(self):
        """Discard vpn local ip on OpenWrt (deletes the lan2 interface)."""
        self.ssh.run("uci delete network.lan2")
        self.service_manager.reload(SERVICE_NETWORK)
        self.commit_changes()
+
    def enable_ipv6(self):
        """Enable ipv6 on OpenWrt.

        Re-enables ipv6 on lan/wan and the odhcpd daemon, and clears the
        marker set by disable_ipv6().
        """
        self.ssh.run("uci set network.lan.ipv6=1")
        self.ssh.run("uci set network.wan.ipv6=1")
        self.service_manager.enable("odhcpd")
        self.service_manager.reload(SERVICE_NETWORK)
        self.config.discard("disable_ipv6")
        self.commit_changes()
+
    def disable_ipv6(self):
        """Disable ipv6 on OpenWrt.

        Turns ipv6 off on lan/wan, disables odhcpd, and records the change
        so teardown can restore it.
        """
        self.config.add("disable_ipv6")
        self.ssh.run("uci set network.lan.ipv6=0")
        self.ssh.run("uci set network.wan.ipv6=0")
        self.service_manager.disable("odhcpd")
        self.service_manager.reload(SERVICE_NETWORK)
        self.commit_changes()
+
    def setup_ipv6_bridge(self):
        """Setup ipv6 bridge for client have ability to access network.

        Switches lan DHCPv6/RA/NDP to relay mode and creates a wan6 dhcp
        section acting as the relay master, so upstream IPv6 is bridged
        through to lan clients.
        """
        self.config.add("setup_ipv6_bridge")

        self.ssh.run("uci set dhcp.lan.dhcpv6=relay")
        self.ssh.run("uci set dhcp.lan.ra=relay")
        self.ssh.run("uci set dhcp.lan.ndp=relay")

        self.ssh.run("uci set dhcp.wan6=dhcp")
        self.ssh.run("uci set dhcp.wan6.dhcpv6=relay")
        self.ssh.run("uci set dhcp.wan6.ra=relay")
        self.ssh.run("uci set dhcp.wan6.ndp=relay")
        self.ssh.run("uci set dhcp.wan6.master=1")
        self.ssh.run("uci set dhcp.wan6.interface=wan6")

        # Enable service
        self.service_manager.need_restart(SERVICE_ODHCPD)
        self.commit_changes()
+
    def remove_ipv6_bridge(self):
        """Discard ipv6 bridge on OpenWrt.

        No-op unless setup_ipv6_bridge() previously ran (tracked via the
        config marker); restores lan to server mode and deletes the wan6
        relay section.
        """
        if "setup_ipv6_bridge" in self.config:
            self.config.discard("setup_ipv6_bridge")

            self.ssh.run("uci set dhcp.lan.dhcpv6=server")
            self.ssh.run("uci set dhcp.lan.ra=server")
            self.ssh.run("uci delete dhcp.lan.ndp")

            self.ssh.run("uci delete dhcp.wan6")

            self.service_manager.need_restart(SERVICE_ODHCPD)
            self.commit_changes()
+
    def _add_dhcp_option(self, args):
        """Append a raw "code,value" entry to the lan dhcp_option list."""
        self.ssh.run(f'uci add_list dhcp.lan.dhcp_option="{args}"')
+
    def _remove_dhcp_option(self, args):
        """Remove a raw "code,value" entry from the lan dhcp_option list."""
        self.ssh.run(f'uci del_list dhcp.lan.dhcp_option="{args}"')
+
    def add_default_dns(self, addr_list: list[str]):
        """Add default dns server for client.

        The addresses are joined into DHCP option 6 (domain name servers).

        Args:
            addr_list: list of dns ip addresses for Openwrt clients.
        """
        self._add_dhcp_option(f'6,{",".join(addr_list)}')
        self.config.add(f"default_dns {addr_list}")
        self.service_manager.need_restart(SERVICE_DNSMASQ)
        self.commit_changes()
+
    def del_default_dns(self, addr_list: str):
        """Remove default dns server for client.

        Args:
            addr_list: the comma-joined dns address string previously passed
                to DHCP option 6 (must match the added value exactly).
        """
        self._remove_dhcp_option(f"6,{addr_list}")
        self.config.discard(f"default_dns {addr_list}")
        self.service_manager.need_restart(SERVICE_DNSMASQ)
        self.commit_changes()
+
    def add_default_v6_dns(self, addr_list: str):
        """Add default v6 dns server for client.

        Args:
            addr_list: dns ip address string for Openwrt clients, appended
                to the lan dns list served by odhcpd.
        """
        self.ssh.run(f'uci add_list dhcp.lan.dns="{addr_list}"')
        self.config.add(f"default_v6_dns {addr_list}")
        self.service_manager.need_restart(SERVICE_ODHCPD)
        self.commit_changes()
+
+    def del_default_v6_dns(self, addr_list: str):
+        """Del default v6 dns server for client.
+
+        Args:
+            addr_list: list of dns ip address for Openwrt client.
+        """
+        self.ssh.run(f'uci del_list dhcp.lan.dns="{addr_list}"')
+        self.config.add(f"default_v6_dns {addr_list}")
+        self.service_manager.need_restart(SERVICE_ODHCPD)
+        self.commit_changes()
+
    def add_ipv6_prefer_option(self):
        """Advertise DHCP option 108 ("IPv6-Only Preferred") with an 1800s
        value, telling capable clients to prefer IPv6-only operation."""
        self._add_dhcp_option("108,1800i")
        self.config.add("ipv6_prefer_option")
        self.service_manager.need_restart(SERVICE_DNSMASQ)
        self.commit_changes()
+
    def remove_ipv6_prefer_option(self):
        """Stop advertising DHCP option 108 added by add_ipv6_prefer_option."""
        self._remove_dhcp_option("108,1800i")
        self.config.discard("ipv6_prefer_option")
        self.service_manager.need_restart(SERVICE_DNSMASQ)
        self.commit_changes()
+
    def add_dhcp_rapid_commit(self):
        """Enable dnsmasq DHCP rapid commit (2-message DHCP exchange) via
        /etc/dnsmasq.conf."""
        self.create_config_file("dhcp-rapid-commit\n", "/etc/dnsmasq.conf")
        self.config.add("add_dhcp_rapid_commit")
        self.service_manager.need_restart(SERVICE_DNSMASQ)
        self.commit_changes()
+
    def remove_dhcp_rapid_commit(self):
        """Disable DHCP rapid commit by emptying /etc/dnsmasq.conf."""
        self.create_config_file("", "/etc/dnsmasq.conf")
        self.config.discard("add_dhcp_rapid_commit")
        self.service_manager.need_restart(SERVICE_DNSMASQ)
        self.commit_changes()
+
+    def start_tcpdump(self, test_name, args="", interface="br-lan"):
+        """ "Start tcpdump on OpenWrt.
+
+        Args:
+            test_name: Test name for create tcpdump file name.
+            args: Option args for tcpdump.
+            interface: Interface to logging.
+        Returns:
+            tcpdump_file_name: tcpdump file name on OpenWrt.
+            pid: tcpdump process id.
+        """
+        self.package_install("tcpdump")
+        if not self.path_exists(TCPDUMP_DIR):
+            self.ssh.run(f"mkdir {TCPDUMP_DIR}")
+        now = (time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())),)
+        tcpdump_file_name = f"openwrt_{test_name}_{now}.pcap"
+        tcpdump_file_path = f"{TCPDUMP_DIR}{tcpdump_file_name}"
+        cmd = f"tcpdump -i {interface} -s0 {args} -w {tcpdump_file_path}"
+        self.ssh.run_async(cmd)
+        pid = self._get_tcpdump_pid(tcpdump_file_name)
+        if not pid:
+            raise signals.TestFailure("Fail to start tcpdump on OpenWrt.")
+        # Set delay to prevent tcpdump fail to capture target packet.
+        time.sleep(15)
+        return tcpdump_file_name
+
+    def stop_tcpdump(self, tcpdump_file_name, pull_dir=None):
+        """Stop tcpdump on OpenWrt and pull the pcap file.
+
+        Args:
+            tcpdump_file_name: tcpdump file name on OpenWrt.
+            pull_dir: Keep none if no need to pull.
+        Returns:
+            tcpdump abs_path on host.
+        """
+        # Set delay to prevent tcpdump fail to capture target packet.
+        time.sleep(15)
+        pid = self._get_tcpdump_pid(tcpdump_file_name)
+        self.ssh.run(f"kill -9 {pid}", ignore_status=True)
+        if self.path_exists(TCPDUMP_DIR) and pull_dir:
+            tcpdump_path = f"{TCPDUMP_DIR}{tcpdump_file_name}"
+            tcpdump_remote_path = f"{pull_dir}/{tcpdump_file_name}"
+            tcpdump_local_path = f"{self.user}@{self.ip}:{tcpdump_path}"
+            utils.exe_cmd(f"scp {tcpdump_local_path} {tcpdump_remote_path}")
+
+        if self._get_tcpdump_pid(tcpdump_file_name):
+            raise signals.TestFailure("Failed to stop tcpdump on OpenWrt.")
+        if self.file_exists(tcpdump_path):
+            self.ssh.run(f"rm -f {tcpdump_path}")
+        return tcpdump_remote_path if pull_dir else None
+
    def clear_tcpdump(self):
        """Kill every tcpdump process and delete all captured pcap files.

        Raises:
            signals.TestFailure: if a tcpdump process survives the killall.
        """
        self.ssh.run("killall tcpdump", ignore_status=True)
        if self.ssh.run("pgrep tcpdump", ignore_status=True).stdout:
            raise signals.TestFailure("Failed to clean up tcpdump process.")
        if self.path_exists(TCPDUMP_DIR):
            self.ssh.run(f"rm -f  {TCPDUMP_DIR}/*")
+
    def _get_tcpdump_pid(self, tcpdump_file_name):
        """Check tcpdump process on OpenWrt.

        Returns:
            pgrep stdout — the pid(s) of the tcpdump writing to the given
            file, or an empty string when none is running.
        """
        return self.ssh.run(f"pgrep -f {tcpdump_file_name}", ignore_status=True).stdout
+
    def setup_mdns(self):
        """Install the mDNS package and record the change for teardown."""
        self.config.add("setup_mdns")
        self.package_install(MDNS_PACKAGE)
        self.commit_changes()
+
    def remove_mdns(self):
        """Remove the mDNS package and clear the setup marker."""
        self.config.discard("setup_mdns")
        self.package_remove(MDNS_PACKAGE)
        self.commit_changes()
+
    def block_dns_response(self):
        """Install iptables rules that drop outgoing DNS responses."""
        self.config.add("block_dns_response")
        iptable_rules = list(network_const.FIREWALL_RULES_DISABLE_DNS_RESPONSE)
        self.add_custom_firewall_rules(iptable_rules)
        self.service_manager.need_restart(SERVICE_FIREWALL)
        self.commit_changes()
+
    def unblock_dns_response(self):
        """Remove the DNS-blocking rules added by block_dns_response()."""
        self.config.discard("block_dns_response")
        self.remove_custom_firewall_rules()
        self.service_manager.need_restart(SERVICE_FIREWALL)
        self.commit_changes()
+
    def setup_captive_portal(self, fas_fdqn, fas_port=2080):
        """Create captive portal with Forwarding Authentication Service.

        Configures openNDS to redirect clients to a FAS page served by
        uhttpd (php-cgi) on fas_port, and registers the FAS hostname in the
        local DNS pointing at this AP.

        Args:
             fas_fdqn: String for captive portal page's fdqn add to local dns server.
             fas_port: Port for captive portal page.
        """
        self.package_install(CAPTIVE_PORTAL_PACKAGE)
        # The port is recorded in the marker so remove_cpative_portal() can
        # undo the matching uhttpd listen entries.
        self.config.add(f"setup_captive_portal {fas_port}")
        self.ssh.run("uci set opennds.@opennds[0].fas_secure_enabled=2")
        self.ssh.run("uci set opennds.@opennds[0].gatewayport=2050")
        self.ssh.run(f"uci set opennds.@opennds[0].fasport={fas_port}")
        self.ssh.run(f"uci set opennds.@opennds[0].fasremotefqdn={fas_fdqn}")
        self.ssh.run('uci set opennds.@opennds[0].faspath="/nds/fas-aes.php"')
        self.ssh.run("uci set opennds.@opennds[0].faskey=1234567890")
        self.service_manager.need_restart(SERVICE_OPENNDS)
        # Config uhttpd
        self.ssh.run("uci set uhttpd.main.interpreter=.php=/usr/bin/php-cgi")
        self.ssh.run(f"uci add_list uhttpd.main.listen_http=0.0.0.0:{fas_port}")
        self.ssh.run(f"uci add_list uhttpd.main.listen_http=[::]:{fas_port}")
        self.service_manager.need_restart(SERVICE_UHTTPD)
        # cp fas-aes.php
        self.create_folder("/www/nds/")
        self.ssh.run("cp /etc/opennds/fas-aes.php /www/nds")
        # Add fdqn
        self.add_resource_record(fas_fdqn, LOCALHOST)
        self.commit_changes()
+
    # NOTE(review): method name misspells "captive"; kept as-is because
    # external callers depend on it — fix alongside all call sites.
    def remove_cpative_portal(self, fas_port: int = 2080):
        """Remove captive portal.

        Args:
             fas_port: Port for captive portal page (must match the port
                 used in setup_captive_portal).
        """
        # Remove package
        self.package_remove(CAPTIVE_PORTAL_PACKAGE)
        # Clean up config
        self.ssh.run("rm /etc/config/opennds")
        # Remove fdqn
        self.clear_resource_record()
        # Restore uhttpd
        self.ssh.run("uci del uhttpd.main.interpreter")
        self.ssh.run(f"uci del_list uhttpd.main.listen_http='0.0.0.0:{fas_port}'")
        self.ssh.run(f"uci del_list uhttpd.main.listen_http='[::]:{fas_port}'")
        self.service_manager.need_restart(SERVICE_UHTTPD)
        # Clean web root
        self.ssh.run("rm -r /www/nds")
        self.config.discard(f"setup_captive_portal {fas_port}")
        self.commit_changes()
+
+
class ServiceManager(object):
    """Manages init.d services on an OpenWrt device over SSH.

    Attributes:
        ssh: ssh connection object for the AP.
        _need_restart: set of service names queued for restart.
    """

    def __init__(self, ssh):
        self.ssh = ssh
        self._need_restart = set()

    def enable(self, service_name):
        """Enable service auto start."""
        self.ssh.run(f"/etc/init.d/{service_name} enable")

    def disable(self, service_name):
        """Disable service auto start."""
        self.ssh.run(f"/etc/init.d/{service_name} disable")

    def restart(self, service_name):
        """Restart the service."""
        self.ssh.run(f"/etc/init.d/{service_name} restart")

    def reload(self, service_name):
        """Reload the service configuration without a full restart."""
        self.ssh.run(f"/etc/init.d/{service_name} reload")

    def restart_services(self):
        """Restart all services queued via need_restart().

        The network service is reloaded instead of restarted: a full
        network restart tears down the interfaces carrying the SSH control
        connection. (The original reloaded network and then restarted it
        anyway, making the reload pointless.)
        """
        for service in self._need_restart:
            if service == SERVICE_NETWORK:
                self.reload(service)
            else:
                self.restart(service)
        self._need_restart = set()

    def stop(self, service_name):
        """Stop the service."""
        self.ssh.run(f"/etc/init.d/{service_name} stop")

    def need_restart(self, service_name):
        """Queue a service to be restarted by the next restart_services()."""
        self._need_restart.add(service_name)
diff --git a/packages/antlion/controllers/openwrt_lib/openwrt_constants.py b/packages/antlion/controllers/openwrt_lib/openwrt_constants.py
new file mode 100644
index 0000000..3b591d5
--- /dev/null
+++ b/packages/antlion/controllers/openwrt_lib/openwrt_constants.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Command to query board/system information via OpenWrt's ubus IPC.
SYSTEM_INFO_CMD = "ubus call system board"


class OpenWrtWifiSecurity:
    """uci 'encryption' option values accepted by OpenWrt wifi-iface."""

    # Used by OpenWrt AP
    # WPA-PSK, optionally pinned to a specific cipher (CCMP and/or TKIP).
    WPA_PSK_DEFAULT = "psk"
    WPA_PSK_CCMP = "psk+ccmp"
    WPA_PSK_TKIP = "psk+tkip"
    WPA_PSK_TKIP_AND_CCMP = "psk+tkip+ccmp"
    # WPA2-PSK, optionally pinned to a specific cipher (CCMP and/or TKIP).
    WPA2_PSK_DEFAULT = "psk2"
    WPA2_PSK_CCMP = "psk2+ccmp"
    WPA2_PSK_TKIP = "psk2+tkip"
    WPA2_PSK_TKIP_AND_CCMP = "psk2+tkip+ccmp"


class OpenWrtWifiSetting:
    """Wireless interface indices."""

    # NOTE(review): presumably uci section indices of the 2G/5G wifi-iface
    # entries in /etc/config/wireless — confirm against the callers.
    IFACE_2G = 2
    IFACE_5G = 3


class OpenWrtModelMap:
    """Per-model wireless radio section names."""

    # NOTE(review): tuple order assumed to be (2G radio, 5G radio) —
    # confirm against consumers of this map.
    NETGEAR_R8000 = ("radio2", "radio1")
diff --git a/packages/antlion/controllers/openwrt_lib/wireless_config.py b/packages/antlion/controllers/openwrt_lib/wireless_config.py
new file mode 100644
index 0000000..d97e197
--- /dev/null
+++ b/packages/antlion/controllers/openwrt_lib/wireless_config.py
@@ -0,0 +1,53 @@
+"""Class for Wireless config."""
+
+from antlion.controllers.ap_lib.hostapd_security import OpenWRTEncryptionMode
+
+
class WirelessConfig(object):
    """Container for the settings of a single wireless network.

    Attributes:
      name: name of the wireless config
      ssid: SSID of the network.
      security: security of the wifi network.
      band: band of the wifi network.
      iface: network interface of the wifi network.
      password: password for psk network.
      wep_key: wep keys for wep network.
      wep_key_num: key number for wep network.
      radius_server_ip: IP address of radius server.
      radius_server_port: Port number of radius server.
      radius_server_secret: Secret key of radius server.
      hidden: Boolean, if the wifi network is hidden.
      ieee80211w: PMF bit of the wifi network.
    """

    def __init__(
        self,
        name: str,
        ssid: str,
        security: OpenWRTEncryptionMode,
        band: str,
        iface: str = "lan",
        password: str | None = None,
        wep_key: list[str] | None = None,
        wep_key_num: int = 1,
        radius_server_ip: str | None = None,
        radius_server_port: int | None = None,
        radius_server_secret: str | None = None,
        hidden: bool = False,
        ieee80211w: int | None = None,
    ):
        # Identity and placement of the network.
        self.name = name
        self.ssid = ssid
        self.band = band
        self.iface = iface
        # Security configuration (PSK / WEP credentials).
        self.security = security
        self.password = password
        self.wep_key = wep_key
        self.wep_key_num = wep_key_num
        # RADIUS (enterprise) configuration.
        self.radius_server_ip = radius_server_ip
        self.radius_server_port = radius_server_port
        self.radius_server_secret = radius_server_secret
        # Miscellaneous flags.
        self.hidden = hidden
        self.ieee80211w = ieee80211w
diff --git a/packages/antlion/controllers/openwrt_lib/wireless_settings_applier.py b/packages/antlion/controllers/openwrt_lib/wireless_settings_applier.py
new file mode 100644
index 0000000..da0d2d7
--- /dev/null
+++ b/packages/antlion/controllers/openwrt_lib/wireless_settings_applier.py
@@ -0,0 +1,172 @@
+"""Class to configure wireless settings."""
+
+import time
+
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.openwrt_lib.network_settings import (
+    SERVICE_DNSMASQ,
+    ServiceManager,
+)
+from antlion.controllers.openwrt_lib.wireless_config import WirelessConfig
+
# DHCP lease database; preserved across apply/cleanup cycles.
LEASE_FILE = "/tmp/dhcp.leases"
# uci wifi-iface 'encryption' option values.
OPEN_SECURITY = "none"
PSK1_SECURITY = "psk"
PSK_SECURITY = "psk2"
WEP_SECURITY = "wep"
ENT_SECURITY = "wpa2"
OWE_SECURITY = "owe"
SAE_SECURITY = "sae"
SAEMIXED_SECURITY = "sae-mixed"
# Values for the uci 'disabled' option: setting disabled='0' enables the
# radio, disabled='1' disables it — hence the inverted-looking strings.
ENABLE_RADIO = "0"
DISABLE_RADIO = "1"
ENABLE_HIDDEN = "1"
# Default uci radio section names; on this hardware the 2G band is
# 'radio1' and the 5G band is 'radio0'.
RADIO_2G = "radio1"
RADIO_5G = "radio0"
+
+
class WirelessSettingsApplier(object):
    """Applies a list of wireless configs to an OpenWrt AP over ssh.

    Attributes:
      ssh: ssh object for the AP.
      service_manager: object managing service configuration.
      wireless_configs: a list of
        antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
      channel_2g: channel for 2G band.
      channel_5g: channel for 5G band.
      radio_2g: uci radio section name for the 2G band.
      radio_5g: uci radio section name for the 5G band.
    """

    def __init__(
        self, ssh, configs, channel_2g, channel_5g, radio_2g=RADIO_2G, radio_5g=RADIO_5G
    ):
        """Initialize wireless settings.

        Args:
          ssh: ssh connection object.
          configs: a list of
            antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
          channel_2g: channel for 2G band.
          channel_5g: channel for 5G band.
          radio_2g: uci radio section name for the 2G band.
          radio_5g: uci radio section name for the 5G band.
        """
        self.ssh = ssh
        self.service_manager = ServiceManager(ssh)
        self.wireless_configs: list[WirelessConfig] = configs
        self.channel_2g = channel_2g
        self.channel_5g = channel_5g
        self.radio_2g = radio_2g
        self.radio_5g = radio_5g

    def apply_wireless_settings(self):
        """Configure wireless settings from a list of configs."""
        default_2g_iface = f"default_{self.radio_2g}"
        default_5g_iface = f"default_{self.radio_5g}"

        # Set channels for 2G and 5G bands.
        self.ssh.run(f"uci set wireless.{self.radio_2g}.channel='{self.channel_2g}'")
        self.ssh.run(f"uci set wireless.{self.radio_5g}.channel='{self.channel_5g}'")
        if self.channel_5g == 165:
            # Channel 165 only supports a 20 MHz width.
            self.ssh.run(f"uci set wireless.{self.radio_5g}.htmode='VHT20'")
        elif self.channel_5g in (132, 136):
            # These channels need a regulatory domain that permits them.
            self.ssh.run("iw reg set ZA")
            self.ssh.run(f"uci set wireless.{self.radio_5g}.htmode='VHT40'")

        if self.channel_2g == 13:
            # Channel 13 needs a regulatory domain that permits it.
            self.ssh.run("iw reg set AU")

        # Disable the default OpenWrt SSIDs.
        self.ssh.run(f"uci set wireless.{default_2g_iface}.disabled='{DISABLE_RADIO}'")
        self.ssh.run(f"uci set wireless.{default_5g_iface}.disabled='{DISABLE_RADIO}'")

        # Enable both radios.
        self.ssh.run(f"uci set wireless.{self.radio_2g}.disabled='{ENABLE_RADIO}'")
        self.ssh.run(f"uci set wireless.{self.radio_5g}.disabled='{ENABLE_RADIO}'")

        for config in self.wireless_configs:
            # Open networks reuse the band's default wifi-iface section
            # instead of creating a new one.
            if config.security == OPEN_SECURITY:
                if config.band == hostapd_constants.BAND_2G:
                    open_iface = default_2g_iface
                elif config.band == hostapd_constants.BAND_5G:
                    open_iface = default_5g_iface
                else:
                    continue
                self.ssh.run(f"uci set wireless.{open_iface}.ssid='{config.ssid}'")
                self.ssh.run(
                    f"uci set wireless.{open_iface}.disabled='{ENABLE_RADIO}'"
                )
                if config.hidden:
                    self.ssh.run(
                        f"uci set wireless.{open_iface}.hidden='{ENABLE_HIDDEN}'"
                    )
                continue

            # Create a dedicated wifi-iface section for the secured network.
            self.ssh.run(f"uci set wireless.{config.name}='wifi-iface'")
            if config.band == hostapd_constants.BAND_2G:
                self.ssh.run(f"uci set wireless.{config.name}.device='{self.radio_2g}'")
            else:
                self.ssh.run(f"uci set wireless.{config.name}.device='{self.radio_5g}'")
            self.ssh.run(f"uci set wireless.{config.name}.network='{config.iface}'")
            self.ssh.run(f"uci set wireless.{config.name}.mode='ap'")
            self.ssh.run(f"uci set wireless.{config.name}.ssid='{config.ssid}'")
            self.ssh.run(
                f"uci set wireless.{config.name}.encryption='{config.security}'"
            )
            if config.security in (
                PSK_SECURITY,
                SAE_SECURITY,
                PSK1_SECURITY,
                SAEMIXED_SECURITY,
            ):
                self.ssh.run(f"uci set wireless.{config.name}.key='{config.password}'")
            elif config.security == WEP_SECURITY:
                # keyN holds the WEP key material; 'key' selects the index.
                self.ssh.run(
                    f"uci set wireless.{config.name}.key{config.wep_key_num}="
                    f"'{config.wep_key}'"
                )
                self.ssh.run(
                    f"uci set wireless.{config.name}.key='{config.wep_key_num}'"
                )
            elif config.security == ENT_SECURITY:
                self.ssh.run(
                    f"uci set wireless.{config.name}.auth_secret="
                    f"'{config.radius_server_secret}'"
                )
                self.ssh.run(
                    f"uci set wireless.{config.name}.auth_server="
                    f"'{config.radius_server_ip}'"
                )
                self.ssh.run(
                    f"uci set wireless.{config.name}.auth_port="
                    f"'{config.radius_server_port}'"
                )
            if config.ieee80211w:
                self.ssh.run(
                    f"uci set wireless.{config.name}.ieee80211w='{config.ieee80211w}'"
                )
            if config.hidden:
                self.ssh.run(f"uci set wireless.{config.name}.hidden='{ENABLE_HIDDEN}'")

        self.ssh.run("uci commit wireless")
        # Preserve the DHCP lease file so cleanup can restore it.
        self.ssh.run(f"cp {LEASE_FILE} {LEASE_FILE}.tmp")

    def cleanup_wireless_settings(self):
        """Reset wireless settings to default."""
        self.ssh.run("wifi down")
        self.ssh.run("rm -f /etc/config/wireless")
        self.ssh.run("wifi config")
        # Restore the US regulatory domain whenever apply_wireless_settings()
        # changed it (ZA for 5G channels 132/136, AU for 2G channel 13).
        # Previously only channel 132 was restored, leaking the foreign
        # regdomain for channels 136 and 13.
        if self.channel_5g in (132, 136) or self.channel_2g == 13:
            self.ssh.run("iw reg set US")
        self.ssh.run(f"cp {LEASE_FILE}.tmp {LEASE_FILE}")
        self.service_manager.restart(SERVICE_DNSMASQ)
        # Give dnsmasq time to come back up before tests continue.
        time.sleep(9)
diff --git a/packages/antlion/controllers/packet_capture.py b/packages/antlion/controllers/packet_capture.py
new file mode 100755
index 0000000..59fa1c3
--- /dev/null
+++ b/packages/antlion/controllers/packet_capture.py
@@ -0,0 +1,310 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import threading
+import time
+
+from mobly import asserts, logger
+
+from antlion.controllers.ap_lib.hostapd_constants import (
+    CENTER_CHANNEL_MAP,
+    FREQUENCY_MAP,
+    VHT_CHANNEL,
+)
+from antlion.controllers.utils_lib.ssh import connection, formatter, settings
+from antlion.libs.proc.process import Process
+
MOBLY_CONTROLLER_CONFIG_NAME = "PacketCapture"
ACTS_CONTROLLER_REFERENCE_NAME = "packet_capture"
# Keys/markers used when parsing `iw dev ... scan` output.
BSS = "BSS"
BSSID = "BSSID"
FREQ = "freq"
FREQUENCY = "frequency"
LEVEL = "level"
# Monitor-mode interface names, one per band.
MON_2G = "mon0"
MON_5G = "mon1"
BAND_IFACE = {"2G": MON_2G, "5G": MON_5G}
# Dual-band managed-mode interface used for scanning.
SCAN_IFACE = "wlan2"
SCAN_TIMEOUT = 60  # seconds to keep scanning for a network
SEP = ":"
SIGNAL = "signal"
SSID = "SSID"
+
+
def create(configs):
    """Instantiate one PacketCapture controller per config entry."""
    capturers = []
    for config in configs:
        capturers.append(PacketCapture(config))
    return capturers
+
+
def destroy(pcaps):
    """Close every PacketCapture controller in the list."""
    for capturer in pcaps:
        capturer.close()
+
+
def get_info(pcaps):
    """Return the configured ssh hostname of each PacketCapture controller."""
    hostnames = []
    for capturer in pcaps:
        hostnames.append(capturer.ssh_settings.hostname)
    return hostnames
+
+
class PcapProperties(object):
    """Bundle of state for one running tcpdump capture.

    Attributes:
        proc: Process object of tcpdump
        pcap_fname: File name of the tcpdump output file
        pcap_file: File object for the tcpdump output file
    """

    def __init__(self, proc, pcap_fname, pcap_file):
        """Store the capture process and its output file handles."""
        self.proc = proc
        self.pcap_fname = pcap_fname
        self.pcap_file = pcap_file
+
+
class PacketCaptureError(Exception):
    """Error raised for packet-capture failures on the capturer device."""
+
+
class PacketCapture(object):
    """Class representing packet capturer.

    An instance of this class creates and configures two interfaces for monitor
    mode; 'mon0' for 2G and 'mon1' for 5G and one interface for scanning for
    wifi networks; 'wlan2' which is a dual band interface.

    Attributes:
        pcap_properties: dict that specifies packet capture properties for a
            band.
    """

    def __init__(self, configs):
        """Initialize objects.

        Args:
            configs: config for the packet capture.

        Raises:
            PacketCaptureError: if a monitor/managed interface cannot be
                configured.
        """
        self.ssh_settings = settings.from_config(configs["ssh_config"])
        self.ssh = connection.SshConnection(self.ssh_settings)
        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[PacketCapture|{self.ssh_settings.hostname}]",
            },
        )

        self._create_interface(MON_2G, "monitor")
        self._create_interface(MON_5G, "monitor")
        # Only set up the managed-mode scan interface when the device
        # already exposes SCAN_IFACE ('wlan2') in `ifconfig -a`.
        self.managed_mode = True
        result = self.ssh.run("ifconfig -a", ignore_status=True)
        if result.stderr or SCAN_IFACE not in result.stdout:
            self.managed_mode = False
        if self.managed_mode:
            self._create_interface(SCAN_IFACE, "managed")

        self.pcap_properties = dict()
        # Guards closing and removing pcap state in stop_packet_capture().
        self._pcap_stop_lock = threading.Lock()

    def _create_interface(self, iface, mode):
        """Create interface of monitor/managed mode.

        Create mon0/mon1 for 2G/5G monitor mode and wlan2 for managed mode.

        Raises:
            PacketCaptureError: if the interface cannot be configured.
        """
        # The trailing digit of the interface name doubles as the phy index
        # (mon0 -> phy0, mon1 -> phy1, wlan2 -> phy2).
        if mode == "monitor":
            self.ssh.run(f"ifconfig wlan{iface[-1]} down", ignore_status=True)
        self.ssh.run(f"iw dev {iface} del", ignore_status=True)
        self.ssh.run(
            f"iw phy{iface[-1]} interface add {iface} type {mode}",
            ignore_status=True,
        )
        self.ssh.run(f"ip link set {iface} up", ignore_status=True)
        result = self.ssh.run(f"iw dev {iface} info", ignore_status=True)
        if result.stderr or iface not in result.stdout:
            raise PacketCaptureError(f"Failed to configure interface {iface}")

    def _cleanup_interface(self, iface):
        """Clean up monitor mode interfaces.

        Raises:
            PacketCaptureError: if the interface still exists after deletion.
        """
        self.ssh.run(f"iw dev {iface} del", ignore_status=True)
        result = self.ssh.run(f"iw dev {iface} info", ignore_status=True)
        # Success means the info query fails with "No such device".
        if not result.stderr or "No such device" not in result.stderr:
            raise PacketCaptureError(f"Failed to cleanup monitor mode for {iface}")

    def _parse_scan_results(self, scan_result):
        """Parses the scan dump output and returns list of dictionaries.

        Args:
            scan_result: scan dump output from scan on mon interface.

        Returns:
            Dictionary of found network in the scan.
            The attributes returned are
                a.) SSID - SSID of the network.
                b.) LEVEL - signal level.
                c.) FREQUENCY - WiFi band the network is on.
                d.) BSSID - BSSID of the network.
        """
        scan_networks = []
        network = {}
        for line in scan_result.splitlines():
            if SEP not in line:
                continue
            if BSS in line:
                # Token before "(" on a "BSS aa:bb:..(on wlan2)" line.
                network[BSSID] = line.split("(")[0].split()[-1]
            field, value = line.lstrip().rstrip().split(SEP)[0:2]
            value = value.lstrip()
            if SIGNAL in line:
                network[LEVEL] = int(float(value.split()[0]))
            elif FREQ in line:
                network[FREQUENCY] = int(value)
            elif SSID in line:
                # SSID is the last field of an entry; flush the record.
                network[SSID] = value
                scan_networks.append(network)
                network = {}
        return scan_networks

    def get_wifi_scan_results(self):
        """Starts a wifi scan on wlan2 interface.

        Returns:
            List of dictionaries each representing a found network.

        Raises:
            PacketCaptureError: if managed mode is not set up or the scan
                dump cannot be retrieved.
        """
        if not self.managed_mode:
            raise PacketCaptureError("Managed mode not setup")
        result = self.ssh.run(f"iw dev {SCAN_IFACE} scan")
        if result.stderr:
            raise PacketCaptureError("Failed to get scan dump")
        if not result.stdout:
            return []
        return self._parse_scan_results(result.stdout)

    def start_scan_and_find_network(self, ssid):
        """Start a wifi scan on wlan2 interface and find network.

        Args:
            ssid: SSID of the network.

        Returns:
            True/False if the network if found or not.
        """
        curr_time = time.time()
        # Rescan until the SSID appears or SCAN_TIMEOUT seconds elapse.
        while time.time() < curr_time + SCAN_TIMEOUT:
            found_networks = self.get_wifi_scan_results()
            for network in found_networks:
                if network[SSID] == ssid:
                    return True
            time.sleep(3)  # sleep before next scan
        return False

    def configure_monitor_mode(self, band, channel, bandwidth=20):
        """Configure monitor mode.

        Args:
            band: band to configure monitor mode for.
            channel: channel to set for the interface.
            bandwidth : bandwidth for VHT channel as 40,80,160

        Returns:
            True if configure successful.
            False if not successful.
        """

        band = band.upper()
        if band not in BAND_IFACE:
            self.log.error("Invalid band. Must be 2g/2G or 5g/5G")
            return False

        iface = BAND_IFACE[band]
        if bandwidth == 20:
            self.ssh.run(f"iw dev {iface} set channel {channel}", ignore_status=True)
        else:
            # For wider channels, find the VHT block containing `channel`
            # and tune to the block's center frequency.
            center_freq = None
            for i, j in CENTER_CHANNEL_MAP[VHT_CHANNEL[bandwidth]]["channels"]:
                if channel in range(i, j + 1):
                    center_freq = (FREQUENCY_MAP[i] + FREQUENCY_MAP[j]) / 2
                    break
            asserts.assert_true(center_freq, "No match channel in VHT channel list.")
            self.ssh.run(
                "iw dev %s set freq %s %s %s"
                % (iface, FREQUENCY_MAP[channel], bandwidth, center_freq),
                ignore_status=True,
            )

        result = self.ssh.run(f"iw dev {iface} info", ignore_status=True)
        if result.stderr or f"channel {channel}" not in result.stdout:
            self.log.error(f"Failed to configure monitor mode for {band}")
            return False
        return True

    def start_packet_capture(self, band, log_path, pcap_fname):
        """Start packet capture for band.

        band = 2G starts tcpdump on 'mon0' interface.
        band = 5G starts tcpdump on 'mon1' interface.

        Args:
            band: '2g' or '2G' and '5g' or '5G'.
            log_path: test log path to save the pcap file.
            pcap_fname: name of the pcap file.

        Returns:
            pcap_proc: Process object of the tcpdump, or None if the band is
                invalid or a capture for it is already running.
        """
        band = band.upper()
        if band not in BAND_IFACE.keys() or band in self.pcap_properties:
            self.log.error("Invalid band or packet capture already running")
            return None

        pcap_name = f"{pcap_fname}_{band}.pcap"
        pcap_fname = os.path.join(log_path, pcap_name)
        pcap_file = open(pcap_fname, "w+b")

        # Stream unbuffered tcpdump output over ssh into the local file.
        tcpdump_cmd = f"tcpdump -i {BAND_IFACE[band]} -w - -U 2>/dev/null"
        cmd = formatter.SshFormatter().format_command(
            tcpdump_cmd, None, self.ssh_settings, extra_flags={"-q": None}
        )
        pcap_proc = Process(cmd)
        pcap_proc.set_on_output_callback(lambda msg: pcap_file.write(msg), binary=True)
        pcap_proc.start()

        self.pcap_properties[band] = PcapProperties(pcap_proc, pcap_fname, pcap_file)
        return pcap_proc

    def stop_packet_capture(self, proc):
        """Stop the packet capture.

        Args:
            proc: Process object of tcpdump to kill.
        """
        # Identify which band this process belongs to (for-else: not found).
        for key, val in self.pcap_properties.items():
            if val.proc is proc:
                break
        else:
            self.log.error("Failed to stop tcpdump. Invalid process.")
            return

        proc.stop()
        with self._pcap_stop_lock:
            self.pcap_properties[key].pcap_file.close()
            del self.pcap_properties[key]

    def close(self):
        """Cleanup.

        Cleans up all the monitor mode interfaces and closes ssh connections.
        """
        self._cleanup_interface(MON_2G)
        self._cleanup_interface(MON_5G)
        self.ssh.close()
diff --git a/packages/antlion/controllers/packet_sender.py b/packages/antlion/controllers/packet_sender.py
new file mode 100644
index 0000000..2091cb5
--- /dev/null
+++ b/packages/antlion/controllers/packet_sender.py
@@ -0,0 +1,940 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Collection of utility functions to generate and send custom packets.
+
+"""
+import logging
+import multiprocessing
+import socket
+import time
+
+# http://www.secdev.org/projects/scapy/
+# On ubuntu, sudo pip3 install scapy
+import scapy.all as scapy
+from mobly import signals
+
MOBLY_CONTROLLER_CONFIG_NAME = "PacketSender"
ACTS_CONTROLLER_REFERENCE_NAME = "packet_senders"

# Sentinel config value: read the field from the local interface instead.
GET_FROM_LOCAL_INTERFACE = "get_local"
MAC_BROADCAST = "ff:ff:ff:ff:ff:ff"
IPV4_BROADCAST = "255.255.255.255"
ARP_DST = "00:00:00:00:00:00"
# IPv6 router advertisement (all-nodes multicast) destinations and prefix.
RA_MAC = "33:33:00:00:00:01"
RA_IP = "ff02::1"
RA_PREFIX = "d00d::"
RA_PREFIX_LEN = 64
# DHCP offer fields.
DHCP_OFFER_OP = 2
DHCP_OFFER_SRC_PORT = 67
DHCP_OFFER_DST_PORT = 68
DHCP_TRANS_ID = 0x01020304
DNS_LEN = 3
# Ping payload/type.
PING6_DATA = "BEST PING6 EVER"
PING4_TYPE = 8
# mDNS query parameters (IPv4 and IPv6 multicast destinations).
MDNS_TTL = 255
MDNS_QTYPE = "PTR"
MDNS_UDP_PORT = 5353
MDNS_V4_IP_DST = "224.0.0.251"
MDNS_V4_MAC_DST = "01:00:5E:00:00:FB"
MDNS_RECURSIVE = 1
MDNS_V6_IP_DST = "FF02::FB"
MDNS_V6_MAC_DST = "33:33:00:00:00:FB"
# Ethernet / LLC / SNAP framing constants.
ETH_TYPE_IP = 2048
SAP_SPANNING_TREE = 0x42
SNAP_OUI = 12
SNAP_SSAP = 170
SNAP_DSAP = 170
SNAP_CTRL = 3
LLC_XID_CONTROL = 191
PAD_LEN_BYTES = 128
+
+
def create(configs):
    """Build one PacketSender per entry in the json config list.

    Args:
        configs: the json configs that represent the controllers.

    Returns:
        A list of new PacketSender objects.
    """
    senders = []
    for config in configs:
        senders.append(PacketSender(config))
    return senders
+
+
def destroy(objs):
    """Tear down a list of PacketSenders, stopping any active transmission.

    Args:
        objs: A list of PacketSenders
    """
    for sender in objs:
        # ignore_status=True: a sender with no active thread is fine.
        sender.stop_sending(True)
+
+
def get_info(objs):
    """Report the network interface used by each packet sender.

    Args:
        objs: A list of PacketSenders

    Returns:
        A list with one interface name per sender.
    """
    interfaces = []
    for sender in objs:
        interfaces.append(sender.interface)
    return interfaces
+
+
class ThreadSendPacket(multiprocessing.Process):
    """Creates a thread that keeps sending the same packet until a stop signal.

    Attributes:
        stop_signal: signal to stop the thread execution
        packet: desired packet to keep sending
        interval: interval between consecutive packets (s)
        interface: network interface name (e.g., 'eth0')
        log: object used for logging
    """

    def __init__(self, signal, packet, interval, interface, log):
        multiprocessing.Process.__init__(self)
        self.stop_signal = signal
        self.packet = packet
        self.interval = interval
        self.interface = interface
        self.log = log

    def run(self):
        """Send self.packet every self.interval seconds until stop_signal."""
        self.log.info("Packet Sending Started.")
        while True:
            if self.stop_signal.is_set():
                # Poison pill means shutdown
                self.log.info("Packet Sending Stopped.")
                break

            try:
                scapy.sendp(self.packet, iface=self.interface, verbose=0)
                time.sleep(self.interval)
            except Exception:
                # Any send failure ends the worker process; the exception is
                # logged rather than propagated across the process boundary.
                self.log.exception("Exception when trying to send packet")
                return

        return
+
+
class PacketSenderError(signals.ControllerError):
    """Raises exceptions encountered in packet sender lib."""
+
+
class PacketSender(object):
    """Send any custom packet over a desired interface.

    Attributes:
        log: class logging object
        packet: placeholder for a packet (kept for API compatibility)
        thread_active: indicates whether or not the send thread is active
        thread_send: thread object for the concurrent packet transmissions
        stop_signal: event to stop the thread
        interface: network interface name (e.g., 'eth0')
    """

    def __init__(self, ifname):
        """Initialize the PacketGenerator class.

        Args:
            ifname: network interface name that will be used by the packet
                generator
        """
        self.log = logging.getLogger()
        self.packet = None
        self.thread_active = False
        self.thread_send = None
        self.stop_signal = multiprocessing.Event()
        self.interface = ifname

    def send_ntimes(self, packet, ntimes, interval):
        """Sends a packet ntimes at a given interval.

        Args:
            packet: custom built packet from Layer 2 up to Application layer
            ntimes: number of packets to send
            interval: interval between consecutive packet transmissions (s)

        Raises:
            PacketSenderError: if packet is None.
        """
        if packet is None:
            raise PacketSenderError(
                "There is no packet to send. Create a packet first."
            )

        for _ in range(ntimes):
            try:
                scapy.sendp(packet, iface=self.interface, verbose=0)
                time.sleep(interval)
            except socket.error as excpt:
                # Best effort: log and abort the remaining transmissions.
                self.log.exception(f"Caught socket exception : {excpt}")
                return

    def send_receive_ntimes(self, packet, ntimes, interval):
        """Sends a packet and receives the reply ntimes at a given interval.

        Args:
            packet: custom built packet from Layer 2 up to Application layer
            ntimes: number of packets to send
            interval: interval between consecutive packet transmissions and
                      the corresponding reply (s)

        Raises:
            PacketSenderError: if packet is None.
        """
        if packet is None:
            raise PacketSenderError(
                "There is no packet to send. Create a packet first."
            )

        for _ in range(ntimes):
            try:
                # srp1 sends at layer 2 and waits up to `interval` for one
                # reply; the reply itself is discarded.
                scapy.srp1(packet, iface=self.interface, timeout=interval, verbose=0)
                time.sleep(interval)
            except socket.error as excpt:
                self.log.exception(f"Caught socket exception : {excpt}")
                return

    def start_sending(self, packet, interval):
        """Sends packets in parallel with the main process.

        Creates a thread and keeps sending the same packet at a given interval
        until a stop signal is received

        Args:
            packet: custom built packet from Layer 2 up to Application layer
            interval: interval between consecutive packets (s)

        Raises:
            PacketSenderError: if packet is None or a sender thread is
                already active.
        """
        if packet is None:
            raise PacketSenderError(
                "There is no packet to send. Create a packet first."
            )

        if self.thread_active:
            # Fix: the original adjacent literals rendered as
            # "Stop itbefore starting" (missing space).
            raise PacketSenderError(
                "There is already an active thread. Stop it "
                "before starting another transmission."
            )

        self.thread_send = ThreadSendPacket(
            self.stop_signal, packet, interval, self.interface, self.log
        )
        self.thread_send.start()
        self.thread_active = True

    def stop_sending(self, ignore_status=False):
        """Stops the concurrent thread that is continuously sending packets.

        Args:
            ignore_status: when True, return silently if no sender thread is
                active instead of raising.

        Raises:
            PacketSenderError: if no thread is active and ignore_status is
                False.
        """
        if not self.thread_active:
            if ignore_status:
                return
            # Fix: message typo "acive" -> "active".
            raise PacketSenderError(
                "Error: There is no active thread running to stop."
            )

        # Stop thread
        self.stop_signal.set()
        self.thread_send.join()

        # Just as precaution
        if self.thread_send.is_alive():
            self.thread_send.terminate()
            self.log.warning("Packet Sending forced to terminate")

        self.stop_signal.clear()
        self.thread_send = None
        self.thread_active = False
+
+
class ArpGenerator(object):
    """Creates a custom ARP packet

    Attributes:
        packet: desired built custom packet
        src_mac: MAC address (Layer 2) of the source node
        src_ipv4: IPv4 address (Layer 3) of the source node
        dst_ipv4: IPv4 address (Layer 3) of the destination node
    """

    def __init__(self, **config_params):
        """Initialize the class with the required network and packet params.

        Args:
            config_params: a dictionary with all the necessary packet fields.
              Some fields can be generated automatically. For example:
              {'subnet_mask': '255.255.255.0',
               'dst_ipv4': '192.168.1.3',
               'src_ipv4: 'get_local', ...
              The key can also be 'get_local' which means the code will read
              and use the local interface parameters
        """
        interf = config_params["interf"]
        self.packet = None
        # Resolve the source MAC from the local interface when requested.
        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
            self.src_mac = scapy.get_if_hwaddr(interf)
        else:
            self.src_mac = config_params["src_mac"]

        self.dst_ipv4 = config_params["dst_ipv4"]
        # Resolve the source IPv4 from the local interface when requested.
        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
            self.src_ipv4 = scapy.get_if_addr(interf)
        else:
            self.src_ipv4 = config_params["src_ipv4"]

    def generate(
        self,
        op="who-has",
        ip_dst=None,
        ip_src=None,
        hwsrc=None,
        hwdst=None,
        eth_dst=None,
    ):
        """Generates a custom ARP packet.

        Args:
            op: ARP type (request or reply)
            ip_dst: ARP ipv4 destination (Optional)
            ip_src: ARP ipv4 source address (Optional)
            hwsrc: ARP hardware source address (Optional)
            hwdst: ARP hardware destination address (Optional)
            eth_dst: Ethernet (layer 2) destination address (Optional)

        Returns:
            The built Ether/ARP packet (also stored in self.packet).
        """
        # Create IP layer
        # Each optional field falls back to the value captured in __init__.
        hw_src = hwsrc if hwsrc is not None else self.src_mac
        hw_dst = hwdst if hwdst is not None else ARP_DST
        ipv4_dst = ip_dst if ip_dst is not None else self.dst_ipv4
        ipv4_src = ip_src if ip_src is not None else self.src_ipv4
        ip4 = scapy.ARP(op=op, pdst=ipv4_dst, psrc=ipv4_src, hwdst=hw_dst, hwsrc=hw_src)

        # Create Ethernet layer
        mac_dst = eth_dst if eth_dst is not None else MAC_BROADCAST
        ethernet = scapy.Ether(src=self.src_mac, dst=mac_dst)

        self.packet = ethernet / ip4
        return self.packet
+
+
class DhcpOfferGenerator(object):
    """Builds custom DHCP offer packets.

    Attributes:
        packet: the most recently generated packet (None until generate()).
        subnet_mask: local network subnet mask.
        src_mac: MAC address (Layer 2) of the source node.
        dst_mac: MAC address (Layer 2) of the destination node.
        src_ipv4: IPv4 address (Layer 3) of the source node.
        dst_ipv4: IPv4 address (Layer 3) of the destination node.
        gw_ipv4: IPv4 address (Layer 3) of the Gateway.
    """

    def __init__(self, **config_params):
        """Initializes the generator from network/packet parameters.

        Args:
            config_params: dictionary with all the necessary packet fields.
              A field set to GET_FROM_LOCAL_INTERFACE is read from the local
              interface named by config_params['interf'].
        """
        iface = config_params["interf"]
        self.packet = None
        self.subnet_mask = config_params["subnet_mask"]
        self.dst_mac = config_params["dst_mac"]

        src_mac = config_params["src_mac"]
        if src_mac == GET_FROM_LOCAL_INTERFACE:
            src_mac = scapy.get_if_hwaddr(iface)
        self.src_mac = src_mac

        self.dst_ipv4 = config_params["dst_ipv4"]
        src_ipv4 = config_params["src_ipv4"]
        if src_ipv4 == GET_FROM_LOCAL_INTERFACE:
            src_ipv4 = scapy.get_if_addr(iface)
        self.src_ipv4 = src_ipv4

        self.gw_ipv4 = config_params["gw_ipv4"]

    def generate(self, cha_mac=None, dst_ip=None):
        """Builds and returns a DHCP offer packet.

        Args:
            cha_mac: hardware target address for DHCP offer (Optional)
            dst_ip: ipv4 address of target host for renewal (Optional)
        """
        # Target defaults to the values captured at construction time.
        client_hw = self.dst_mac if cha_mac is None else cha_mac
        client_ip = self.dst_ipv4 if dst_ip is None else dst_ip

        # DHCP options layer advertising the offer.
        dhcp_layer = scapy.DHCP(
            options=[
                ("message-type", "offer"),
                ("subnet_mask", self.subnet_mask),
                ("server_id", self.src_ipv4),
                ("end"),
            ]
        )

        # BOOTP layer carrying the offered client address.
        bootp_layer = scapy.BOOTP(
            op=DHCP_OFFER_OP,
            yiaddr=client_ip,
            siaddr=self.src_ipv4,
            giaddr=self.gw_ipv4,
            chaddr=scapy.mac2str(client_hw),
            xid=DHCP_TRANS_ID,
        )

        # Transport and link layers; broadcast to the local segment.
        udp_layer = scapy.UDP(sport=DHCP_OFFER_SRC_PORT, dport=DHCP_OFFER_DST_PORT)
        ip_layer = scapy.IP(src=self.src_ipv4, dst=IPV4_BROADCAST)
        eth_layer = scapy.Ether(dst=MAC_BROADCAST, src=self.src_mac)

        self.packet = eth_layer / ip_layer / udp_layer / bootp_layer / dhcp_layer
        return self.packet
+
+
class NsGenerator(object):
    """Builds custom Neighbor Solicitation (NS) packets.

    Attributes:
        packet: the most recently generated packet (None until generate()).
        src_mac: MAC address (Layer 2) of the source node.
        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc).
        src_ipv6: IPv6 address (Layer 3) of the source node.
        dst_ipv6: IPv6 address (Layer 3) of the destination node.
    """

    def __init__(self, **config_params):
        """Initializes the generator from network/packet parameters.

        Args:
            config_params: dictionary with all the necessary packet fields.
              A field set to GET_FROM_LOCAL_INTERFACE is read from the local
              interface named by config_params['interf'].
        """
        iface = config_params["interf"]
        self.packet = None

        src_mac = config_params["src_mac"]
        if src_mac == GET_FROM_LOCAL_INTERFACE:
            src_mac = scapy.get_if_hwaddr(iface)
        self.src_mac = src_mac

        self.dst_ipv6 = config_params["dst_ipv6"]
        self.src_ipv6_type = config_params["src_ipv6_type"]
        src_ipv6 = config_params["src_ipv6"]
        if src_ipv6 == GET_FROM_LOCAL_INTERFACE:
            src_ipv6 = get_if_addr6(iface, self.src_ipv6_type)
        self.src_ipv6 = src_ipv6

    def generate(self, ip_dst=None, eth_dst=None):
        """Builds and returns a Neighbor Solicitation packet (ICMP over IPv6).

        Args:
            ip_dst: NS ipv6 destination (Optional)
            eth_dst: Ethernet (layer 2) destination address (Optional)
        """
        # Derive the solicited-node multicast address from the target IPv6.
        target_ip6 = self.dst_ipv6 if ip_dst is None else ip_dst
        target_raw = socket.inet_pton(socket.AF_INET6, target_ip6)
        mcast_raw = scapy.in6_getnsma(target_raw)
        mcast_ip6 = socket.inet_ntop(socket.AF_INET6, mcast_raw)
        # Matching multicast MAC unless an explicit destination is given.
        dst_hw = scapy.in6_getnsmac(mcast_raw) if eth_dst is None else eth_dst

        # IPv6 layer with the NS message and our link-layer address option.
        ip6_layer = (
            scapy.IPv6(dst=mcast_ip6, src=self.src_ipv6)
            / scapy.ICMPv6ND_NS(tgt=target_ip6)
            / scapy.ICMPv6NDOptSrcLLAddr(lladdr=self.src_mac)
        )

        self.packet = scapy.Ether(src=self.src_mac, dst=dst_hw) / ip6_layer
        return self.packet
+
+
class RaGenerator(object):
    """Builds custom Router Advertisement (RA) packets.

    Attributes:
        packet: the most recently generated packet (None until generate()).
        src_mac: MAC address (Layer 2) of the source node.
        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc).
        src_ipv6: IPv6 address (Layer 3) of the source node.
    """

    def __init__(self, **config_params):
        """Initializes the generator from network/packet parameters.

        Args:
            config_params: dictionary with all the necessary packet fields.
              A field set to GET_FROM_LOCAL_INTERFACE is read from the local
              interface named by config_params['interf'].
        """
        iface = config_params["interf"]
        self.packet = None

        src_mac = config_params["src_mac"]
        if src_mac == GET_FROM_LOCAL_INTERFACE:
            src_mac = scapy.get_if_hwaddr(iface)
        self.src_mac = src_mac

        self.src_ipv6_type = config_params["src_ipv6_type"]
        src_ipv6 = config_params["src_ipv6"]
        if src_ipv6 == GET_FROM_LOCAL_INTERFACE:
            src_ipv6 = get_if_addr6(iface, self.src_ipv6_type)
        self.src_ipv6 = src_ipv6

    def generate(
        self, lifetime, enableDNS=False, dns_lifetime=0, ip_dst=None, eth_dst=None
    ):
        """Builds and returns a Router Advertisement packet (ICMP over IPv6).

        Args:
            lifetime: RA lifetime
            enableDNS: Add RDNSS option to RA (Optional)
            dns_lifetime: Set DNS server lifetime (Optional)
            ip_dst: IPv6 destination address (Optional)
            eth_dst: Ethernet (layer 2) destination address (Optional)
        """
        # Default to the all-nodes RA destination unless overridden.
        dst_ip6 = RA_IP if ip_dst is None else ip_dst
        dst_hw = RA_MAC if eth_dst is None else eth_dst

        # IPv6 layer with the RA message, source link-layer address option,
        # and the advertised prefix.
        ip6_layer = (
            scapy.IPv6(dst=dst_ip6, src=self.src_ipv6)
            / scapy.ICMPv6ND_RA(routerlifetime=lifetime)
            / scapy.ICMPv6NDOptSrcLLAddr(lladdr=self.src_mac)
            / scapy.ICMPv6NDOptPrefixInfo(prefixlen=RA_PREFIX_LEN, prefix=RA_PREFIX)
        )
        if enableDNS:
            # Optionally advertise ourselves as a recursive DNS server.
            ip6_layer /= scapy.ICMPv6NDOptRDNSS(
                lifetime=dns_lifetime, dns=[self.src_ipv6], len=DNS_LEN
            )

        self.packet = scapy.Ether(src=self.src_mac, dst=dst_hw) / ip6_layer
        return self.packet
+
+
class Ping6Generator(object):
    """Builds custom Ping v6 packets (i.e., ICMP over IPv6).

    Attributes:
        packet: the most recently generated packet (None until generate()).
        src_mac: MAC address (Layer 2) of the source node.
        dst_mac: MAC address (Layer 2) of the destination node.
        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc).
        src_ipv6: IPv6 address (Layer 3) of the source node.
        dst_ipv6: IPv6 address (Layer 3) of the destination node.
    """

    def __init__(self, **config_params):
        """Initializes the generator from network/packet parameters.

        Args:
            config_params: dictionary with all the necessary packet fields.
              A field set to GET_FROM_LOCAL_INTERFACE is read from the local
              interface named by config_params['interf'].
        """
        iface = config_params["interf"]
        self.packet = None
        self.dst_mac = config_params["dst_mac"]

        src_mac = config_params["src_mac"]
        if src_mac == GET_FROM_LOCAL_INTERFACE:
            src_mac = scapy.get_if_hwaddr(iface)
        self.src_mac = src_mac

        self.dst_ipv6 = config_params["dst_ipv6"]
        self.src_ipv6_type = config_params["src_ipv6_type"]
        src_ipv6 = config_params["src_ipv6"]
        if src_ipv6 == GET_FROM_LOCAL_INTERFACE:
            src_ipv6 = get_if_addr6(iface, self.src_ipv6_type)
        self.src_ipv6 = src_ipv6

    def generate(self, ip_dst=None, eth_dst=None):
        """Builds and returns a Ping6 packet (i.e., Echo Request).

        Args:
            ip_dst: IPv6 destination address (Optional)
            eth_dst: Ethernet (layer 2) destination address (Optional)
        """
        # Destination defaults to the values captured at construction time.
        dst_ip6 = self.dst_ipv6 if ip_dst is None else ip_dst
        dst_hw = self.dst_mac if eth_dst is None else eth_dst

        # IPv6 layer carrying the ICMPv6 echo request.
        ip6_layer = scapy.IPv6(dst=dst_ip6, src=self.src_ipv6) / scapy.ICMPv6EchoRequest(
            data=PING6_DATA
        )

        self.packet = scapy.Ether(src=self.src_mac, dst=dst_hw) / ip6_layer
        return self.packet
+
+
class Ping4Generator(object):
    """Builds custom Ping v4 packets (i.e., ICMP over IPv4).

    Attributes:
        packet: the most recently generated packet (None until generate()).
        src_mac: MAC address (Layer 2) of the source node.
        dst_mac: MAC address (Layer 2) of the destination node.
        src_ipv4: IPv4 address (Layer 3) of the source node.
        dst_ipv4: IPv4 address (Layer 3) of the destination node.
    """

    def __init__(self, **config_params):
        """Initializes the generator from network/packet parameters.

        Args:
            config_params: dictionary with all the necessary packet fields.
              A field set to GET_FROM_LOCAL_INTERFACE is read from the local
              interface named by config_params['interf'].
        """
        iface = config_params["interf"]
        self.packet = None
        self.dst_mac = config_params["dst_mac"]

        src_mac = config_params["src_mac"]
        if src_mac == GET_FROM_LOCAL_INTERFACE:
            src_mac = scapy.get_if_hwaddr(iface)
        self.src_mac = src_mac

        self.dst_ipv4 = config_params["dst_ipv4"]
        src_ipv4 = config_params["src_ipv4"]
        if src_ipv4 == GET_FROM_LOCAL_INTERFACE:
            src_ipv4 = scapy.get_if_addr(iface)
        self.src_ipv4 = src_ipv4

    def generate(self, ip_dst=None, eth_dst=None):
        """Builds and returns a Ping4 packet (i.e., Echo Request).

        Args:
            ip_dst: IP destination address (Optional)
            eth_dst: Ethernet (layer 2) destination address (Optional)
        """
        # Destination defaults to the values captured at construction time.
        dst_ip = self.dst_ipv4 if ip_dst is None else ip_dst
        dst_hw = self.dst_mac if eth_dst is None else eth_dst

        # IPv4 layer carrying the ICMP echo request.
        ip_layer = scapy.IP(src=self.src_ipv4, dst=dst_ip) / scapy.ICMP(type=PING4_TYPE)

        self.packet = scapy.Ether(src=self.src_mac, dst=dst_hw) / ip_layer
        return self.packet
+
+
class Mdns6Generator(object):
    """Builds custom mDNS IPv6 packets.

    Attributes:
        packet: the most recently generated packet (None until generate()).
        src_mac: MAC address (Layer 2) of the source node.
        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc).
        src_ipv6: IPv6 address (Layer 3) of the source node.
    """

    def __init__(self, **config_params):
        """Initializes the generator from network/packet parameters.

        Args:
            config_params: dictionary with all the necessary packet fields.
              A field set to GET_FROM_LOCAL_INTERFACE is read from the local
              interface named by config_params['interf'].
        """
        iface = config_params["interf"]
        self.packet = None

        src_mac = config_params["src_mac"]
        if src_mac == GET_FROM_LOCAL_INTERFACE:
            src_mac = scapy.get_if_hwaddr(iface)
        self.src_mac = src_mac

        self.src_ipv6_type = config_params["src_ipv6_type"]
        src_ipv6 = config_params["src_ipv6"]
        if src_ipv6 == GET_FROM_LOCAL_INTERFACE:
            src_ipv6 = get_if_addr6(iface, self.src_ipv6_type)
        self.src_ipv6 = src_ipv6

    def generate(self, ip_dst=None, eth_dst=None):
        """Builds and returns an mDNS v6 packet for multicast DNS config.

        Args:
            ip_dst: IPv6 destination address (Optional)
            eth_dst: Ethernet (layer 2) destination address (Optional)
        """
        # Default to the mDNS IPv6 multicast destination unless overridden.
        dst_ip6 = MDNS_V6_IP_DST if ip_dst is None else ip_dst
        dst_hw = MDNS_V6_MAC_DST if eth_dst is None else eth_dst

        # DNS query layer; the query name is our own IPv6 address.
        dns_layer = scapy.DNS(
            rd=MDNS_RECURSIVE,
            qd=scapy.DNSQR(qname=self.src_ipv6, qtype=MDNS_QTYPE),
        )

        self.packet = (
            scapy.Ether(src=self.src_mac, dst=dst_hw)
            / scapy.IPv6(src=self.src_ipv6, dst=dst_ip6)
            / scapy.UDP(sport=MDNS_UDP_PORT, dport=MDNS_UDP_PORT)
            / dns_layer
        )
        return self.packet
+
+
class Mdns4Generator(object):
    """Builds custom mDNS v4 packets.

    Attributes:
        packet: the most recently generated packet (None until generate()).
        src_mac: MAC address (Layer 2) of the source node.
        src_ipv4: IPv4 address (Layer 3) of the source node.
    """

    def __init__(self, **config_params):
        """Initializes the generator from network/packet parameters.

        Args:
            config_params: dictionary with all the necessary packet fields.
              A field set to GET_FROM_LOCAL_INTERFACE is read from the local
              interface named by config_params['interf'].
        """
        iface = config_params["interf"]
        self.packet = None

        src_mac = config_params["src_mac"]
        if src_mac == GET_FROM_LOCAL_INTERFACE:
            src_mac = scapy.get_if_hwaddr(iface)
        self.src_mac = src_mac

        src_ipv4 = config_params["src_ipv4"]
        if src_ipv4 == GET_FROM_LOCAL_INTERFACE:
            src_ipv4 = scapy.get_if_addr(iface)
        self.src_ipv4 = src_ipv4

    def generate(self, ip_dst=None, eth_dst=None):
        """Builds and returns an mDNS v4 packet for multicast DNS config.

        Args:
            ip_dst: IP destination address (Optional)
            eth_dst: Ethernet (layer 2) destination address (Optional)
        """
        # Default to the mDNS IPv4 multicast destination unless overridden.
        dst_ip = MDNS_V4_IP_DST if ip_dst is None else ip_dst
        dst_hw = MDNS_V4_MAC_DST if eth_dst is None else eth_dst

        # DNS query layer; the query name is our own IPv4 address.
        dns_layer = scapy.DNS(
            rd=MDNS_RECURSIVE,
            qd=scapy.DNSQR(qname=self.src_ipv4, qtype=MDNS_QTYPE),
        )

        # ttl=255 as in the original implementation (link-local scope).
        self.packet = (
            scapy.Ether(src=self.src_mac, dst=dst_hw)
            / scapy.IP(src=self.src_ipv4, dst=dst_ip, ttl=255)
            / scapy.UDP(sport=MDNS_UDP_PORT, dport=MDNS_UDP_PORT)
            / dns_layer
        )
        return self.packet
+
+
class Dot3Generator(object):
    """Builds custom 802.3 Ethernet frames (raw, LLC, or LLC+SNAP).

    Attributes:
        packet: the most recently generated frame (None until a generate*()).
        src_mac: MAC address (Layer 2) of the source node.
        dst_mac: MAC address (Layer 2) of the destination node.
    """

    def __init__(self, **config_params):
        """Initializes the generator from network/packet parameters.

        Args:
            config_params: dictionary with all the necessary packet fields.
              A field set to GET_FROM_LOCAL_INTERFACE is read from the local
              interface named by config_params['interf'].
        """
        iface = config_params["interf"]
        self.packet = None
        self.dst_mac = config_params["dst_mac"]

        src_mac = config_params["src_mac"]
        if src_mac == GET_FROM_LOCAL_INTERFACE:
            src_mac = scapy.get_if_hwaddr(iface)
        self.src_mac = src_mac

    def _build_ether(self, eth_dst=None):
        """Creates the bare 802.3 frame.

        Args:
            eth_dst: Ethernet (layer 2) destination address (Optional)
        """
        dst_hw = self.dst_mac if eth_dst is None else eth_dst
        return scapy.Dot3(src=self.src_mac, dst=dst_hw)

    def _pad_frame(self, frame):
        """Appends default-length zero padding and sets the length field.

        Args:
            frame: Ethernet (layer 2) frame to be padded
        """
        frame.len = PAD_LEN_BYTES
        padding = scapy.Padding()
        # NOTE(review): the pad payload is a str, not bytes — presumably
        # scapy encodes it on build; confirm before changing.
        padding.load = "\x00" * PAD_LEN_BYTES
        return frame / padding

    def generate(self, eth_dst=None):
        """Builds the basic 802.3 frame and adds padding.

        Args:
            eth_dst: Ethernet (layer 2) destination address (Optional)
        """
        self.packet = self._pad_frame(self._build_ether(eth_dst))
        return self.packet

    def generate_llc(self, eth_dst=None, dsap=2, ssap=3, ctrl=LLC_XID_CONTROL):
        """Builds the 802.3 frame with an LLC header and adds padding.

        Args:
            eth_dst: Ethernet (layer 2) destination address (Optional)
            dsap: Destination Service Access Point (Optional)
            ssap: Source Service Access Point (Optional)
            ctrl: Control (Optional)
        """
        base = self._build_ether(eth_dst)
        llc_header = scapy.LLC(dsap=dsap, ssap=ssap, ctrl=ctrl)

        self.packet = self._pad_frame(base / llc_header)
        return self.packet

    def generate_snap(
        self,
        eth_dst=None,
        dsap=SNAP_DSAP,
        ssap=SNAP_SSAP,
        ctrl=SNAP_CTRL,
        oui=SNAP_OUI,
        code=ETH_TYPE_IP,
    ):
        """Builds the 802.3 frame with LLC and SNAP headers and adds padding.

        Args:
            eth_dst: Ethernet (layer 2) destination address (Optional)
            dsap: Destination Service Access Point (Optional)
            ssap: Source Service Access Point (Optional)
            ctrl: Control (Optional)
            oui: Protocol Id or Org Code (Optional)
            code: EtherType (Optional)
        """
        base = self._build_ether(eth_dst)
        llc_header = scapy.LLC(dsap=dsap, ssap=ssap, ctrl=ctrl)
        snap_header = scapy.SNAP(OUI=oui, code=code)

        self.packet = self._pad_frame(base / llc_header / snap_header)
        return self.packet
+
+
def get_if_addr6(intf, address_type):
    """Returns an IPv6 address of a given local interface, or None.

    Scans the local interfaces for the first address on `intf` whose type
    matches `address_type` (one of scapy's IPv6 constants such as
    IPV6_ADDR_LINKLOCAL or IPV6_ADDR_GLOBAL).

    Args:
        intf: desired interface name
        address_type: address type, e.g. LINKLOCAL or GLOBAL

    Returns:
        IPv6 address of the specified interface in human readable format,
        or None if no matching address is found.
    """
    return next(
        (
            addr
            for addr, addr_type, if_name in scapy.in6_getifaddr()
            if if_name == intf and addr_type == address_type
        ),
        None,
    )
diff --git a/packages/antlion/controllers/pdu.py b/packages/antlion/controllers/pdu.py
new file mode 100644
index 0000000..c623225
--- /dev/null
+++ b/packages/antlion/controllers/pdu.py
@@ -0,0 +1,294 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import enum
+import logging
+import time
+from enum import IntEnum, unique
+from typing import Protocol
+
+from antlion.types import ControllerConfig, Json
+from antlion.validation import MapValidator
+
# Key under a Mobly testbed config that holds the list of PDU configs.
MOBLY_CONTROLLER_CONFIG_NAME = "PduDevice"
# Reference name used to register created PDU controllers
# (ACTS-style; presumably the attribute name on the test class — verify).
ACTS_CONTROLLER_REFERENCE_NAME = "pdu_devices"

# Allow time for capacitors to discharge.
DEFAULT_REBOOT_DELAY_SEC = 5.0
+
+
class PduType(enum.StrEnum):
    """Supported PDU models.

    Each value is the "device" config string, formatted "<brand>.<model>",
    matching an implementation module under pdu_lib/.
    """

    NP02B = "synaccess.np02b"
    WEBPOWERSWITCH = "digital_loggers.webpowerswitch"
+
+
class PduError(Exception):
    """An exception for use within PduDevice implementations."""
+
+
def create(configs: list[ControllerConfig]) -> list[PduDevice]:
    """Creates a PduDevice for each config in configs.

    Args:
        configs: List of configs from PduDevice field.
            Fields:
                device: a string "<brand>.<model>" that corresponds to module
                    in pdu_lib/
                host: a string of the device ip address
                username (optional): a string of the username for device sign-in
                password (optional): a string of the password for device sign-in
    Return:
        A list of PduDevice objects.
    """
    devices: list[PduDevice] = []
    for config in configs:
        validator = MapValidator(config)
        # Raises ValueError if the "device" string is not a known PduType.
        pdu_type = PduType(validator.get(str, "device"))

        host = validator.get(str, "host")
        username = validator.get(str, "username", None)
        password = validator.get(str, "password", None)

        # Import implementations lazily so unused backends never load.
        if pdu_type is PduType.NP02B:
            from antlion.controllers.pdu_lib.synaccess.np02b import (
                PduDevice as NP02B,
            )

            devices.append(NP02B(host, username, password))
        elif pdu_type is PduType.WEBPOWERSWITCH:
            from antlion.controllers.pdu_lib.digital_loggers.webpowerswitch import (
                PduDevice as WebPowerSwitch,
            )

            devices.append(WebPowerSwitch(host, username, password))
    return devices
+
+
def destroy(pdu_list: list[PduDevice]) -> None:
    """Closes the connection held by every PDU in the list.

    Args:
        pdu_list: A list of PduDevice objects.
    """
    for device in pdu_list:
        device.close()
+
+
def get_info(pdu_list: list[PduDevice]) -> list[dict[str, str | None]]:
    """Retrieves info from a list of PduDevice objects.

    Args:
        pdu_list: A list of PduDevice objects.
    Return:
        A list containing a dictionary for each PduDevice, with keys:
            'host': a string of the device ip address
            'username': a string of the username
            'password': a string of the password
    """
    return [
        {"host": pdu.host, "username": pdu.username, "password": pdu.password}
        for pdu in pdu_list
    ]
+
+
def get_pdu_port_for_device(
    device_pdu_config: dict[str, Json], pdus: list[PduDevice]
) -> tuple[PduDevice, int]:
    """Retrieves the PDU object and port powering a given device.

    This is especially necessary when there are multiple devices on a single
    PDU or multiple PDUs registered.

    Args:
        device_pdu_config: a dict, representing the config of the device.
        pdus: a list of registered PduDevice objects.

    Returns:
        A tuple: (PduDevice powering the device, port number on that PDU).

    Raises:
        ValueError, if there is no PDU matching the given host in the config.

    Example ACTS config:
        ...
        "testbed": [
            ...
            "FuchsiaDevice": [
                {
                    "ip": "<device_ip>",
                    "ssh_config": "/path/to/sshconfig",
                    "PduDevice": {
                        "host": "192.168.42.185",
                        "port": 2
                    }
                }
            ],
            "AccessPoint": [
                {
                    "ssh_config": {
                        ...
                    },
                    "PduDevice": {
                        "host": "192.168.42.185",
                        "port": 1
                    }
                }
            ],
            "PduDevice": [
                {
                    "device": "synaccess.np02b",
                    "host": "192.168.42.185"
                }
            ]
        ],
        ...
    """
    config = MapValidator(device_pdu_config)
    pdu_ip = config.get(str, "host")
    port = config.get(int, "port")
    # First registered PDU whose host matches, or None.
    match = next((pdu for pdu in pdus if pdu.host == pdu_ip), None)
    if match is None:
        raise ValueError(f"No PduDevice with host: {pdu_ip}")
    return match, port
+
+
class PDU(Protocol):
    """Control power delivery to a device with a PDU.

    This is a structural (typing.Protocol) interface: implementations only
    need to provide matching method signatures, not inherit from this class.
    """

    def port(self, index: int) -> Port:
        """Access a single port.

        Args:
            index: Index of the port, likely the number identifier above the outlet.

        Returns:
            Controller for the specified port.
        """
        ...

    def __len__(self) -> int:
        """Count the number of ports.

        Returns:
            Number of ports on this PDU.
        """
        ...
+
+
class Port(Protocol):
    """Controlling the power delivery to a single port of a PDU.

    Structural (typing.Protocol) interface; status() and set() are abstract,
    while reboot() has a concrete default implementation built on set().
    """

    def status(self) -> PowerState:
        """Return the power state for this port.

        Returns:
            Power state
        """
        ...

    def set(self, state: PowerState) -> None:
        """Set the power state for this port.

        Args:
            state: Desired power state
        """
        ...

    def reboot(self, delay_sec: float = DEFAULT_REBOOT_DELAY_SEC) -> None:
        """Set the power state OFF then ON after a delay.

        Args:
            delay_sec: Length to wait before turning back ON. This is important to allow
                the device's capacitors to discharge.
        """
        self.set(PowerState.OFF)
        # Blocking sleep between the two transitions; see DEFAULT_REBOOT_DELAY_SEC.
        time.sleep(delay_sec)
        self.set(PowerState.ON)
+
+
@unique
class PowerState(IntEnum):
    """Binary power state of a single PDU outlet."""

    OFF = 0
    ON = 1
+
+
class PduDevice(object):
    """Abstract base for PDU hardware implementations.

    This is a pure abstract class. Implementations should be of the same
    class name (eg. class PduDevice(pdu.PduDevice)) and exist in
    pdu_lib/<brand>/<device_name>.py. PduDevice objects should not be
    instantiated by users directly.

    TODO(http://b/318877544): Replace PduDevice with PDU
    """

    def __init__(self, host: str, username: str | None, password: str | None) -> None:
        # Guard against direct instantiation of the abstract base.
        if type(self) is PduDevice:
            raise NotImplementedError("Base class: cannot be instantiated directly")
        self.host = host
        self.username = username
        self.password = password
        # Root logger; subclasses use this for device-level messages.
        self.log = logging.getLogger()

    def on_all(self) -> None:
        """Turns on every outlet on the device."""
        raise NotImplementedError("Base class: cannot be called directly")

    def off_all(self) -> None:
        """Turns off every outlet on the device."""
        raise NotImplementedError("Base class: cannot be called directly")

    def on(self, outlet: int) -> None:
        """Turns on one specific outlet on the device.

        Args:
            outlet: index of the outlet to turn on.
        """
        raise NotImplementedError("Base class: cannot be called directly")

    def off(self, outlet: int) -> None:
        """Turns off one specific outlet on the device.

        Args:
            outlet: index of the outlet to turn off.
        """
        raise NotImplementedError("Base class: cannot be called directly")

    def reboot(self, outlet: int) -> None:
        """Toggles one specific outlet on the device to off, then to on.

        Args:
            outlet: index of the outlet to reboot.
        """
        raise NotImplementedError("Base class: cannot be called directly")

    def status(self) -> dict[str, bool]:
        """Retrieves the status of every outlet on the device.

        Return:
            A dictionary matching outlet string to:
                True: if outlet is On
                False: if outlet is Off
        """
        raise NotImplementedError("Base class: cannot be called directly")

    def close(self) -> None:
        """Closes the connection to the device."""
        raise NotImplementedError("Base class: cannot be called directly")
diff --git a/src/antlion/controllers/pdu_lib/__init__.py b/packages/antlion/controllers/pdu_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/pdu_lib/__init__.py
rename to packages/antlion/controllers/pdu_lib/__init__.py
diff --git a/src/antlion/controllers/pdu_lib/digital_loggers/__init__.py b/packages/antlion/controllers/pdu_lib/digital_loggers/__init__.py
similarity index 100%
rename from src/antlion/controllers/pdu_lib/digital_loggers/__init__.py
rename to packages/antlion/controllers/pdu_lib/digital_loggers/__init__.py
diff --git a/packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py b/packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
new file mode 100644
index 0000000..660e965
--- /dev/null
+++ b/packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mobly import signals
+
+from antlion.controllers import pdu
+
+# Create an optional dependency for dlipower since it has a transitive
+# dependency on beautifulsoup4. This library is difficult to maintain as a
+# third_party dependency in Fuchsia since it is hosted on launchpad.
+#
+# TODO(b/246999212): Explore alternatives to the dlipower package
+try:
+    import dlipower
+
+    HAS_IMPORT_DLIPOWER = True
+except ImportError:
+    HAS_IMPORT_DLIPOWER = False
+
+
class PduDevice(pdu.PduDevice):
    """Implementation of pure abstract PduDevice object for the Digital Loggers
    WebPowerSwitch PDUs.

    This controller supports the following Digital Loggers PDUs:
        - Pro (VII)
        - WebPowerSwitch V
        - WebPowerSwitch IV
        - WebPowerSwitch III
        - WebPowerSwitch II
        - Ethernet Power Controller III
    """

    def __init__(self, host: str, username: str | None, password: str | None) -> None:
        """Connects to the WebPowerSwitch and verifies it is reachable.

        Note: This may require allowing plaintext password sign in on the
        power switch, which can be configured in the device's control panel.

        Args:
            host: hostname or IP address of the power switch.
            username: user id for login; None defers to dlipower's default.
            password: password for login; None defers to dlipower's default.

        Raises:
            signals.ControllerError: if the dlipower package is not installed.
            pdu.PduError: if the device cannot be queried for status.
        """
        super().__init__(host, username, password)

        if not HAS_IMPORT_DLIPOWER:
            raise signals.ControllerError(
                "Digital Loggers PDUs are not supported with current installed "
                "packages; install the dlipower package to add support"
            )

        self.power_switch = dlipower.PowerSwitch(
            hostname=host, userid=username, password=password
        )
        # Connection is made at command execution; this verifies the device
        # can be reached before continuing.
        if not self.power_switch.statuslist():
            raise pdu.PduError(
                "Failed to get WebPowerSwitch status. Incorrect host, "
                "userid, or password?"
            )
        self.log.info(f"Connected to WebPowerSwitch ({host}).")

    def on_all(self) -> None:
        """Turn on power to all outlets."""
        for outlet in self.power_switch:
            outlet.on()
            self._verify_state(outlet.name, "ON")

    def off_all(self) -> None:
        """Turn off power to all outlets."""
        for outlet in self.power_switch:
            outlet.off()
            self._verify_state(outlet.name, "OFF")

    def on(self, outlet: int | str) -> None:
        """Turn on power to given outlet.

        Args:
            outlet: the outlet name or number.
        """
        self.power_switch.command_on_outlets("on", str(outlet))
        self._verify_state(outlet, "ON")

    def off(self, outlet: int | str) -> None:
        """Turn off power to given outlet.

        Args:
            outlet: the outlet name or number.
        """
        self.power_switch.command_on_outlets("off", str(outlet))
        self._verify_state(outlet, "OFF")

    def reboot(self, outlet: int | str) -> None:
        """Cycle the given outlet to OFF and back ON.

        Args:
            outlet: the outlet name or number.
        """
        self.power_switch.command_on_outlets("cycle", str(outlet))
        self._verify_state(outlet, "ON")

    def status(self) -> dict[str, bool]:
        """Return the status of the switch outlets.

        Return:
            a dict mapping outlet string numbers to:
                True if outlet is ON
                False if outlet is OFF
        """
        status_list = self.power_switch.statuslist()
        return {str(outlet): state == "ON" for outlet, _, state in status_list}

    def close(self) -> None:
        # Since there isn't a long-running connection, close is not applicable.
        pass

    def _verify_state(
        self, outlet: int | str, expected_state: str, timeout: int = 3
    ) -> None:
        """Verify that the state of a given outlet is at an expected state.

        There can be a slight delay between when the device receives the
        command and when the state actually changes (especially when powering
        on), so poll roughly once per second until the state matches or the
        timeout elapses.

        Args:
            outlet: the outlet name or number to check state.
            expected_state: 'ON' or 'OFF'
            timeout: number of one-second polls before giving up.

        Returns if actual state reaches expected state.

        Raises:
            PduError: if state has not reached expected state at timeout.
        """
        import time  # local import: only needed here, for retry pacing

        actual_state = None
        for _ in range(timeout):
            actual_state = self.power_switch.status(str(outlet))
            if actual_state == expected_state:
                return
            self.log.debug(f"Outlet {outlet} not yet in state {expected_state}")
            # The previous implementation retried immediately, making the
            # "timeout" window only a few milliseconds; pace the retries so
            # they actually span ~`timeout` seconds.
            time.sleep(1)
        raise pdu.PduError(
            f"Outlet {outlet} on WebPowerSwitch ({self.host}) failed to reach "
            f"expected state.\n"
            f"Expected State: {expected_state}\n"
            f"Actual State: {actual_state}"
        )
diff --git a/src/antlion/controllers/pdu_lib/synaccess/__init__.py b/packages/antlion/controllers/pdu_lib/synaccess/__init__.py
similarity index 100%
rename from src/antlion/controllers/pdu_lib/synaccess/__init__.py
rename to packages/antlion/controllers/pdu_lib/synaccess/__init__.py
diff --git a/packages/antlion/controllers/pdu_lib/synaccess/np02b.py b/packages/antlion/controllers/pdu_lib/synaccess/np02b.py
new file mode 100644
index 0000000..d977a2a
--- /dev/null
+++ b/packages/antlion/controllers/pdu_lib/synaccess/np02b.py
@@ -0,0 +1,268 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+import urllib.parse
+import urllib.request
+from dataclasses import dataclass
+from enum import StrEnum, unique
+from typing import Protocol
+
+from mobly import signals
+from mobly.logger import PrefixLoggerAdapter
+
+from antlion.controllers import pdu
+
+
class PduDevice(pdu.PduDevice):
    """Implementation of pure abstract PduDevice object for the Synaccess np02b
    Pdu.

    TODO(http://b/318877544): Replace with NP02B
    """

    def __init__(self, host: str, username: str | None, password: str | None) -> None:
        """Connects to the NP-02B.

        Args:
            host: hostname or IP address of the PDU.
            username: login user; defaults to "admin" when None.
            password: login password; defaults to "admin" when None.
        """
        username = username or "admin"  # default username
        password = password or "admin"  # default password
        super().__init__(host, username, password)
        self.np02b = NP02B(host, username, password)

    def on_all(self) -> None:
        """Turn on power to all outlets."""
        # Ports are 1-indexed; Port raises TypeError for index 0, so iterate
        # 1..len. (The previous range(len(...)) hit port 0 and never port 2.)
        for i in range(1, len(self.np02b) + 1):
            self.np02b.port(i).set(pdu.PowerState.ON)

    def off_all(self) -> None:
        """Turn off power to all outlets."""
        for i in range(1, len(self.np02b) + 1):
            self.np02b.port(i).set(pdu.PowerState.OFF)

    def on(self, outlet: int) -> None:
        """Turn on power to the given outlet.

        Args:
            outlet: 1-indexed outlet number.
        """
        self.np02b.port(outlet).set(pdu.PowerState.ON)

    def off(self, outlet: int) -> None:
        """Turn off power to the given outlet.

        Args:
            outlet: 1-indexed outlet number.
        """
        self.np02b.port(outlet).set(pdu.PowerState.OFF)

    def reboot(self, outlet: int) -> None:
        """Cycle the given outlet OFF then back ON.

        Args:
            outlet: 1-indexed outlet number.
        """
        self.np02b.port(outlet).reboot()

    def status(self) -> dict[str, bool]:
        """Returns the status of the np02b outlets.

        Return:
            Mapping of outlet index ('1' and '2') to true if ON, otherwise
            false.
        """
        return {
            "1": self.np02b.port(1).status() is pdu.PowerState.ON,
            "2": self.np02b.port(2).status() is pdu.PowerState.ON,
        }

    def close(self) -> None:
        """Ensure connection to device is closed.

        In this implementation, this shouldn't be necessary, but could be in
        others that open on creation.
        """
        return
+
+
class NP02B(pdu.PDU):
    """Controller for a Synaccess netBooter NP-02B.

    See https://www.synaccess-net.com/np-02b
    """

    # The NP-02B has exactly two switchable outlets.
    _NUM_PORTS = 2

    def __init__(self, host: str, username: str, password: str) -> None:
        self.client = Client(host, username, password)

    def port(self, index: int) -> pdu.Port:
        """Return a handle to the 1-indexed outlet `index`."""
        return Port(self.client, index)

    def __len__(self) -> int:
        return self._NUM_PORTS
+
+
class ParsePDUResponseError(signals.TestError):
    """Error when the PDU returns an unexpected response.

    Raised when the HTTP body from the NP-02B cannot be parsed into a status
    code (and optional power status), or when the parsed response reports an
    unexpected state.
    """
+
+
class Client:
    """HTTP client for the NP-02B's CGI command interface.

    Commands are URL-encoded into a GET request against http://<host>/cmd.cgi
    and authenticated with HTTP basic auth.
    """

    def __init__(self, host: str, user: str, password: str) -> None:
        """
        Args:
            host: hostname or IP address of the PDU.
            user: basic-auth user id.
            password: basic-auth password.
        """
        self._url = f"http://{host}/cmd.cgi"

        # Pre-register credentials so every request to this host is
        # authenticated via the opener below.
        password_manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(None, host, user, password)
        auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
        self._opener = urllib.request.build_opener(auth_handler)

        self.log = PrefixLoggerAdapter(
            logging.getLogger(),
            {PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[pdu | {host}]"},
        )

    def request(self, command: Command) -> Response:
        """Send a command to the device and parse its response.

        Args:
            command: the command (code + arguments) to send.

        Returns:
            The parsed Response.

        Raises:
            ParsePDUResponseError: if the response body cannot be parsed into
                a StatusCode plus optional PowerStatus.
        """
        cmd = command.code()
        args = command.args()
        if args:
            cmd += f' {" ".join(args)}'

        url = f"{self._url}?{urllib.parse.quote_plus(cmd)}"
        self.log.debug(f"Sending request {url}")

        with self._opener.open(url) as res:
            body = res.read().decode("utf-8")

        self.log.debug(f"Received response: {body}")

        # Syntax for the response should be in the form:
        #    "<StatusCode>[,<PowerStatus>]"
        # For example, StatusCommand returns "$A5,01" when Port 1 is ON and
        # Port 2 is OFF.
        # NOTE(review): "$A5" here looks like the command code; StatusCode
        # only defines "$A0"/"$AF" — confirm actual device output.
        try:
            tokens = body.split(",", 1)
            if len(tokens) == 0:
                raise ParsePDUResponseError(f'Expected a response, found "{body}"')
            code = tokens[0]
            status_code = StatusCode(code)
            power_status = PowerStatus(tokens[1]) if len(tokens) == 2 else None
        except Exception as e:
            raise ParsePDUResponseError(
                f'Failed to parse response from "{body}"'
            ) from e

        return Response(status_code, power_status)
+
+
class Port(pdu.Port):
    """A single 1-indexed outlet on the NP-02B."""

    def __init__(self, client: Client, port: int) -> None:
        """
        Args:
            client: Client used to issue commands to the device.
            port: 1-indexed outlet number; the NP-02B has ports 1 and 2.

        Raises:
            TypeError: if the port index is out of range.
        """
        if port == 0:
            raise TypeError("Invalid port index 0: ports are 1-indexed")
        if port > 2:
            raise TypeError(f"Invalid port index {port}: NP-02B only has 2 ports")

        self.client = client
        self.port = port

    def status(self) -> pdu.PowerState:
        """Query the current power state of this port.

        Raises:
            ParsePDUResponseError: if the device reports failure or its
                response omits the power status.
        """
        resp = self.client.request(StatusCommand())
        if resp.status != StatusCode.OK:
            raise ParsePDUResponseError(
                f"Expected PDU response to be {StatusCode.OK}, got {resp.status}"
            )
        if not resp.power:
            raise ParsePDUResponseError(
                "Expected PDU response to contain power, got None"
            )
        return resp.power.state(self.port)

    def set(self, state: pdu.PowerState) -> None:
        """Set the power state for this port on the PDU.

        Args:
            state: Desired power state

        Raises:
            ParsePDUResponseError: if the device reports failure or the
                verified state does not match the requested state.
        """
        resp = self.client.request(SetCommand(self.port, state))
        if resp.status != StatusCode.OK:
            raise ParsePDUResponseError(
                f"Expected PDU response to be {StatusCode.OK}, got {resp.status}"
            )

        # Verify the newly set power state.
        status = self.status()
        if status is not state:
            raise ParsePDUResponseError(
                f"Expected PDU port {self.port} to be {state}, got {status}"
            )

    def reboot(self) -> None:
        """Cycle this port OFF then back ON using the device's $A4 command.

        This method was missing even though PduDevice.reboot() invokes it and
        RebootCommand was defined unused.

        Raises:
            ParsePDUResponseError: if the device reports failure.
        """
        resp = self.client.request(RebootCommand(self.port))
        if resp.status != StatusCode.OK:
            raise ParsePDUResponseError(
                f"Expected PDU response to be {StatusCode.OK}, got {resp.status}"
            )
+
+
@dataclass
class Response:
    """Parsed reply from the NP-02B CGI interface."""

    # Result code returned by the device (e.g. StatusCode.OK).
    status: StatusCode
    # Per-port power states; None for responses that carry no power status.
    power: PowerStatus | None
+
+
@unique
class StatusCode(StrEnum):
    """Result code at the start of every NP-02B response body."""

    OK = "$A0"
    FAILED = "$AF"
+
+
class Command(Protocol):
    """A cmdCode + arguments pair understood by the NP-02B CGI interface.

    Client.request() joins code() and args() into the request query string.
    """

    def code(self) -> str:
        """Return the cmdCode for this command."""
        ...

    def args(self) -> list[str]:
        """Return the list of arguments for this command."""
        ...
+
+
class PowerStatus:
    """State of all ports"""

    def __init__(self, states: str) -> None:
        # The device lists the highest-numbered port first, so reverse the
        # string to index states by ascending port number.
        self.states: list[pdu.PowerState] = [
            pdu.PowerState(int(digit)) for digit in reversed(states)
        ]

    def ports(self) -> int:
        """Number of ports covered by this status."""
        return len(self.states)

    def state(self, port: int) -> pdu.PowerState:
        """Power state of the given 1-indexed port."""
        return self.states[port - 1]
+
+
class SetCommand(Command):
    """$A3: set a single outlet to a given power state."""

    def __init__(self, port: int, state: pdu.PowerState) -> None:
        self.port = port
        self.state = state

    def code(self) -> str:
        return "$A3"

    def args(self) -> list[str]:
        return [str(arg) for arg in (self.port, self.state)]
+
+
class RebootCommand(Command):
    """$A4: power-cycle a single outlet."""

    def __init__(self, port: int) -> None:
        self.port = port

    def code(self) -> str:
        return "$A4"

    def args(self) -> list[str]:
        port_arg = str(self.port)
        return [port_arg]
+
+
class StatusCommand(Command):
    """$A5: query the power state of all outlets."""

    def code(self) -> str:
        return "$A5"

    def args(self) -> list[str]:
        # The status query takes no arguments.
        return []
+
+
class SetAllCommand(Command):
    """$A7: set every outlet to the same power state."""

    def __init__(self, state: pdu.PowerState) -> None:
        self.state = state

    def code(self) -> str:
        return "$A7"

    def args(self) -> list[str]:
        state_arg = str(self.state)
        return [state_arg]
diff --git a/src/antlion/controllers/sl4a_lib/__init__.py b/packages/antlion/controllers/sl4a_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/sl4a_lib/__init__.py
rename to packages/antlion/controllers/sl4a_lib/__init__.py
diff --git a/packages/antlion/controllers/sl4a_lib/error_reporter.py b/packages/antlion/controllers/sl4a_lib/error_reporter.py
new file mode 100644
index 0000000..0829d01
--- /dev/null
+++ b/packages/antlion/controllers/sl4a_lib/error_reporter.py
@@ -0,0 +1,243 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import re
+import threading
+import time
+
+from antlion import utils
+
+
class ErrorLogger(logging.LoggerAdapter):
    """A logger for a given error report."""

    def __init__(self, label):
        self.label = label
        super().__init__(logging.getLogger(), {})

    def process(self, msg, kwargs):
        """Prefix each log message with this report's label."""
        prefixed = f"[Error Report|{self.label}] {msg}"
        return prefixed, kwargs
+
+
class ErrorReporter(object):
    """A class that reports errors and diagnoses possible points of failure.

    Attributes:
        max_reports: The maximum number of reports that should be reported.
            Defaulted to 1 to prevent multiple reports from reporting at the
            same time over one another.
        name: The name of the report to be used in the error logs.
    """

    def __init__(self, name, max_reports=1):
        """Creates an error report.

        Args:
            name: The name of the error report.
            max_reports: Sets the maximum number of reports to this value.
        """
        self.name = name
        self.max_reports = max_reports
        # Monotonically increasing ticket id; only the first `max_reports`
        # tickets are allowed to produce a report.
        self._ticket_number = 0
        self._ticket_lock = threading.Lock()
        # Number of create_error_report calls currently in flight; awaited
        # by finalize_reports().
        self._current_request_count = 0
        self._accept_requests = True

    def create_error_report(self, sl4a_manager, sl4a_session, rpc_connection):
        """Creates an error report, if possible.

        Runs the diagnosis steps in order; each returns False on failure,
        short-circuiting the remaining steps.

        Returns:
            False iff a report cannot be created.
        """
        if not self._accept_requests:
            return False

        self._current_request_count += 1

        try:
            ticket = self._get_report_ticket()
            if not ticket:
                return False

            report = ErrorLogger(f"{self.name}|{ticket}")
            report.info("Creating error report.")

            (
                self.report_on_adb(sl4a_manager.adb, report)
                and self.report_device_processes(sl4a_manager.adb, report)
                and self.report_sl4a_state(rpc_connection, sl4a_manager.adb, report)
                and self.report_sl4a_session(sl4a_manager, sl4a_session, report)
            )

            return True
        finally:
            self._current_request_count -= 1

    def report_on_adb(self, adb, report):
        """Creates an error report for ADB. Returns false if ADB has failed."""
        adb_uptime = utils.get_command_uptime('"adb .* server"')
        if adb_uptime:
            report.info(
                f"The adb daemon has an uptime of {adb_uptime} ([[dd-]hh:]mm:ss)."
            )
        else:
            report.warning(
                "The adb daemon (on the host machine) is not "
                "running. All forwarded ports have been removed."
            )
            return False

        devices_output = adb.devices()
        if adb.serial not in devices_output:
            report.warning(
                "This device cannot be found by ADB. The device may have shut "
                "down or disconnected."
            )
            return False
        elif re.findall(r"%s\s+offline" % adb.serial, devices_output):
            report.warning(
                "The device is marked as offline in ADB. We are no longer able "
                "to access the device."
            )
            return False
        else:
            report.info("The device is online and accessible through ADB calls.")
        return True

    def report_device_processes(self, adb, report):
        """Creates an error report for the device's required processes.

        Returns:
            False iff user-apks cannot be communicated with over tcp.
        """
        zygote_uptime = utils.get_device_process_uptime(adb, "zygote")
        if zygote_uptime:
            report.info(
                "Zygote has been running for %s ([[dd-]hh:]mm:ss). If this "
                "value is low, the phone may have recently crashed." % zygote_uptime
            )
        else:
            report.warning(
                "Zygote has been killed. It is likely the Android Runtime has "
                "crashed. Check the bugreport/logcat for more information."
            )
            return False

        netd_uptime = utils.get_device_process_uptime(adb, "netd")
        if netd_uptime:
            # Bug fix: this message previously interpolated zygote_uptime.
            report.info(
                "Netd has been running for %s ([[dd-]hh:]mm:ss). If this "
                "value is low, the phone may have recently crashed." % netd_uptime
            )
        else:
            report.warning(
                "Netd has been killed. The Android Runtime may have crashed. "
                "Check the bugreport/logcat for more information."
            )
            return False

        adbd_uptime = utils.get_device_process_uptime(adb, "adbd")
        # Bug fix: this previously tested netd_uptime, so a dead adbd with a
        # live netd was misreported as running.
        if adbd_uptime:
            report.info(
                "Adbd has been running for %s ([[dd-]hh:]mm:ss). If this "
                "value is low, the phone may have recently crashed." % adbd_uptime
            )
        else:
            report.warning("Adbd is not running.")
            return False
        return True

    def report_sl4a_state(self, rpc_connection, adb, report):
        """Creates an error report for the state of SL4A."""
        report.info(f"Diagnosing Failure over connection {rpc_connection.ports}.")

        ports = rpc_connection.ports
        forwarded_ports_output = adb.forward("--list")

        expected_output = "%s tcp:%s tcp:%s" % (
            adb.serial,
            ports.forwarded_port,
            ports.server_port,
        )
        if expected_output not in forwarded_ports_output:
            formatted_output = re.sub(
                "^", "    ", forwarded_ports_output, flags=re.MULTILINE
            )
            report.warning(
                "The forwarded port for the failed RpcConnection is missing.\n"
                "Expected:\n    %s\nBut found:\n%s"
                % (expected_output, formatted_output)
            )
            return False
        else:
            report.info(
                "The connection port has been properly forwarded to " "the device."
            )

        sl4a_uptime = utils.get_device_process_uptime(
            adb, "com.googlecode.android_scripting"
        )
        if sl4a_uptime:
            report.info(
                "SL4A has been running for %s ([[dd-]hh:]mm:ss). If this "
                "value is lower than the test case, it must have been "
                "restarted during the test." % sl4a_uptime
            )
        else:
            report.warning(
                "The SL4A scripting service is not running. SL4A may have "
                "crashed, or have been terminated by the Android Runtime."
            )
            return False
        return True

    def report_sl4a_session(self, sl4a_manager, session, report):
        """Reports the state of an SL4A session."""
        if session.server_port not in sl4a_manager.sl4a_ports_in_use:
            report.warning(
                "SL4A server port %s not found in set of open "
                "ports %s" % (session.server_port, sl4a_manager.sl4a_ports_in_use)
            )
            return False

        if session not in sl4a_manager.sessions.values():
            report.warning(
                "SL4A session %s over port %s is not managed by "
                "the SL4A Manager. This session is already dead."
                % (session.uid, session.server_port)
            )
            return False
        return True

    def finalize_reports(self):
        """Stop accepting new reports and wait for in-flight ones to finish."""
        self._accept_requests = False
        while self._current_request_count > 0:
            # Wait for other threads to finish.
            time.sleep(0.1)

    def _get_report_ticket(self):
        """Returns the next ticket, or none if all tickets have been used."""
        logging.debug("Getting ticket for SL4A error report.")
        with self._ticket_lock:
            self._ticket_number += 1
            ticket_number = self._ticket_number

        if ticket_number <= self.max_reports:
            return ticket_number
        else:
            return None
diff --git a/packages/antlion/controllers/sl4a_lib/event_dispatcher.py b/packages/antlion/controllers/sl4a_lib/event_dispatcher.py
new file mode 100644
index 0000000..826bf1d
--- /dev/null
+++ b/packages/antlion/controllers/sl4a_lib/event_dispatcher.py
@@ -0,0 +1,480 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import queue
+import re
+import threading
+import time
+from concurrent.futures import ThreadPoolExecutor
+
+from mobly import logger
+
+from antlion.controllers.sl4a_lib import rpc_client
+
+
class EventDispatcherError(Exception):
    """The base class for all EventDispatcher exceptions.

    Catching this type catches every dispatcher-specific error below.
    """
+
+
class IllegalStateError(EventDispatcherError):
    """Raised when the user tries to put the event_dispatcher into an
    illegal state (e.g. registering a handler after polling has started)."""
+
+
class DuplicateError(EventDispatcherError):
    """Raised when a second event handler is registered for an event name
    that already has one (only one handler per event name is allowed)."""
+
+
+class EventDispatcher:
+    """A class for managing the events for an SL4A Session.
+
+    Attributes:
+        _serial: The serial of the device.
+        _rpc_client: The rpc client for that session.
+        _started: A bool that holds whether or not the event dispatcher is
+                  running.
+        _executor: The thread pool executor for running event handlers and
+                   polling.
+        _event_dict: A dictionary of str eventName = Queue<Event> eventQueue
+        _handlers: A dictionary of str eventName => (lambda, args) handler
+        _lock: A lock that prevents multiple reads/writes to the event queues.
+        log: The EventDispatcher's logger.
+    """
+
+    DEFAULT_TIMEOUT = 60
+
    def __init__(self, serial, rpc_client):
        """Initializes the dispatcher for a device's SL4A session.

        Args:
            serial: serial of the device, used only for log prefixes.
            rpc_client: the rpc client for that session; must expose `uid`.
        """
        self._serial = serial
        self._rpc_client = rpc_client
        self._started = False
        # Created lazily by start(); runs poll_events and event handlers.
        self._executor = None
        # Maps event name -> queue.Queue of events received for that name.
        self._event_dict = {}
        # Maps event name -> (handler, args); populated before start().
        self._handlers = {}
        # Guards _event_dict and _handlers.
        self._lock = threading.RLock()

        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[E Dispatcher|{self._serial}|{self._rpc_client.uid}]",
            },
        )
+
+    def poll_events(self):
+        """Continuously polls all types of events from sl4a.
+
+        Events are sorted by name and store in separate queues.
+        If there are registered handlers, the handlers will be called with
+        corresponding event immediately upon event discovery, and the event
+        won't be stored. If exceptions occur, stop the dispatcher and return
+        """
+        while self._started:
+            try:
+                # 60000 in ms, timeout in second
+                event_obj = self._rpc_client.eventWait(60000, timeout=120)
+            except rpc_client.Sl4aConnectionError as e:
+                if self._rpc_client.is_alive:
+                    self.log.warning("Closing due to closed session.")
+                    break
+                else:
+                    self.log.warning(f"Closing due to error: {e}.")
+                    self.close()
+                    raise e
+            if not event_obj:
+                continue
+            elif "name" not in event_obj:
+                self.log.error(f"Received Malformed event {event_obj}")
+                continue
+            else:
+                event_name = event_obj["name"]
+            # if handler registered, process event
+            if event_name == "EventDispatcherShutdown":
+                self.log.debug("Received shutdown signal.")
+                # closeSl4aSession has been called, which closes the event
+                # dispatcher. Stop execution on this polling thread.
+                return
+            if event_name in self._handlers:
+                self.log.debug(
+                    "Using handler %s for event: %r"
+                    % (self._handlers[event_name].__name__, event_obj)
+                )
+                self.handle_subscribed_event(event_obj, event_name)
+            else:
+                self.log.debug(f"Queuing event: {event_obj!r}")
+                self._lock.acquire()
+                if event_name in self._event_dict:  # otherwise, cache event
+                    self._event_dict[event_name].put(event_obj)
+                else:
+                    q = queue.Queue()
+                    q.put(event_obj)
+                    self._event_dict[event_name] = q
+                self._lock.release()
+
+    def register_handler(self, handler, event_name, args):
+        """Registers an event handler.
+
+        One type of event can only have one event handler associated with it.
+
+        Args:
+            handler: The event handler function to be registered.
+            event_name: Name of the event the handler is for.
+            args: User arguments to be passed to the handler when it's called.
+
+        Raises:
+            IllegalStateError: Raised if attempts to register a handler after
+                the dispatcher starts running.
+            DuplicateError: Raised if attempts to register more than one
+                handler for one type of event.
+        """
+        if self._started:
+            raise IllegalStateError(
+                "Cannot register service after polling is " "started."
+            )
+        self._lock.acquire()
+        try:
+            if event_name in self._handlers:
+                raise DuplicateError(f"A handler for {event_name} already exists")
+            self._handlers[event_name] = (handler, args)
+        finally:
+            self._lock.release()
+
+    def start(self):
+        """Starts the event dispatcher.
+
+        Initiates executor and start polling events.
+
+        Raises:
+            IllegalStateError: Can't start a dispatcher again when it's already
+                running.
+        """
+        if not self._started:
+            self._started = True
+            self._executor = ThreadPoolExecutor(max_workers=32)
+            self._executor.submit(self.poll_events)
+        else:
+            raise IllegalStateError("Dispatcher is already started.")
+
+    def close(self):
+        """Clean up and release resources.
+
+        This function should only be called after a
+        rpc_client.closeSl4aSession() call.
+        """
+        if not self._started:
+            return
+        self._started = False
+        self._executor.shutdown(wait=True)
+        self.clear_all_events()
+
+    def pop_event(self, event_name, timeout=DEFAULT_TIMEOUT):
+        """Pop an event from its queue.
+
+        Return and remove the oldest entry of an event.
+        Block until an event of specified name is available or
+        times out if timeout is set.
+
+        Args:
+            event_name: Name of the event to be popped.
+            timeout: Number of seconds to wait when event is not present.
+                Never times out if None.
+
+        Returns:
+            event: The oldest entry of the specified event. None if timed out.
+
+        Raises:
+            IllegalStateError: Raised if pop is called before the dispatcher
+                starts polling.
+        """
+        if not self._started:
+            raise IllegalStateError("Dispatcher needs to be started before popping.")
+
+        e_queue = self.get_event_q(event_name)
+
+        if not e_queue:
+            raise IllegalStateError(f"Failed to get an event queue for {event_name}")
+
+        try:
+            # Block for timeout
+            if timeout:
+                return e_queue.get(True, timeout)
+            # Non-blocking poll for event
+            elif timeout == 0:
+                return e_queue.get(False)
+            else:
+                # Block forever on event wait
+                return e_queue.get(True)
+        except queue.Empty:
+            msg = f"Timeout after {timeout}s waiting for event: {event_name}"
+            self.log.info(msg)
+            raise queue.Empty(msg)
+
    def wait_for_event(
        self, event_name, predicate, timeout=DEFAULT_TIMEOUT, *args, **kwargs
    ):
        """Wait for an event that satisfies a predicate to appear.

        Continuously pop events of a particular name and check against the
        predicate until an event that satisfies the predicate is popped or
        timed out. Note this will remove all the events of the same name that
        do not satisfy the predicate in the process, unless
        consume_ignored_events is set to False.

        Args:
            event_name: Name of the event to be popped.
            predicate: A function that takes an event and returns True if the
                predicate is satisfied, False otherwise.
            timeout: Number of seconds to wait.
            *args: Optional positional args passed to predicate().
            **kwargs: Optional keyword args passed to predicate().
                consume_ignored_events: Whether or not to consume events while
                    searching for the desired event. Defaults to True if unset.

        Returns:
            The event that satisfies the predicate.

        Raises:
            queue.Empty: Raised if no event that satisfies the predicate was
                found before time out.
        """
        deadline = time.time() + timeout
        ignored_events = []
        # Must be popped from kwargs before predicate() is called below, since
        # the remaining kwargs are forwarded to the predicate.
        consume_events = kwargs.pop("consume_ignored_events", True)
        while True:
            event = None
            try:
                # Poll in 1-second slices so the deadline is re-checked even
                # when no events are arriving.
                event = self.pop_event(event_name, 1)
                if consume_events:
                    self.log.debug(f"Consuming event: {event!r}")
                else:
                    # Keep the non-matching event so it can be re-queued later.
                    self.log.debug(f"Peeking at event: {event!r}")
                    ignored_events.append(event)
            except queue.Empty:
                pass

            if event and predicate(event, *args, **kwargs):
                # Re-queue any events that were only peeked at.
                for ignored_event in ignored_events:
                    self.get_event_q(event_name).put(ignored_event)
                self.log.debug(f"Matched event: {event!r} with {predicate.__name__}")
                return event

            if time.time() > deadline:
                # Re-queue peeked events on timeout too, so callers may retry.
                for ignored_event in ignored_events:
                    self.get_event_q(event_name).put(ignored_event)
                msg = f"Timeout after {timeout}s waiting for event: {event_name}"
                self.log.info(msg)
                raise queue.Empty(msg)
+
+    def pop_events(self, regex_pattern, timeout, freq=1):
+        """Pop events whose names match a regex pattern.
+
+        If such event(s) exist, pop one event from each event queue that
+        satisfies the condition. Otherwise, wait for an event that satisfies
+        the condition to occur, with timeout.
+
+        Results are sorted by timestamp in ascending order.
+
+        Args:
+            regex_pattern: The regular expression pattern that an event name
+                should match in order to be popped.
+            timeout: Number of seconds to wait for events in case no event
+                matching the condition exits when the function is called.
+
+        Returns:
+            results: Pop events whose names match a regex pattern.
+                Empty if none exist and the wait timed out.
+
+        Raises:
+            IllegalStateError: Raised if pop is called before the dispatcher
+                starts polling.
+            queue.Empty: Raised if no event was found before time out.
+        """
+        if not self._started:
+            raise IllegalStateError("Dispatcher needs to be started before popping.")
+        deadline = time.time() + timeout
+        while True:
+            # TODO: fix the sleep loop
+            results = self._match_and_pop(regex_pattern)
+            if len(results) != 0 or time.time() > deadline:
+                break
+            time.sleep(freq)
+        if len(results) == 0:
+            msg = f"Timeout after {timeout}s waiting for event: {regex_pattern}"
+            self.log.error(msg)
+            raise queue.Empty(msg)
+
+        return sorted(results, key=lambda event: event["time"])
+
+    def _match_and_pop(self, regex_pattern):
+        """Pop one event from each of the event queues whose names
+        match (in a sense of regular expression) regex_pattern.
+        """
+        results = []
+        self._lock.acquire()
+        for name in self._event_dict.keys():
+            if re.match(regex_pattern, name):
+                q = self._event_dict[name]
+                if q:
+                    try:
+                        results.append(q.get(False))
+                    except queue.Empty:
+                        pass
+        self._lock.release()
+        return results
+
+    def get_event_q(self, event_name):
+        """Obtain the queue storing events of the specified name.
+
+        If no event of this name has been polled, wait for one to.
+
+        Returns: A queue storing all the events of the specified name.
+        """
+        self._lock.acquire()
+        if event_name not in self._event_dict or self._event_dict[event_name] is None:
+            self._event_dict[event_name] = queue.Queue()
+        self._lock.release()
+
+        event_queue = self._event_dict[event_name]
+        return event_queue
+
+    def handle_subscribed_event(self, event_obj, event_name):
+        """Execute the registered handler of an event.
+
+        Retrieve the handler and its arguments, and execute the handler in a
+            new thread.
+
+        Args:
+            event_obj: Json object of the event.
+            event_name: Name of the event to call handler for.
+        """
+        handler, args = self._handlers[event_name]
+        self._executor.submit(handler, event_obj, *args)
+
+    def _handle(
+        self, event_handler, event_name, user_args, event_timeout, cond, cond_timeout
+    ):
+        """Pop an event of specified type and calls its handler on it. If
+        condition is not None, block until condition is met or timeout.
+        """
+        if cond:
+            cond.wait(cond_timeout)
+        event = self.pop_event(event_name, event_timeout)
+        return event_handler(event, *user_args)
+
+    def handle_event(
+        self,
+        event_handler,
+        event_name,
+        user_args,
+        event_timeout=None,
+        cond=None,
+        cond_timeout=None,
+    ):
+        """Handle events that don't have registered handlers
+
+        In a new thread, poll one event of specified type from its queue and
+        execute its handler. If no such event exists, the thread waits until
+        one appears.
+
+        Args:
+            event_handler: Handler for the event, which should take at least
+                one argument - the event json object.
+            event_name: Name of the event to be handled.
+            user_args: User arguments for the handler; to be passed in after
+                the event json.
+            event_timeout: Number of seconds to wait for the event to come.
+            cond: A condition to wait on before executing the handler. Should
+                be a threading.Event object.
+            cond_timeout: Number of seconds to wait before the condition times
+                out. Never times out if None.
+
+        Returns:
+            worker: A concurrent.Future object associated with the handler.
+                If blocking call worker.result() is triggered, the handler
+                needs to return something to unblock.
+        """
+        worker = self._executor.submit(
+            self._handle,
+            event_handler,
+            event_name,
+            user_args,
+            event_timeout,
+            cond,
+            cond_timeout,
+        )
+        return worker
+
+    def pop_all(self, event_name):
+        """Return and remove all stored events of a specified name.
+
+        Pops all events from their queue. May miss the latest ones.
+        If no event is available, return immediately.
+
+        Args:
+            event_name: Name of the events to be popped.
+
+        Returns:
+           results: List of the desired events.
+
+        Raises:
+            IllegalStateError: Raised if pop is called before the dispatcher
+                starts polling.
+        """
+        if not self._started:
+            raise IllegalStateError(
+                ("Dispatcher needs to be started before " "popping.")
+            )
+        results = []
+        try:
+            self._lock.acquire()
+            while True:
+                e = self._event_dict[event_name].get(block=False)
+                results.append(e)
+        except (queue.Empty, KeyError):
+            return results
+        finally:
+            self._lock.release()
+
+    def clear_events(self, event_name):
+        """Clear all events of a particular name.
+
+        Args:
+            event_name: Name of the events to be popped.
+        """
+        self._lock.acquire()
+        try:
+            q = self.get_event_q(event_name)
+            q.queue.clear()
+        except queue.Empty:
+            return
+        finally:
+            self._lock.release()
+
+    def clear_all_events(self):
+        """Clear all event queues and their cached events."""
+        self._lock.acquire()
+        self._event_dict.clear()
+        self._lock.release()
+
+    def is_event_match(self, event, field, value):
+        return self.is_event_match_for_list(event, field, [value])
+
+    def is_event_match_for_list(self, event, field, value_list):
+        try:
+            value_in_event = event["data"][field]
+        except KeyError:
+            return False
+        for value in value_list:
+            if value_in_event == value:
+                return True
+        return False
diff --git a/packages/antlion/controllers/sl4a_lib/rpc_client.py b/packages/antlion/controllers/sl4a_lib/rpc_client.py
new file mode 100644
index 0000000..8dce0ae
--- /dev/null
+++ b/packages/antlion/controllers/sl4a_lib/rpc_client.py
@@ -0,0 +1,382 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import logging
+import socket
+import threading
+import time
+from concurrent import futures
+
+from mobly import logger
+
+from antlion import error
+
# The default socket timeout, in seconds, applied when an RPC does not
# specify its own timeout (see RpcClient.rpc and RpcConnection).
SOCKET_TIMEOUT = 60

# Sentinel session UID used before a real UID has been received from SL4A.
UNKNOWN_UID = -1
+
+
class Sl4aException(error.ActsError):
    """Base class of every SL4A-related exception."""
+
+
class Sl4aStartError(Sl4aException):
    """Raised when the SL4A server cannot be started."""
+
+
class Sl4aApiError(Sl4aException):
    """Raised when the remote API reports an error.

    This error mirrors the JSON-RPC 2.0 spec for Error Response objects.

    Attributes:
        code: The error code returned by SL4A. Not to be confused with
            ActsError's error_code.
        message: The error message returned by SL4A.
        data: The extra data, if any, returned by SL4A.
        rpc_name: The name of the RPC that produced the error.
    """

    def __init__(self, message, code=-1, data=None, rpc_name=""):
        super().__init__()
        self.message = message
        self.code = code
        # Materialize the default here to avoid a shared mutable default.
        self.data = {} if data is None else data
        self.rpc_name = rpc_name

    def __str__(self):
        # Both branches now use f-strings; the original mixed %-formatting
        # and f-strings for the same message shape.
        if self.data:
            return (
                f"Error in RPC {self.rpc_name} "
                f"{self.code}:{self.message}:{self.data}"
            )
        return f"Error in RPC {self.rpc_name} {self.code}:{self.message}"
+
+
class Sl4aConnectionError(Sl4aException):
    """Raised when a connection to the SL4A server cannot be established."""
+
+
class Sl4aProtocolError(Sl4aException):
    """Raised when data exchange with the on-device server goes wrong."""

    NO_RESPONSE_FROM_HANDSHAKE = "No response from handshake."
    NO_RESPONSE_FROM_SERVER = "No response from server."
    MISMATCHED_API_ID = "Mismatched API id."
+
+
class Sl4aNotInstalledError(Sl4aException):
    """Raised when an Sl4aClient is created but SL4A is not installed."""
+
+
class Sl4aRpcTimeoutError(Sl4aException):
    """Raised when an SL4A RPC times out waiting for a response."""
+
+
class RpcClient(object):
    """An RPC client capable of processing multiple RPCs concurrently.

    Attributes:
        _free_connections: A list of all idle RpcConnections.
        _working_connections: A list of all working RpcConnections.
        _lock: A lock used for accessing critical memory.
        max_connections: The maximum number of RpcConnections at a time.
            Increasing or decreasing the number of max connections does NOT
            modify the thread pool size being used for self.future RPC calls.
        _log: The logger for this RpcClient.
    """

    # The default value for the maximum amount of connections for a client.
    # (This was previously a stray bare string, which is a no-op statement,
    # not documentation.)
    DEFAULT_MAX_CONNECTION = 15

    class AsyncClient(object):
        """An object that allows RPC calls to be called asynchronously.

        Attributes:
            _rpc_client: The RpcClient to use when making calls.
            _executor: The ThreadPoolExecutor used to keep track of workers.
        """

        def __init__(self, rpc_client):
            self._rpc_client = rpc_client
            # Reserve 2 connections for synchronous calls so async work can
            # never exhaust the whole connection pool.
            self._executor = futures.ThreadPoolExecutor(
                max_workers=max(rpc_client.max_connections - 2, 1)
            )

        def rpc(self, name, *args, **kwargs):
            # NOTE(review): `name` is submitted directly, so it must already
            # be a callable here — unlike __getattr__ below, which resolves
            # the attribute on the RpcClient first. Confirm this is intended.
            future = self._executor.submit(name, *args, **kwargs)
            return future

        def __getattr__(self, name):
            """Wrapper for python magic to turn method calls into RPC calls."""

            def rpc_call(*args, **kwargs):
                future = self._executor.submit(
                    self._rpc_client.__getattr__(name), *args, **kwargs
                )
                return future

            return rpc_call

    def __init__(
        self,
        uid,
        serial,
        on_error_callback,
        _create_connection_func,
        max_connections=None,
    ):
        """Creates a new RpcClient object.

        Args:
            uid: The session uid this client is a part of.
            serial: The serial of the Android device. Used for logging.
            on_error_callback: A callback for when a connection error is raised.
            _create_connection_func: A reference to the function that creates a
                new session.
            max_connections: The maximum number of connections the RpcClient
                can have.
        """
        self._serial = serial
        self.on_error = on_error_callback
        self._create_connection_func = _create_connection_func
        # Open the first connection eagerly; it also yields the real uid.
        self._free_connections = [self._create_connection_func(uid)]

        self.uid = self._free_connections[0].uid
        self._lock = threading.Lock()

        self._log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                # Bug fix: this previously read `self._uid`, which is never
                # assigned anywhere and raised AttributeError on construction;
                # the session id is stored in `self.uid` (set above).
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[RPC Service|{self._serial}|{self.uid}]",
            },
        )

        self._working_connections = []
        if max_connections is None:
            self.max_connections = RpcClient.DEFAULT_MAX_CONNECTION
        else:
            self.max_connections = max_connections

        self._async_client = RpcClient.AsyncClient(self)
        self.is_alive = True

    def terminate(self):
        """Terminates all connections to the SL4A server."""
        if len(self._working_connections) > 0:
            # Lazy %-args defer formatting until the record is emitted; also
            # fixes the missing space after "responses." in the old message.
            self._log.warning(
                "%s connections are still active, and waiting on "
                "responses. Closing these connections now.",
                len(self._working_connections),
            )
        connections = self._free_connections + self._working_connections
        for connection in connections:
            self._log.debug(f"Closing connection over ports {connection.ports}")
            connection.close()
        self._free_connections = []
        self._working_connections = []
        self.is_alive = False

    def _get_free_connection(self):
        """Returns a free connection to be used for an RPC call.

        This function also adds the client to the working set to prevent
        multiple users from obtaining the same client.
        """
        while True:
            if len(self._free_connections) > 0:
                with self._lock:
                    # Re-check: another thread may have grabbed the remaining
                    # connection while we were waiting for the lock.
                    if len(self._free_connections) == 0:
                        continue
                    client = self._free_connections.pop()
                    self._working_connections.append(client)
                    return client

            client_count = len(self._free_connections) + len(self._working_connections)
            if client_count < self.max_connections:
                with self._lock:
                    # Re-compute under the lock before creating a connection.
                    client_count = len(self._free_connections) + len(
                        self._working_connections
                    )
                    if client_count < self.max_connections:
                        client = self._create_connection_func(self.uid)
                        self._working_connections.append(client)
                        return client
            # Busy-wait until a connection is released or a slot opens up.
            time.sleep(0.01)

    def _release_working_connection(self, connection):
        """Marks a working client as free.

        Args:
            connection: The client to mark as free.
        Raises:
            A ValueError if the client is not a known working connection.
        """
        # We need to keep this code atomic because the client count is based on
        # the length of the free and working connection list lengths.
        with self._lock:
            self._working_connections.remove(connection)
            self._free_connections.append(connection)

    def rpc(self, method, *args, timeout=None, retries=3):
        """Sends an rpc to sl4a.

        Sends an rpc call to sl4a over this RpcClient's corresponding session.

        Args:
            method: str, The name of the method to execute.
            args: any, The args to send to sl4a.
            timeout: The amount of time to wait for a response.
            retries: Misnomer, is actually the number of tries.

        Returns:
            The result of the rpc.

        Raises:
            Sl4aProtocolError: Something went wrong with the sl4a protocol.
            Sl4aApiError: The rpc went through, however executed with errors.
            Sl4aConnectionError: The device disconnected during the call.
            Sl4aRpcTimeoutError: The socket timed out awaiting a response.
        """
        connection = self._get_free_connection()
        ticket = connection.get_new_ticket()
        timed_out = False
        if timeout:
            connection.set_timeout(timeout)
        data = {"id": ticket, "method": method, "params": args}
        request = json.dumps(data)
        response = ""
        try:
            for i in range(1, retries + 1):
                connection.send_request(request)

                response = connection.get_response()
                if not response:
                    if i < retries:
                        self._log.warning(
                            "No response for RPC method %s on iteration %s", method, i
                        )
                        continue
                    else:
                        self._log.exception(
                            "No response for RPC method %s on iteration %s", method, i
                        )
                        self.on_error(connection)
                        raise Sl4aProtocolError(
                            Sl4aProtocolError.NO_RESPONSE_FROM_SERVER
                        )
                else:
                    break
        except BrokenPipeError as e:
            if self.is_alive:
                self._log.exception(
                    "The device disconnected during RPC call "
                    "%s. Please check the logcat for a crash "
                    "or disconnect.",
                    method,
                )
                self.on_error(connection)
            else:
                self._log.warning("The connection was killed during cleanup:")
                self._log.warning(e)
            raise Sl4aConnectionError(e)
        except socket.timeout as err:
            # If a socket connection has timed out, the socket can no longer be
            # used. Close it out and remove the socket from the connection pool.
            timed_out = True
            self._log.warning(
                'RPC "%s" (id: %s) timed out after %s seconds.',
                method,
                ticket,
                timeout or SOCKET_TIMEOUT,
            )
            self._log.debug(f"Closing timed out connection over {connection.ports}")
            connection.close()
            # Fix: mutate the shared working list under the lock (matching
            # _release_working_connection) to avoid racing other threads.
            with self._lock:
                self._working_connections.remove(connection)
            # Re-raise the error as an SL4A Error so end users can process it.
            raise Sl4aRpcTimeoutError(err)
        finally:
            if not timed_out:
                if timeout:
                    connection.set_timeout(SOCKET_TIMEOUT)
                self._release_working_connection(connection)
        result = json.loads(str(response, encoding="utf8"))

        if result["error"]:
            error_object = result["error"]
            if isinstance(error_object, dict):
                # Uses JSON-RPC 2.0 Format
                sl4a_api_error = Sl4aApiError(
                    error_object.get("message", None),
                    error_object.get("code", -1),
                    error_object.get("data", {}),
                    rpc_name=method,
                )
            else:
                # Fallback on JSON-RPC 1.0 Format
                sl4a_api_error = Sl4aApiError(error_object, rpc_name=method)
            self._log.warning(sl4a_api_error)
            raise sl4a_api_error
        if result["id"] != ticket:
            self._log.error(
                "RPC method %s with mismatched api id %s", method, result["id"]
            )
            raise Sl4aProtocolError(Sl4aProtocolError.MISMATCHED_API_ID)
        return result["result"]

    @property
    def future(self):
        """Returns a magic function that returns a future running an RPC call.

        This function effectively allows the idiom:

        >>> rpc_client = RpcClient(...)
        >>> # returns after call finishes
        >>> rpc_client.someRpcCall()
        >>> # Immediately returns a reference to the RPC's future, running
        >>> # the lengthy RPC call on another thread.
        >>> future = rpc_client.future.someLengthyRpcCall()
        >>> rpc_client.doOtherThings()
        >>> ...
        >>> # Wait for and get the returned value of the lengthy RPC.
        >>> # Can specify a timeout as well.
        >>> value = future.result()

        The number of concurrent calls to this method is limited to
        (max_connections - 2), to prevent future calls from exhausting all free
        connections.
        """
        return self._async_client

    def __getattr__(self, name):
        """Wrapper for python magic to turn method calls into RPC calls."""

        def rpc_call(*args, **kwargs):
            return self.rpc(name, *args, **kwargs)

        if not self.is_alive:
            raise Sl4aStartError(
                "This SL4A session has already been terminated. You must "
                "create a new session to continue."
            )
        return rpc_call
diff --git a/packages/antlion/controllers/sl4a_lib/rpc_connection.py b/packages/antlion/controllers/sl4a_lib/rpc_connection.py
new file mode 100644
index 0000000..cfabcad
--- /dev/null
+++ b/packages/antlion/controllers/sl4a_lib/rpc_connection.py
@@ -0,0 +1,146 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import logging
+import socket
+import threading
+
+from mobly import logger
+
+from antlion.controllers.adb import AdbProxy
+from antlion.controllers.sl4a_lib import rpc_client
+from antlion.controllers.sl4a_lib.sl4a_ports import Sl4aPorts
+
# Sentinel session UID used before a real UID has been received from SL4A.
UNKNOWN_UID = -1
+
+
class Sl4aConnectionCommand(object):
    """Session-protocol commands understood by the SL4A server.

    INIT: Starts a brand-new SL4A session.
    CONTINUE: Attaches another connection to an existing session.
    """

    INIT = "initiate"
    CONTINUE = "continue"
+
+
class RpcConnection(object):
    """A single RPC Connection thread.

    Attributes:
        _client_socket: The socket this connection uses.
        _socket_file: The file created over the _client_socket.
        _ticket_counter: The counter storing the current ticket number.
        _ticket_lock: A lock on the ticket counter to prevent ticket collisions.
        adb: A reference to the AdbProxy of the AndroidDevice. Used for logging.
        log: The logger for this RPC Client.
        ports: The Sl4aPorts object that stores the ports this connection uses.
        uid: The SL4A session ID.
    """

    def __init__(
        self, adb: AdbProxy, ports: Sl4aPorts, client_socket, socket_fd, uid=UNKNOWN_UID
    ):
        self._client_socket = client_socket
        self._socket_file = socket_fd
        self._ticket_counter = 0
        self._ticket_lock = threading.Lock()
        self.adb = adb
        self.uid = uid

        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4A Client|{self.adb.serial}|{ports.client_port}|{self.uid}]",
            },
        )

        self.ports = ports
        # Apply the default timeout so reads can never block forever.
        self.set_timeout(rpc_client.SOCKET_TIMEOUT)

    def open(self):
        """Performs the session handshake.

        Continues the existing session when a UID is already known; otherwise
        initiates a new one.
        """
        if self.uid != UNKNOWN_UID:
            start_command = Sl4aConnectionCommand.CONTINUE
        else:
            start_command = Sl4aConnectionCommand.INIT

        self._initiate_handshake(start_command)

    def _initiate_handshake(self, start_command):
        """Establishes a connection with the SL4A server.

        Args:
            start_command: The command to send. See Sl4aConnectionCommand.

        Raises:
            rpc_client.Sl4aProtocolError: If the server sends no response.
            socket.timeout: If the server does not respond in time.
        """
        try:
            resp = self._cmd(start_command)
        except socket.timeout as e:
            self.log.error("Failed to open socket connection: %s", e)
            raise
        if not resp:
            raise rpc_client.Sl4aProtocolError(
                rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE
            )
        result = json.loads(str(resp, encoding="utf8"))
        if result["status"]:
            self.uid = result["uid"]
        else:
            self.log.warning(f"UID not received for connection {self.ports}.")
            self.uid = UNKNOWN_UID
        self.log.debug(f"Created connection over: {self.ports}.")

    def _cmd(self, command):
        """Sends a session protocol command to SL4A to establish communication.

        Args:
            command: The name of the command to execute.

        Returns:
            The line that was written back.
        """
        self.send_request(json.dumps({"cmd": command, "uid": self.uid}))
        return self.get_response()

    def get_new_ticket(self):
        """Returns a unique, monotonically increasing ticket for a request."""
        with self._ticket_lock:
            self._ticket_counter += 1
            ticket = self._ticket_counter
        return ticket

    def set_timeout(self, timeout):
        """Sets the socket's wait-for-response timeout, in seconds."""
        self._client_socket.settimeout(timeout)

    def send_request(self, request):
        """Sends a request over the connection."""
        self._socket_file.write(request.encode("utf8") + b"\n")
        self._socket_file.flush()
        self.log.debug(f"Sent: {request}")

    def get_response(self):
        """Returns the first response (raw bytes) sent back to the client."""
        data = self._socket_file.readline()
        # Renamed from `bytes`, which shadowed the builtin type; decoding is
        # only for logging — the raw bytes are what callers consume.
        decoded = data.decode("utf8", errors="replace")
        self.log.debug(f"Received: {decoded}")
        return data

    def close(self):
        """Closes the connection gracefully and tears down the port forward."""
        self._client_socket.close()
        self.adb.remove_tcp_forward(self.ports.forwarded_port)
diff --git a/packages/antlion/controllers/sl4a_lib/sl4a_manager.py b/packages/antlion/controllers/sl4a_lib/sl4a_manager.py
new file mode 100644
index 0000000..d093b5a
--- /dev/null
+++ b/packages/antlion/controllers/sl4a_lib/sl4a_manager.py
@@ -0,0 +1,327 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import threading
+import time
+
+from mobly import logger
+
+from antlion.controllers.sl4a_lib import error_reporter, rpc_client, sl4a_session
+
ATTEMPT_INTERVAL = 0.25
MAX_WAIT_ON_SERVER_SECONDS = 5

SL4A_PKG_NAME = "com.googlecode.android_scripting"

_SL4A_LAUNCH_SERVER_CMD = (
    "am startservice -a com.googlecode.android_scripting.action.LAUNCH_SERVER "
    "--ei com.googlecode.android_scripting.extra.USE_SERVICE_PORT %s "
    "com.googlecode.android_scripting/.service.ScriptingLayerService"
)

_SL4A_CLOSE_SERVER_CMD = (
    "am startservice -a com.googlecode.android_scripting.action.KILL_PROCESS "
    "--ei com.googlecode.android_scripting.extra.PROXY_PORT %s "
    "com.googlecode.android_scripting/.service.ScriptingLayerService"
)

# The command for finding SL4A's server port as root.
_SL4A_ROOT_FIND_PORT_CMD = (
    # Get all open, listening ports, and their process names
    "ss -l -p -n | "
    # Find all open TCP ports for SL4A
    'grep "tcp.*droid_scripting" | '
    # Shorten all whitespace to a single space character
    'tr -s " " | '
    # Grab the 5th column (which is server:port)
    'cut -d " " -f 5 |'
    # Only grab the port
    "sed s/.*://g"
)

# The command for finding SL4A's server port without root.
_SL4A_USER_FIND_PORT_CMD = (
    # Get all open, listening ports, and their process names
    "ss -l -p -n | "
    # Find all open ports exposed to the public. This can produce false
    # positives since users cannot read the process associated with the port.
    # Raw string: the backslashes escape '.' for grep, not for Python. A
    # non-raw string here produces invalid-escape SyntaxWarnings on modern
    # Python; the resulting command text is byte-identical.
    r'grep -e "tcp.*::ffff:127\.0\.0\.1:" | '
    # Shorten all whitespace to a single space character
    'tr -s " " | '
    # Grab the 5th column (which is server:port)
    'cut -d " " -f 5 |'
    # Only grab the port
    "sed s/.*://g"
)

# The command that begins the SL4A ScriptingLayerService.
_SL4A_START_SERVICE_CMD = (
    "am startservice "
    "com.googlecode.android_scripting/.service.ScriptingLayerService"
)

# Maps device serials to their SL4A Manager. This is done to prevent multiple
# Sl4aManagers from existing for the same device.
_all_sl4a_managers = {}
+
+
def create_sl4a_manager(adb):
    """Returns the SL4AManager for the given device, creating one if needed.

    At most one Sl4aManager may exist per device serial; repeated calls for
    the same device return the cached instance and log a warning.

    Args:
        adb: A reference to the device's AdbProxy.
    """
    existing = _all_sl4a_managers.get(adb.serial)
    if existing is not None:
        existing.log.warning(
            "Attempted to return multiple SL4AManagers on the same device. "
            "Returning pre-existing SL4AManager instead."
        )
        return existing
    manager = Sl4aManager(adb)
    _all_sl4a_managers[adb.serial] = manager
    return manager
+
+
class Sl4aManager(object):
    """A manager for SL4A Clients to a given AndroidDevice.

    SL4A is a single APK that can host multiple RPC servers at a time. This
    class manages each server connection over ADB, and will gracefully
    terminate the apk during cleanup.

    Attributes:
        _listen_for_port_lock: A lock for preventing multiple threads from
            potentially mixing up requested ports.
        _sl4a_ports: A set of all known SL4A server ports in use. Ports are
            stored as strings, exactly as parsed from device shell output.
        _started: True once start_sl4a_service() has run; reset by
            stop_service().
        adb: A reference to the AndroidDevice's AdbProxy.
        error_reporter: The ErrorReporter used by diagnose_failure().
        log: The logger for this object.
        sessions: A dictionary of session_ids to sessions.
    """

    def __init__(self, adb):
        self._listen_for_port_lock = threading.Lock()
        self._sl4a_ports = set()
        self.adb = adb
        # Prefix all log lines with the manager identity and device serial.
        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4A Manager|{adb.serial}]",
            },
        )

        self.sessions = {}
        self._started = False
        self.error_reporter = error_reporter.ErrorReporter(f"SL4A {adb.serial}")

    @property
    def sl4a_ports_in_use(self):
        """Returns the set of all server ports used by SL4A servers."""
        return set([session.server_port for session in self.sessions.values()])

    def diagnose_failure(self, session, connection):
        """Diagnoses all potential known reasons SL4A can fail.

        Assumes the failure happened on an RPC call, which verifies the state
        of ADB/device."""
        self.error_reporter.create_error_report(self, session, connection)

    def start_sl4a_server(self, device_port, try_interval=ATTEMPT_INTERVAL):
        """Opens a server socket connection on SL4A.

        Args:
            device_port: The expected port for SL4A to open on. Note that in
                many cases, this will be different than the port returned by
                this method.
            try_interval: The amount of seconds between attempts at finding an
                opened port on the AndroidDevice.

        Returns:
            The port number on the device the SL4A server is open on.

        Raises:
            Sl4aConnectionError if SL4A's opened port cannot be found.
        """
        # Launch a server through SL4A.
        self.adb.shell(_SL4A_LAUNCH_SERVER_CMD % device_port)

        # There is a chance that the server has not come up yet by the time the
        # launch command has finished. Try to read the listening port again
        # after a small amount of time.
        time_left = MAX_WAIT_ON_SERVER_SECONDS
        while time_left > 0:
            port = self._get_open_listening_port()
            if port is None:
                time.sleep(try_interval)
                time_left -= try_interval
            else:
                return port

        raise rpc_client.Sl4aConnectionError(
            "Unable to find a valid open port for a new server connection. "
            "Expected port: %s. Open ports: %s" % (device_port, self._sl4a_ports)
        )

    def _get_all_ports_command(self):
        """Returns the shell command used to list candidate SL4A ports.

        Root access allows filtering by the SL4A process name; without root a
        less precise command is used, which can produce false positives.
        """
        is_root = True
        if not self.adb.is_root():
            is_root = self.adb.ensure_root()

        if is_root:
            return _SL4A_ROOT_FIND_PORT_CMD
        else:
            # TODO(markdr): When root is unavailable, search logcat output for
            #               the port the server has opened.
            self.log.warning(
                "Device cannot be put into root mode. SL4A "
                "server connections cannot be verified."
            )
            return _SL4A_USER_FIND_PORT_CMD

    def _get_all_ports(self):
        """Returns all candidate SL4A ports (as strings) from the device."""
        return self.adb.shell(self._get_all_ports_command()).split()

    def _get_open_listening_port(self):
        """Returns any open, listening port found for SL4A.

        Will return none if no port is found.
        """
        possible_ports = self._get_all_ports()
        self.log.debug(f"SL4A Ports found: {possible_ports}")

        # Acquire the lock. We lock this method because if multiple threads
        # attempt to get a server at the same time, they can potentially find
        # the same port as being open, and both attempt to connect to it.
        with self._listen_for_port_lock:
            for port in possible_ports:
                if port not in self._sl4a_ports:
                    # _sl4a_ports tracks the string form of the port; callers
                    # receive the numeric form.
                    self._sl4a_ports.add(port)
                    return int(port)
        return None

    def is_sl4a_installed(self):
        """Returns True if SL4A is installed on the AndroidDevice."""
        return bool(self.adb.shell(f"pm path {SL4A_PKG_NAME}", ignore_status=True))

    def start_sl4a_service(self):
        """Starts the SL4A Service on the device.

        For starting an RPC server, use start_sl4a_server() instead.
        """
        # Verify SL4A is installed.
        if not self._started:
            self._started = True
            if not self.is_sl4a_installed():
                raise rpc_client.Sl4aNotInstalledError(
                    f"SL4A is not installed on device {self.adb.serial}"
                )
            if self.adb.shell(f'(ps | grep "S {SL4A_PKG_NAME}") || true'):
                # Close all SL4A servers not opened by this manager.
                # TODO(markdr): revert back to closing all ports after
                # b/76147680 is resolved.
                self.adb.shell(f"kill -9 $(pidof {SL4A_PKG_NAME})")
            self.adb.shell('settings put global hidden_api_blacklist_exemptions "*"')
            # Start the service if it is not up already.
            self.adb.shell(_SL4A_START_SERVICE_CMD)

    def obtain_sl4a_server(self, server_port):
        """Obtain an SL4A server port.

        If the port is open and valid, return it. Otherwise, open a new server
        with the hinted server_port.
        """
        if server_port not in self.sl4a_ports_in_use:
            return self.start_sl4a_server(server_port)
        else:
            return server_port

    def create_session(
        self, max_connections=None, client_port=0, forwarded_port=0, server_port=None
    ):
        """Creates an SL4A server with the given ports if possible.

        The ports are not guaranteed to be available for use. If the port
        asked for is not available, this will be logged, and the port will
        be randomized.

        Args:
            client_port: The client port on the host machine
            forwarded_port: The server port on the host machine forwarded
                            by adb from the Android device
            server_port: The port on the Android device.
            max_connections: The max number of client connections for the
                session.

        Returns:
            A new Sl4aSession instance.
        """
        if server_port is None:
            # If a session already exists, use the same server.
            if len(self.sessions) > 0:
                server_port = self.sessions[sorted(self.sessions.keys())[0]].server_port
            # Otherwise, open a new server on a random port.
            else:
                server_port = 0
        self.log.debug(
            "Creating SL4A session client_port={}, forwarded_port={}, server_port={}".format(
                client_port, forwarded_port, server_port
            )
        )
        self.start_sl4a_service()
        session = sl4a_session.Sl4aSession(
            self.adb,
            client_port,
            server_port,
            self.obtain_sl4a_server,
            self.diagnose_failure,
            forwarded_port,
            max_connections=max_connections,
        )
        self.sessions[session.uid] = session
        return session

    def stop_service(self):
        """Stops The SL4A Service. Force-stops the SL4A apk."""
        try:
            self.adb.shell(f"am force-stop {SL4A_PKG_NAME}", ignore_status=True)
        except Exception as e:
            self.log.warning("Fail to stop package %s: %s", SL4A_PKG_NAME, e)
        self._started = False

    def terminate_all_sessions(self):
        """Terminates all SL4A sessions gracefully."""
        self.error_reporter.finalize_reports()
        for _, session in self.sessions.items():
            session.terminate()
        self.sessions = {}
        self._close_all_ports()

    def _close_all_ports(self, try_interval=ATTEMPT_INTERVAL):
        """Closes all ports opened on SL4A."""
        ports = self._get_all_ports()
        # Close both the ports tracked by this manager and those currently
        # reported by the device (both hold string ports).
        for port in set.union(self._sl4a_ports, ports):
            self.adb.shell(_SL4A_CLOSE_SERVER_CMD % port)
        time_left = MAX_WAIT_ON_SERVER_SECONDS
        while time_left > 0 and self._get_open_listening_port():
            time.sleep(try_interval)
            time_left -= try_interval

        if time_left <= 0:
            # NOTE(review): _get_open_listening_port() returns a single port,
            # so this warning lists at most one of the still-open ports.
            self.log.warning(
                "Unable to close all un-managed servers! Server ports that are "
                "still open are %s" % self._get_open_listening_port()
            )
        self._sl4a_ports = set()
diff --git a/packages/antlion/controllers/sl4a_lib/sl4a_ports.py b/packages/antlion/controllers/sl4a_lib/sl4a_ports.py
new file mode 100644
index 0000000..66306f2
--- /dev/null
+++ b/packages/antlion/controllers/sl4a_lib/sl4a_ports.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
class Sl4aPorts(object):
    """A container for the three ports needed for an SL4A connection.

    Attributes:
        client_port: The port on the host associated with the SL4A client
        forwarded_port: The port forwarded to the Android device.
        server_port: The port on the device associated with the SL4A server.
    """

    def __init__(
        self, client_port: int = 0, forwarded_port: int = 0, server_port: int = 0
    ) -> None:
        self.client_port = client_port
        self.forwarded_port = forwarded_port
        self.server_port = server_port

    def __str__(self) -> str:
        return f"({self.client_port}, {self.forwarded_port}, {self.server_port})"

    def __repr__(self) -> str:
        # Mirror __str__ so debug logs and interactive sessions show the port
        # triple instead of the default object address.
        return self.__str__()
diff --git a/packages/antlion/controllers/sl4a_lib/sl4a_session.py b/packages/antlion/controllers/sl4a_lib/sl4a_session.py
new file mode 100644
index 0000000..6ecf2fd
--- /dev/null
+++ b/packages/antlion/controllers/sl4a_lib/sl4a_session.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import errno
+import logging
+import socket
+import threading
+
+from mobly import logger
+
+from antlion.controllers.adb_lib.error import AdbError
+from antlion.controllers.sl4a_lib import (
+    event_dispatcher,
+    rpc_client,
+    rpc_connection,
+    sl4a_ports,
+)
+from antlion.controllers.sl4a_lib.rpc_client import Sl4aStartError
+
+SOCKET_TIMEOUT = 60
+
+# The SL4A Session UID when a UID has not been received yet.
+UNKNOWN_UID = -1
+
+
class Sl4aSession(object):
    """An object that tracks the state of an SL4A Session.

    Attributes:
        _event_dispatcher: The EventDispatcher instance, if any, for this
            session.
        _terminate_lock: A lock that prevents race conditions for multiple
            threads calling terminate()
        _terminated: A bool that stores whether or not this session has been
            terminated. Terminated sessions cannot be restarted.
        adb: A reference to the AndroidDevice's AdbProxy.
        log: The logger for this Sl4aSession
        server_port: The SL4A server port this session is established on.
        uid: The uid that corresponds to the SL4A Server's session id. This
            value is only unique during the lifetime of the SL4A apk.
    """

    def __init__(
        self,
        adb,
        host_port,
        device_port,
        get_server_port_func,
        on_error_callback,
        forwarded_port=0,
        max_connections=None,
    ):
        """Creates an SL4A Session.

        Args:
            adb: A reference to the adb proxy
            get_server_port_func: A lambda (int) that returns the corrected
                server port. The int passed in hints at which port to use, if
                possible.
            host_port: The port the host machine uses to connect to the SL4A
                server for its first connection.
            device_port: The SL4A server port to be used as a hint for which
                SL4A server to connect to.
            on_error_callback: Called as (session, connection) when an RPC
                failure needs to be diagnosed.
            forwarded_port: The server port on host machine forwarded by adb
                            from Android device to accept SL4A connection
            max_connections: Forwarded to rpc_client.RpcClient as the maximum
                number of client connections.
        """
        self._event_dispatcher = None
        self._terminate_lock = threading.Lock()
        self._terminated = False
        self.adb = adb
        self.uid = UNKNOWN_UID

        # NOTE(review): the prefix captures self.uid at construction time
        # (always UNKNOWN_UID here); it is not refreshed once the real uid is
        # learned in _create_rpc_connection.
        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4A Session|{self.adb.serial}|{self.uid}]",
            },
        )

        self.forwarded_port = forwarded_port
        self.server_port = device_port
        self.obtain_server_port = get_server_port_func
        self._on_error_callback = on_error_callback

        connection_creator = self._rpc_connection_creator(host_port)
        self.rpc_client = rpc_client.RpcClient(
            self.uid,
            self.adb.serial,
            self.diagnose_failure,
            connection_creator,
            max_connections=max_connections,
        )

    def _rpc_connection_creator(self, host_port):
        """Returns a callable (uid) that opens a new RPC connection."""

        def create_client(uid):
            return self._create_rpc_connection(
                ports=sl4a_ports.Sl4aPorts(
                    host_port, self.forwarded_port, self.server_port
                ),
                uid=uid,
            )

        return create_client

    @property
    def is_alive(self):
        """True while this session has not been terminated."""
        return not self._terminated

    def _create_forwarded_port(self, server_port, hinted_port=0):
        """Creates a forwarded port to the specified server port.

        Args:
            server_port: (int) The port to forward to.
            hinted_port: (int) The port to use for forwarding, if available.
                         Otherwise, the chosen port will be random.
        Returns:
            The chosen forwarded port.

        Raises AdbError if the version of ADB is too old, or the command fails.
        """
        if self.adb.get_version_number() < 37 and hinted_port == 0:
            self.log.error(
                "The current version of ADB does not automatically provide a "
                "port to forward. Please upgrade ADB to version 1.0.37 or "
                "higher."
            )
            raise Sl4aStartError("Unable to forward a port to the device.")
        else:
            try:
                return self.adb.tcp_forward(hinted_port, server_port)
            except AdbError as e:
                if "cannot bind listener" in e.stderr:
                    self.log.warning(
                        "Unable to use %s to forward to device port %s due to: "
                        '"%s". Attempting to choose a random port instead.'
                        % (hinted_port, server_port, e.stderr)
                    )
                    # Call this method again, but this time with no hinted port.
                    return self._create_forwarded_port(server_port)
                raise e

    def _create_rpc_connection(self, ports=None, uid=UNKNOWN_UID):
        """Creates an RPC Connection with the specified ports.

        Args:
            ports: A Sl4aPorts object or a tuple of (host/client_port,
                   forwarded_port, device/server_port). If any of these are
                   zero, the OS will determine their values during connection.

                   Note that these ports are only suggestions. If they are not
                   available, a different port will be selected.
            uid: The UID of the SL4A Session. To create a new session, use
                 UNKNOWN_UID.
        Returns:
            An opened rpc_connection.RpcConnection.
        """
        if ports is None:
            ports = sl4a_ports.Sl4aPorts(0, 0, 0)
        # Open a new server if a server cannot be inferred.
        ports.server_port = self.obtain_server_port(ports.server_port)
        self.server_port = ports.server_port
        # Forward the device port to the host.
        ports.forwarded_port = self._create_forwarded_port(
            ports.server_port, hinted_port=ports.forwarded_port
        )
        client_socket, fd = self._create_client_side_connection(ports)
        client = rpc_connection.RpcConnection(
            self.adb, ports, client_socket, fd, uid=uid
        )
        client.open()
        if uid == UNKNOWN_UID:
            self.uid = client.uid
        return client

    def diagnose_failure(self, connection):
        """Diagnoses any problems related to the SL4A session."""
        self._on_error_callback(self, connection)

    def get_event_dispatcher(self):
        """Returns the EventDispatcher for this Sl4aSession."""
        # Lazily created on first use and cached for the session's lifetime.
        if self._event_dispatcher is None:
            self._event_dispatcher = event_dispatcher.EventDispatcher(
                self.adb.serial, self.rpc_client
            )
        return self._event_dispatcher

    def _create_client_side_connection(self, ports):
        """Creates and connects the client socket to the forward device port.

        Args:
            ports: A Sl4aPorts object or a tuple of (host_port,
            forwarded_port, device_port).

        Returns:
            A tuple of (socket, socket_file_descriptor).
        """
        client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        client_socket.settimeout(SOCKET_TIMEOUT)
        client_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if ports.client_port != 0:
            try:
                client_socket.bind((socket.gethostname(), ports.client_port))
            except OSError as e:
                # If the port is in use, log and ask for any open port.
                if e.errno == errno.EADDRINUSE:
                    self.log.warning(
                        "Port %s is already in use on the host. "
                        "Generating a random port." % ports.client_port
                    )
                    ports.client_port = 0
                    return self._create_client_side_connection(ports)
                raise

        # Verify and obtain the port opened by SL4A.
        try:
            # Connect to the port that has been forwarded to the device.
            client_socket.connect(("127.0.0.1", ports.forwarded_port))
        except socket.timeout:
            raise rpc_client.Sl4aConnectionError(
                "SL4A has not connected over the specified port within the "
                "timeout of %s seconds." % SOCKET_TIMEOUT
            )
        except socket.error as e:
            # In extreme, unlikely cases, a socket error with
            # errno.EADDRNOTAVAIL can be raised when a desired host_port is
            # taken by a separate program between the bind and connect calls.
            # Note that if host_port is set to zero, there is no bind before
            # the connection is made, so this error will never be thrown.
            if e.errno == errno.EADDRNOTAVAIL:
                ports.client_port = 0
                return self._create_client_side_connection(ports)
            raise
        ports.client_port = client_socket.getsockname()[1]
        # "brw" yields a buffered binary read/write file object over the
        # socket.
        return client_socket, client_socket.makefile(mode="brw")

    def terminate(self):
        """Terminates the session.

        The return of process execution is blocked on completion of all events
        being processed by handlers in the Event Dispatcher.
        """
        with self._terminate_lock:
            if not self._terminated:
                self.log.debug("Terminating Session.")
                try:
                    self.rpc_client.closeSl4aSession()
                except Exception as e:
                    if "SL4A session has already been terminated" not in str(e):
                        self.log.warning(e)
                # Must be set after closeSl4aSession so the rpc_client does not
                # think the session has closed.
                self._terminated = True
                if self._event_dispatcher:
                    try:
                        self._event_dispatcher.close()
                    except Exception as e:
                        self.log.warning(e)
                try:
                    self.rpc_client.terminate()
                except Exception as e:
                    self.log.warning(e)
diff --git a/packages/antlion/controllers/sniffer.py b/packages/antlion/controllers/sniffer.py
new file mode 100644
index 0000000..e87a547
--- /dev/null
+++ b/packages/antlion/controllers/sniffer.py
@@ -0,0 +1,293 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import importlib
+import logging
+
+MOBLY_CONTROLLER_CONFIG_NAME = "Sniffer"
+ACTS_CONTROLLER_REFERENCE_NAME = "sniffers"
+
+
def create(configs):
    """Initializes the sniffer structures based on the JSON configuration.

    Expected keys for each entry:

    Type: A first-level type of sniffer. Planned to be 'local' for sniffers
        running on the local machine, or 'remote' for sniffers running
        remotely.
    SubType: The specific sniffer type to be used.
    Interface: The WLAN interface used to configure the sniffer.
    BaseConfigs: A dictionary specifying baseline configurations of the
        sniffer. Configurations can be overridden when starting a capture.
        The keys must be one of the Sniffer.CONFIG_KEY_* values.
    """
    sniffers = []
    for config in configs:
        sniffer_type = config["Type"]
        sniffer_subtype = config["SubType"]
        interface = config["Interface"]
        base_configs = config["BaseConfigs"]
        module = importlib.import_module(
            f"antlion.controllers.sniffer_lib.{sniffer_type}.{sniffer_subtype}"
        )
        sniffers.append(
            module.Sniffer(interface, logging.getLogger(), base_configs=base_configs)
        )
    return sniffers
+
+
def destroy(objs):
    """Stops every sniffer, terminating any ongoing capture sessions.

    Best-effort teardown: a sniffer without an active capture may raise
    SnifferError, which is ignored.
    """
    for obj in objs:
        try:
            obj.stop_capture()
        except SnifferError:
            pass
+
+
class SnifferError(Exception):
    """This is the Exception class defined for all errors generated by
    Sniffer-related modules. Base class for ExecutionError and
    InvalidOperationError.
    """
+
+
# NOTE(review): unlike the other errors in this module, this derives from
# Exception rather than SnifferError, so destroy() will not swallow it —
# confirm this is intentional.
class InvalidDataError(Exception):
    """This exception is thrown when invalid configuration data is passed
    to a method.
    """
+
+
class ExecutionError(SnifferError):
    """This exception is thrown when trying to configure the capture device
    or when trying to execute the capture operation.

    When this exception is seen, it is possible that the sniffer module is run
    without sudo (for local sniffers) or keys are out-of-date (for remote
    sniffers).
    """
+
+
class InvalidOperationError(SnifferError):
    """Certain methods may only be accessed when the instance upon which they
    are invoked is in a certain state. This indicates that the object is not
    in the correct state for a method to be called.
    """
+
+
+class Sniffer(object):
+    """This class defines an object representing a sniffer.
+
+    The object defines the generic behavior of sniffers - irrespective of how
+    they are implemented, or where they are located: on the local machine or on
+    the remote machine.
+    """
+
+    CONFIG_KEY_CHANNEL = "channel"
+
    def __init__(self, interface, logger, base_configs=None):
        """The constructor for the Sniffer. It constructs a sniffer and
        configures it to be ready for capture.

        Abstract: this base implementation always raises; subclasses must
        override it.

        Args:
            interface: A string specifying the interface used to configure the
                sniffer.
            logger: ACTS logger object.
            base_configs: A dictionary containing baseline configurations of the
                sniffer. These can be overridden when starting a capture. The
                keys are specified by Sniffer.CONFIG_KEY_*.

        Raises:
            NotImplementedError: always, on this base class.
            InvalidDataError: (subclasses) if the config_path is invalid.
            NoPermissionError: (subclasses) if an error occurs while
                configuring the sniffer.
        """
        raise NotImplementedError("Base class should not be called directly!")
+
    def get_descriptor(self):
        """This function returns a string describing the sniffer. The specific
        string (and its format) is up to each derived sniffer type.

        Returns:
            A string describing the sniffer.

        Raises:
            NotImplementedError: always; subclasses must override.
        """
        raise NotImplementedError("Base class should not be called directly!")
+
    def get_type(self):
        """This function returns the type of the sniffer.

        Returns:
            The type (string) of the sniffer. Corresponds to the 'Type' key of
            the sniffer configuration.

        Raises:
            NotImplementedError: always; subclasses must override.
        """
        raise NotImplementedError("Base class should not be called directly!")
+
    def get_subtype(self):
        """This function returns the sub-type of the sniffer.

        Returns:
            The sub-type (string) of the sniffer. Corresponds to the 'SubType'
            key of the sniffer configuration.

        Raises:
            NotImplementedError: always; subclasses must override.
        """
        raise NotImplementedError("Base class should not be called directly!")
+
    def get_interface(self):
        """This function returns The interface used to configure the sniffer,
        e.g. 'wlan0'.

        Returns:
            The interface (string) used to configure the sniffer. Corresponds to
            the 'Interface' key of the sniffer configuration.

        Raises:
            NotImplementedError: always; subclasses must override.
        """
        raise NotImplementedError("Base class should not be called directly!")
+
    def get_capture_file(self):
        """The sniffer places a capture in the logger directory. This function
        enables the caller to obtain the path of that capture.

        Returns:
            The full path of the current or last capture.

        Raises:
            NotImplementedError: always; subclasses must override.
        """
        raise NotImplementedError("Base class should not be called directly!")
+
+    def start_capture(
+        self,
+        override_configs=None,
+        additional_args=None,
+        duration=None,
+        packet_count=None,
+    ):
+        """This function starts a capture which is saved to the specified file
+        path.
+
+        Depending on the type/subtype and configuration of the sniffer the
+        capture may terminate on its own or may require an explicit call to the
+        stop_capture() function.
+
+        This is a non-blocking function so a terminating function must be
+        called - either explicitly or implicitly:
+        - Explicitly: call either stop_capture() or wait_for_capture()
+        - Implicitly: use with a with clause. The wait_for_capture() function
+                      will be called if a duration is specified (i.e. is not
+                      None), otherwise a stop_capture() will be called.
+
+        The capture is saved to a file in the log path of the logger. Use
+        the get_capture_file() to get the full path to the current or most
+        recent capture.
+
+        Args:
+            override_configs: A dictionary which is combined with the
+                base_configs ("BaseConfigs" in the sniffer configuration). The
+                keys (specified by Sniffer.CONFIG_KEY_*) determine the
+                configuration of the sniffer for this specific capture.
+            additional_args: A string specifying additional raw
+                command-line arguments to pass to the underlying sniffer. The
+                interpretation of these flags is sniffer-dependent.
+            duration: An integer specifying the number of seconds over which to
+                capture packets. The sniffer will be terminated after this
+                duration. Used in implicit mode when using a 'with' clause. In
+                explicit control cases may have to be performed using a
+                sleep+stop or as the timeout argument to the wait function.
+            packet_count: An integer specifying the number of packets to capture
+                before terminating. Should be used with duration to guarantee
+                that capture terminates at some point (even if did not capture
+                the specified number of packets).
+
+        Returns:
+            An ActiveCaptureContext process which can be used with a 'with'
+            clause.
+
+        Raises:
+            InvalidDataError: for invalid configurations
+            NoPermissionError: if an error occurs while configuring and running
+                the sniffer.
+        """
+        raise NotImplementedError("Base class should not be called directly!")
+
+    def stop_capture(self):
+        """This function stops a capture and guarantees that the capture is
+        saved to the capture file configured during the start_capture() method.
+        Depending on the type of the sniffer the file may previously contain
+        partial results (e.g. for a local sniffer) or may not exist until the
+        stop_capture() method is executed (e.g. for a remote sniffer).
+
+        Depending on the type/subtype and configuration of the sniffer the
+        capture may terminate on its own without requiring a call to this
+        function. In such a case it is still necessary to call either this
+        function or the wait_for_capture() function to make sure that the
+        capture file is moved to the correct location.
+
+        Raises:
+            NoPermissionError: No permission when trying to stop a capture
+                and save the capture file.
+        """
+        raise NotImplementedError("Base class should not be called directly!")
+
+    def wait_for_capture(self, timeout=None):
+        """This function waits for a capture to terminate and guarantees that
+        the capture is saved to the capture file configured during the
+        start_capture() method. Depending on the type of the sniffer the file
+        may previously contain partial results (e.g. for a local sniffer) or
+        may not exist until the stop_capture() method is executed (e.g. for a
+        remote sniffer).
+
+        Depending on the type/subtype and configuration of the sniffer the
+        capture may terminate on its own without requiring a call to this
+        function. In such a case it is still necessary to call either this
+        function or the stop_capture() function to make sure that the capture
+        file is moved to the correct location.
+
+        Args:
+            timeout: An integer specifying the number of seconds to wait for
+                the capture to terminate on its own. On expiration of the
+                timeout the sniffer is stopped explicitly using the
+                stop_capture() function.
+
+        Raises:
+            NoPermissionError: No permission when trying to stop a capture and
+                save the capture file.
+        """
+        raise NotImplementedError("Base class should not be called directly!")
+
+
+class ActiveCaptureContext(object):
+    """This class defines an object representing an active sniffer capture.
+
+    The object is returned by a Sniffer.start_capture() command and terminates
+    the capture when the 'with' clause exits. It is syntactic sugar for
+    try/finally.
+    """
+
+    _sniffer = None
+    _timeout = None
+
+    def __init__(self, sniffer, timeout=None):
+        self._sniffer = sniffer
+        self._timeout = timeout
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, type, value, traceback):
+        if self._sniffer is not None:
+            if self._timeout is None:
+                self._sniffer.stop_capture()
+            else:
+                self._sniffer.wait_for_capture(self._timeout)
+        self._sniffer = None
diff --git a/src/antlion/controllers/sniffer_lib/__init__.py b/packages/antlion/controllers/sniffer_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/sniffer_lib/__init__.py
rename to packages/antlion/controllers/sniffer_lib/__init__.py
diff --git a/src/antlion/controllers/sniffer_lib/local/__init__.py b/packages/antlion/controllers/sniffer_lib/local/__init__.py
similarity index 100%
rename from src/antlion/controllers/sniffer_lib/local/__init__.py
rename to packages/antlion/controllers/sniffer_lib/local/__init__.py
diff --git a/packages/antlion/controllers/sniffer_lib/local/local_base.py b/packages/antlion/controllers/sniffer_lib/local/local_base.py
new file mode 100644
index 0000000..a037d2f
--- /dev/null
+++ b/packages/antlion/controllers/sniffer_lib/local/local_base.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Class for Local sniffers - i.e. running on the local machine.
+
+This class provides configuration for local interfaces but leaves
+the actual capture (sniff) to sub-classes.
+"""
+
+import os
+import shutil
+import signal
+import subprocess
+import tempfile
+
+from antlion import logger, utils
+from antlion.controllers import sniffer
+
+
+class SnifferLocalBase(sniffer.Sniffer):
+    """This class defines the common behaviors of WLAN sniffers running on
+    WLAN interfaces of the local machine.
+
+    Specific mechanisms to capture packets over the local WLAN interfaces are
+    implemented by sub-classes of this class - i.e. it is not a final class.
+    """
+
+    def __init__(self, interface, logger, base_configs=None):
+        """See base class documentation"""
+        self._base_configs = None
+        self._capture_file_path = ""
+        self._interface = ""
+        self._logger = logger
+        self._process = None
+        self._temp_capture_file_path = ""
+
+        if interface == "":
+            raise sniffer.InvalidDataError("Empty interface provided")
+        self._interface = interface
+        self._base_configs = base_configs
+
+        # Put the interface into monitor mode (down -> monitor -> up).
+        # NOTE(review): requires 'ifconfig'/'iwconfig' on PATH and sufficient
+        # privileges - confirm on the target host.
+        try:
+            utils.exe_cmd("ifconfig", self._interface, "down")
+            utils.exe_cmd("iwconfig", self._interface, "mode", "monitor")
+            utils.exe_cmd("ifconfig", self._interface, "up")
+        except Exception as err:
+            raise sniffer.ExecutionError(err)
+
+    def get_interface(self):
+        """See base class documentation"""
+        return self._interface
+
+    def get_type(self):
+        """See base class documentation"""
+        return "local"
+
+    def get_capture_file(self):
+        """See base class documentation"""
+        return self._capture_file_path
+
+    def _pre_capture_config(self, override_configs=None):
+        """Utility function which configures the wireless interface per the
+        specified configurations. Operation is performed before every capture
+        start using baseline configurations (specified when sniffer initialized)
+        and override configurations specified here.
+
+        Args:
+            override_configs: Optional dictionary merged over the baseline
+                configurations; overlapping keys take the override value.
+        """
+        # Overrides win over the baseline configuration.
+        final_configs = {}
+        if self._base_configs:
+            final_configs.update(self._base_configs)
+        if override_configs:
+            final_configs.update(override_configs)
+
+        # Only the channel key is acted upon here; other keys are ignored.
+        if sniffer.Sniffer.CONFIG_KEY_CHANNEL in final_configs:
+            try:
+                utils.exe_cmd(
+                    "iwconfig",
+                    self._interface,
+                    "channel",
+                    str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL]),
+                )
+            except Exception as err:
+                raise sniffer.ExecutionError(err)
+
+    def _get_command_line(self, additional_args=None, duration=None, packet_count=None):
+        """Utility function to be implemented by every child class - which
+        are the concrete sniffer classes. Each sniffer-specific class should
+        derive the command line to execute its sniffer based on the specified
+        arguments.
+        """
+        raise NotImplementedError("Base class should not be called directly!")
+
+    def _post_process(self):
+        """Utility function which is executed after a capture is done. It
+        moves the capture file to the requested location.
+        """
+        self._process = None
+        shutil.move(self._temp_capture_file_path, self._capture_file_path)
+
+    def start_capture(
+        self,
+        override_configs=None,
+        additional_args=None,
+        duration=None,
+        packet_count=None,
+    ):
+        """See base class documentation"""
+        if self._process is not None:
+            raise sniffer.InvalidOperationError(
+                "Trying to start a sniff while another is still running!"
+            )
+        # Captures are grouped per interface under the logger's log path.
+        capture_dir = os.path.join(self._logger.log_path, f"Sniffer-{self._interface}")
+        os.makedirs(capture_dir, exist_ok=True)
+        self._capture_file_path = os.path.join(
+            capture_dir, f"capture_{logger.get_log_file_timestamp()}.pcap"
+        )
+
+        self._pre_capture_config(override_configs)
+        # Capture into a temp file first; _post_process() moves it into place.
+        _, self._temp_capture_file_path = tempfile.mkstemp(suffix=".pcap")
+
+        cmd = self._get_command_line(
+            additional_args=additional_args,
+            duration=duration,
+            packet_count=packet_count,
+        )
+
+        self._process = utils.start_standing_subprocess(cmd)
+        return sniffer.ActiveCaptureContext(self, duration)
+
+    def stop_capture(self):
+        """See base class documentation"""
+        if self._process is None:
+            raise sniffer.InvalidOperationError("Trying to stop a non-started process")
+        # SIGINT lets the sniffer flush and close its capture file cleanly.
+        utils.stop_standing_subprocess(self._process, kill_signal=signal.SIGINT)
+        self._post_process()
+
+    def wait_for_capture(self, timeout=None):
+        """See base class documentation"""
+        if self._process is None:
+            raise sniffer.InvalidOperationError(
+                "Trying to wait on a non-started process"
+            )
+        try:
+            utils.wait_for_standing_subprocess(self._process, timeout)
+            self._post_process()
+        except subprocess.TimeoutExpired:
+            # The capture did not finish within the timeout; stop it explicitly.
+            self.stop_capture()
diff --git a/packages/antlion/controllers/sniffer_lib/local/tcpdump.py b/packages/antlion/controllers/sniffer_lib/local/tcpdump.py
new file mode 100644
index 0000000..326adc1
--- /dev/null
+++ b/packages/antlion/controllers/sniffer_lib/local/tcpdump.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import shutil
+
+from antlion.controllers import sniffer
+from antlion.controllers.sniffer_lib.local import local_base
+
+
+class Sniffer(local_base.SnifferLocalBase):
+    """This class defines a sniffer which uses tcpdump as its back-end"""
+
+    def __init__(self, config_path, logger, base_configs=None):
+        """See base class documentation"""
+        self._executable_path = None
+
+        super(local_base.SnifferLocalBase).__init__(
+            config_path, logger, base_configs=base_configs
+        )
+
+        self._executable_path = shutil.which("tcpdump")
+        if self._executable_path is None:
+            raise sniffer.SnifferError("Cannot find a path to the 'tcpdump' executable")
+
+    def get_descriptor(self):
+        """See base class documentation"""
+        return f"local-tcpdump-{self._interface}"
+
+    def get_subtype(self):
+        """See base class documentation"""
+        return "tcpdump"
+
+    def _get_command_line(self, additional_args=None, duration=None, packet_count=None):
+        cmd = "{} -i {} -w {}".format(
+            self._executable_path, self._interface, self._temp_capture_file_path
+        )
+        if packet_count is not None:
+            cmd = f"{cmd} -c {packet_count}"
+        if additional_args is not None:
+            cmd = f"{cmd} {additional_args}"
+        return cmd
diff --git a/packages/antlion/controllers/sniffer_lib/local/tshark.py b/packages/antlion/controllers/sniffer_lib/local/tshark.py
new file mode 100644
index 0000000..b873fe8
--- /dev/null
+++ b/packages/antlion/controllers/sniffer_lib/local/tshark.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import shutil
+
+from antlion.controllers import sniffer
+from antlion.controllers.sniffer_lib.local import local_base
+
+
+class Sniffer(local_base.SnifferLocalBase):
+    """This class defines a sniffer which uses tshark as its back-end"""
+
+    def __init__(self, config_path, logger, base_configs=None):
+        """See base class documentation"""
+        self._executable_path = None
+
+        super().__init__(config_path, logger, base_configs=base_configs)
+
+        self._executable_path = shutil.which("tshark") or shutil.which(
+            "/usr/local/bin/tshark"
+        )
+        if self._executable_path is None:
+            raise sniffer.SnifferError(
+                "Cannot find a path to the 'tshark' "
+                "executable (or to '/usr/local/bin/tshark')"
+            )
+
+    def get_descriptor(self):
+        """See base class documentation"""
+        return "local-tshark-{}-ch{}".format(self._interface)
+
+    def get_subtype(self):
+        """See base class documentation"""
+        return "tshark"
+
+    def _get_command_line(self, additional_args=None, duration=None, packet_count=None):
+        cmd = "{} -i {} -w {}".format(
+            self._executable_path, self._interface, self._temp_capture_file_path
+        )
+        if duration is not None:
+            cmd = f"{cmd} -a duration:{duration}"
+        if packet_count is not None:
+            cmd = f"{cmd} -c {packet_count}"
+        if additional_args is not None:
+            cmd = f"{cmd} {additional_args}"
+        return cmd
diff --git a/src/antlion/controllers/utils_lib/__init__.py b/packages/antlion/controllers/utils_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/utils_lib/__init__.py
rename to packages/antlion/controllers/utils_lib/__init__.py
diff --git a/src/antlion/controllers/utils_lib/commands/__init__.py b/packages/antlion/controllers/utils_lib/commands/__init__.py
similarity index 100%
rename from src/antlion/controllers/utils_lib/commands/__init__.py
rename to packages/antlion/controllers/utils_lib/commands/__init__.py
diff --git a/packages/antlion/controllers/utils_lib/commands/command.py b/packages/antlion/controllers/utils_lib/commands/command.py
new file mode 100644
index 0000000..b664f2e
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/command.py
@@ -0,0 +1,85 @@
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Protocol, TypeVar
+
+from mobly import signals
+
+from antlion.runner import CompletedProcess, Runner
+
+
+class Command(Protocol):
+    """A runnable binary."""
+
+    def binary(self) -> str:
+        """Return the binary used for this command."""
+        ...
+
+    def available(self) -> bool:
+        """Return true if this command is available to run."""
+        ...
+
+
+# Any type implementing the Command protocol; lets require()/optional()
+# return the same concrete type they were given.
+_C = TypeVar("_C", bound=Command)
+
+
+def require(command: _C) -> _C:
+    """Require a command to be available."""
+    if command.available():
+        return command
+    raise signals.TestAbortClass(f"Required command not found: {command.binary()}")
+
+
+def optional(command: _C) -> _C | None:
+    """Optionally require a command to be available."""
+    if command.available():
+        return command
+    return None
+
+
+class LinuxCommand(Command):
+    """A command running on a Linux machine."""
+
+    def __init__(self, runner: Runner, binary: str) -> None:
+        self._runner = runner
+        self._binary = binary
+        self._can_sudo = self._available("sudo")
+
+    def binary(self) -> str:
+        """Return the binary used for this command."""
+        return self._binary
+
+    def available(self) -> bool:
+        """Return true if this command is available to run."""
+        return self._available(self._binary)
+
+    def _available(self, binary: str) -> bool:
+        """Check if binary is available to run."""
+        return self._runner.run(["command", "-v", binary]).returncode == 0
+
+    def _run(
+        self, args: list[str], sudo: bool = False, ignore_status: bool = False
+    ) -> CompletedProcess:
+        """Run the command without having to specify the binary.
+
+        Args:
+            args: List of arguments to pass to the binary
+            sudo: Use sudo to execute the binary, if available
+            ignore_status: Ignore the exit code of the binary
+        """
+        if sudo and self._can_sudo:
+            cmd = ["sudo", self._binary]
+        else:
+            cmd = [self._binary]
+        return self._runner.run(cmd + args, ignore_status=ignore_status)
diff --git a/packages/antlion/controllers/utils_lib/commands/ip.py b/packages/antlion/controllers/utils_lib/commands/ip.py
new file mode 100644
index 0000000..5ecfd44
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/ip.py
@@ -0,0 +1,181 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ipaddress
+import re
+import subprocess
+from typing import Iterator
+
+from mobly import signals
+
+from antlion.controllers.utils_lib.commands.command import LinuxCommand
+from antlion.runner import CompletedProcess, Runner
+
+
+class LinuxIpCommand(LinuxCommand):
+    """Interface for doing standard IP commands on a linux system.
+
+    Wraps standard shell commands used for ip into a python object that can
+    be interacted with more easily.
+    """
+
+    def __init__(self, runner: Runner, binary: str = "ip"):
+        """Create a LinuxIpCommand.
+
+        Args:
+            runner: Runner to use to execute this command.
+            binary: Path to binary to use. Defaults to "ip".
+            sudo: Requires root permissions. Defaults to False.
+        """
+        super().__init__(runner, binary)
+
+    def get_ipv4_addresses(
+        self, net_interface: str
+    ) -> Iterator[tuple[ipaddress.IPv4Interface, ipaddress.IPv4Address | None]]:
+        """Gets all ipv4 addresses of a network interface.
+
+        Args:
+            net_interface: string, The network interface to get info on
+                           (eg. wlan0).
+
+        Returns: An iterator of tuples that contain (address, broadcast).
+                 where address is a ipaddress.IPv4Interface and broadcast
+                 is an ipaddress.IPv4Address.
+        """
+        results = self._run(["addr", "show", "dev", net_interface])
+        lines = results.stdout.splitlines()
+
+        # Example stdout:
+        # 2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP group default qlen 1000
+        #   link/ether 48:0f:cf:3c:9d:89 brd ff:ff:ff:ff:ff:ff
+        #   inet 192.168.1.1/24 brd 192.168.1.255 scope global eth0
+        #       valid_lft forever preferred_lft forever
+        #   inet6 2620:0:1000:1500:a968:a776:2d80:a8b3/64 scope global temporary dynamic
+        #       valid_lft 599919sec preferred_lft 80919sec
+
+        for line in lines:
+            line = line.strip()
+            match = re.search("inet (?P<address>[^\\s]*) brd (?P<bcast>[^\\s]*)", line)
+            if match:
+                d = match.groupdict()
+                address = ipaddress.IPv4Interface(d["address"])
+                bcast = ipaddress.IPv4Address(d["bcast"])
+                yield (address, bcast)
+
+            match = re.search("inet (?P<address>[^\\s]*)", line)
+            if match:
+                d = match.groupdict()
+                address = ipaddress.IPv4Interface(d["address"])
+                yield (address, None)
+
+    def add_ipv4_address(
+        self,
+        net_interface: str,
+        address: ipaddress.IPv4Interface,
+        broadcast: ipaddress.IPv4Address | None = None,
+    ) -> None:
+        """Adds an ipv4 address to a net_interface.
+
+        Args:
+            net_interface: The network interface to get the new ipv4 (eg. wlan0).
+            address: The new ipaddress and netmask to add to an interface.
+            broadcast: The broadcast address to use for this net_interfaces subnet.
+        """
+        args = ["addr", "add", str(address)]
+        if broadcast:
+            args += ["broadcast", str(broadcast)]
+        args += ["dev", net_interface]
+        self._run(args, sudo=True)
+
+    def remove_ipv4_address(
+        self,
+        net_interface: str,
+        address: ipaddress.IPv4Interface | ipaddress.IPv4Address,
+        ignore_status: bool = False,
+    ) -> CompletedProcess:
+        """Remove an ipv4 address.
+
+        Removes an ipv4 address from a network interface.
+
+        Args:
+            net_interface: The network interface to remove the ipv4 address from (eg. wlan0).
+            address: The ip address to remove from the net_interface.
+            ignore_status: True if the exit status can be ignored
+        Returns:
+            The job result from a the command
+        """
+        return self._run(
+            ["addr", "del", str(address), "dev", net_interface],
+            sudo=True,
+            ignore_status=ignore_status,
+        )
+
+    def set_ipv4_address(
+        self,
+        net_interface: str,
+        address: ipaddress.IPv4Interface,
+        broadcast: ipaddress.IPv4Address | None = None,
+    ) -> None:
+        """Set the ipv4 address.
+
+        Sets the ipv4 address of a network interface. If the network interface
+        has any other ipv4 addresses these will be cleared.
+
+        Args:
+            net_interface: The network interface to set the ip address on (eg. wlan0).
+            address: The ip address and subnet to give the net_interface.
+            broadcast: The broadcast address to use for the subnet.
+        """
+        self.clear_ipv4_addresses(net_interface)
+        self.add_ipv4_address(net_interface, address, broadcast)
+
+    def clear_ipv4_addresses(self, net_interface: str) -> None:
+        """Clears all ipv4 addresses registered to a net_interface.
+
+        Args:
+            net_interface: The network interface to clear addresses from (eg. wlan0).
+        """
+        ip_info = self.get_ipv4_addresses(net_interface)
+
+        for address, _ in ip_info:
+            try:
+                self.remove_ipv4_address(net_interface, address)
+            except subprocess.CalledProcessError as e:
+                if "RTNETLINK answers: Cannot assign requested address" in e.stderr:
+                    # It is possible that the address has already been removed by the
+                    # time this command has been called.
+                    addresses = [a for a, _ in self.get_ipv4_addresses(net_interface)]
+                    if address not in addresses:
+                        self._runner.log.warning(
+                            "Unable to remove address %s. The address was "
+                            "removed by another process.",
+                            address,
+                        )
+                    else:
+                        raise signals.TestError(
+                            f"Unable to remove address {address}. The address is still "
+                            f"registered to {net_interface}, despite call for removal.",
+                            extras={
+                                "stderr": e.stderr,
+                                "stdout": e.stdout,
+                                "returncode": e.returncode,
+                            },
+                        )
+                raise signals.TestError(
+                    f"Unable to remove address {address}: {e.stderr}",
+                    extras={
+                        "stdout": e.stdout,
+                        "returncode": e.returncode,
+                    },
+                )
diff --git a/packages/antlion/controllers/utils_lib/commands/journalctl.py b/packages/antlion/controllers/utils_lib/commands/journalctl.py
new file mode 100644
index 0000000..b06260a
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/journalctl.py
@@ -0,0 +1,77 @@
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from datetime import datetime
+
+from antlion.controllers.utils_lib.commands import pgrep
+from antlion.controllers.utils_lib.commands.command import LinuxCommand, require
+from antlion.logger import LogLevel
+from antlion.runner import Runner
+
+# Timestamp format accepted by systemd.
+# See https://man7.org/linux/man-pages/man7/systemd.time.7.html#PARSING_TIMESTAMPS
+SYSTEMD_TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S %Z"
+
+
+class LinuxJournalctlCommand(LinuxCommand):
+    """Print log entries from the systemd journal.
+
+    Only supported on Linux distributions using systemd.
+    """
+
+    def __init__(self, runner: Runner, binary: str = "journalctl") -> None:
+        super().__init__(runner, binary)
+        # pgrep is used by available() to verify systemd-journal is running.
+        self._pgrep = require(pgrep.LinuxPgrepCommand(runner))
+        # Timestamp of the previous logs() call; None means "never ran".
+        self._last_ran: datetime | None = None
+        # Logs saved by save_and_reset(), prepended to the next logs() result.
+        self._logs_before_reset: str | None = None
+
+    def available(self) -> bool:
+        """Return true if journalctl exists and systemd-journal is running."""
+        if not super().available():
+            return False
+        return self._pgrep.find("systemd-journal") is not None
+
+    def logs(self) -> str:
+        """Return log entries since the last run or current boot, in that order."""
+        if self._last_ran:
+            args = [
+                "--since",
+                self._last_ran.strftime(SYSTEMD_TIMESTAMP_FORMAT),
+            ]
+        else:
+            # First call: fetch everything from the current boot.
+            args = ["--boot"]
+
+        self._last_ran = datetime.now().astimezone()
+
+        # NOTE(review): presumably LogLevel temporarily raises the runner's
+        # logger to INFO to avoid echoing the full journal at debug level -
+        # confirm LogLevel semantics.
+        with LogLevel(self._runner.log, logging.INFO):
+            logs = self._run(args, sudo=True).stdout
+
+        if self._logs_before_reset:
+            return f"{self._logs_before_reset}\n{logs}"
+        return logs
+
+    def set_runner(self, runner: Runner) -> None:
+        """Set a new runner.
+
+        Use when underlying connection to the device refreshes.
+        """
+        # NOTE(review): self._pgrep keeps the old runner; only available()
+        # uses it, but confirm whether it should be refreshed here too.
+        self._runner = runner
+
+    def save_and_reset(self) -> None:
+        """Save logs and reset the last known run time.
+
+        Run before every reboot!
+        """
+        self._logs_before_reset = self.logs()
+        self._last_ran = None
diff --git a/packages/antlion/controllers/utils_lib/commands/pgrep.py b/packages/antlion/controllers/utils_lib/commands/pgrep.py
new file mode 100644
index 0000000..cf6f271
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/pgrep.py
@@ -0,0 +1,43 @@
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+
+from antlion.controllers.utils_lib.commands.command import LinuxCommand
+from antlion.runner import Runner
+
+
class LinuxPgrepCommand(LinuxCommand):
    """Look through current running processes."""

    def __init__(self, runner: Runner, binary: str = "pgrep") -> None:
        super().__init__(runner, binary)

    def find(self, process: str) -> list[int] | None:
        """Find a process by exact name.

        Args:
            process: Name of the process to query; matched exactly (pgrep -x).

        Returns:
            List of process IDs if running, otherwise None.

        Raises:
            subprocess.CalledProcessError: If pgrep failed for any reason
                other than finding no matching process.
        """
        try:
            result = self._run(["-x", process])
        except subprocess.CalledProcessError as e:
            if e.stdout or e.stderr:
                # pgrep should not output anything to stdout or stderr when it
                # simply finds no match; any output indicates a real failure.
                raise
            return None
        # pgrep prints one PID per line.
        return [int(line) for line in result.stdout.splitlines()]
diff --git a/packages/antlion/controllers/utils_lib/commands/route.py b/packages/antlion/controllers/utils_lib/commands/route.py
new file mode 100644
index 0000000..6493ec2
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/route.py
@@ -0,0 +1,199 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ipaddress
+import re
+import subprocess
+from typing import Iterator, Literal
+
+from mobly import signals
+
+from antlion.controllers.utils_lib.commands.command import LinuxCommand
+from antlion.runner import Runner
+
+
class Error(Exception):
    """Raised when a valid `ip` command invocation fails unexpectedly."""
+
+
class LinuxRouteCommand(LinuxCommand):
    """Interface for doing standard ip route commands on a linux system."""

    def __init__(self, runner: Runner, binary: str = "ip"):
        super().__init__(runner, binary)

    def add_route(
        self,
        net_interface: str,
        address: ipaddress.IPv4Interface | ipaddress.IPv6Interface | Literal["default"],
        proto: str = "static",
    ) -> None:
        """Add an entry to the ip routing table.

        Will add a route for either a specific ip address, or a network.

        Args:
            net_interface: Any packet that sends through this route will be sent
                using this network interface (eg. wlan0).
            address: The address to use. If a network is given then the entire
                subnet will be routed. If "default" is given then this will set the
                default route.
            proto: Routing protocol identifier of this route (e.g. kernel,
                redirect, boot, static, ra). See `man ip-route(8)` for details.

        Raises:
            signals.TestError: Raised when the route already exists or the
                network interface is down.
        """
        try:
            self._run(
                [
                    "route",
                    "add",
                    str(address),
                    "dev",
                    net_interface,
                    "proto",
                    proto,
                ],
                sudo=True,
            )
        except subprocess.CalledProcessError as e:
            if "File exists" in e.stderr:
                raise signals.TestError(
                    "Route already exists",
                    extras={
                        "stderr": e.stderr,
                        "stdout": e.stdout,
                        "returncode": e.returncode,
                    },
                ) from e
            if "Network is down" in e.stderr:
                raise signals.TestError(
                    "Device must be up for adding a route.",
                    extras={
                        "stderr": e.stderr,
                        "stdout": e.stdout,
                        "returncode": e.returncode,
                    },
                ) from e
            raise

    def get_routes(
        self, net_interface: str | None = None
    ) -> Iterator[
        tuple[
            ipaddress.IPv4Interface | ipaddress.IPv6Interface | Literal["default"], str
        ]
    ]:
        """Get the routes in the ip routing table.

        Args:
            net_interface: If given, only retrieve routes that have been
                registered to go through this network interface (eg. wlan0).

        Yields:
            Tuples of (address, net_interface). If it is the default route
            then address will be "default"; otherwise address is the
            ipaddress.IPv4Interface/IPv6Interface of the route.
        """
        result_ipv4 = self._run(["-4", "route", "show"])
        result_ipv6 = self._run(["-6", "route", "show"])

        lines = result_ipv4.stdout.splitlines() + result_ipv6.stdout.splitlines()

        # Scan through each line for valid route entries.
        # Example output:
        # default via 192.168.1.254 dev eth0  proto static
        # 192.168.1.0/24 dev eth0  proto kernel  scope link  src 172.22.100.19  metric 1
        # 192.168.2.1 dev eth2 proto kernel scope link metric 1
        # fe80::/64 dev wlan0 proto static metric 1024
        for line in lines:
            if "dev" not in line:
                continue

            if line.startswith("default"):
                # The default route entry is formatted differently.
                match = re.search(r"dev (?P<net_interface>\S+)", line)
                if not match:
                    continue

                iface = match.groupdict()["net_interface"]
                assert isinstance(iface, str)

                if net_interface and iface != net_interface:
                    continue

                # When there is a match for the route entry pattern create
                # a pair to hold the info.
                yield ("default", iface)
            else:
                # Test the normal route entry pattern.
                match = re.search(
                    r"(?P<address>[0-9A-Fa-f\.\:/]+) dev (?P<net_interface>\S+)", line
                )
                if not match:
                    continue

                # When there is a match for the route entry pattern create
                # a pair to hold the info.
                d = match.groupdict()

                address_raw = d["address"]
                assert isinstance(address_raw, str)

                iface = d["net_interface"]
                assert isinstance(iface, str)

                if net_interface and iface != net_interface:
                    continue

                yield (ipaddress.ip_interface(address_raw), iface)

    def remove_route(
        self,
        address: ipaddress.IPv4Interface | ipaddress.IPv6Interface | Literal["default"],
        net_interface: str | None = None,
    ) -> None:
        """Removes a route from the ip routing table.

        If the route does not exist nothing is done.

        Args:
            address: The address of the route to remove.
            net_interface: If specified the route being removed is registered to
                go through this network interface (eg. wlan0)

        Raises:
            signals.TestError: Raised when deleting the route fails.
        """
        try:
            args = ["route", "del", str(address)]
            if net_interface:
                args += ["dev", net_interface]
            # Match add_route: modifying the routing table requires root.
            self._run(args, sudo=True)
        except subprocess.CalledProcessError as e:
            if "RTNETLINK answers: No such process" in e.stderr:
                # The route didn't exist.
                return
            raise signals.TestError(f"Failed to delete route {address}: {e}") from e

    def clear_routes(self, net_interface: str) -> None:
        """Removes all routes registered to a network interface.

        Args:
            net_interface: The network interface to clear routes on.
        """
        for address, iface in self.get_routes(net_interface):
            self.remove_route(address, iface)
diff --git a/packages/antlion/controllers/utils_lib/commands/shell.py b/packages/antlion/controllers/utils_lib/commands/shell.py
new file mode 100644
index 0000000..0c0358e
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/shell.py
@@ -0,0 +1,247 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import shlex
+import signal
+import time
+from typing import Iterator
+
+from antlion.libs.proc import job
+from antlion.runner import CompletedProcess, Runner
+
+
+class ShellCommand(object):
+    """Wraps basic commands that tend to be tied very closely to a shell.
+
+    This class is a wrapper for running basic shell commands through
+    any object that has a run command. Basic shell functionality for managing
+    the system, programs, and files in wrapped within this class.
+
+    Note: At the moment this only works with the ssh runner.
+    """
+
+    def __init__(self, runner: Runner, working_dir: str | None = None) -> None:
+        """Creates a new shell command invoker.
+
+        Args:
+            runner: The object that will run the shell commands.
+            working_dir: The directory that all commands should work in,
+                         if None then the runners enviroment default is used.
+        """
+        self._runner = runner
+        self._working_dir = working_dir
+
+    def run(self, command: str, timeout_sec: int = 60) -> CompletedProcess:
+        """Runs a generic command through the runner.
+
+        Takes the command and prepares it to be run in the target shell using
+        this objects settings.
+
+        Args:
+            command: The command to run.
+            timeout_sec: How long to wait for the command.
+
+        Returns:
+            A CmdResult object containing the results of the shell command.
+
+        Raises:
+            job.Error: When the command executed but had an error.
+        """
+        if self._working_dir:
+            command_str = f"cd {self._working_dir}; {command}"
+        else:
+            command_str = command
+
+        return self._runner.run(command_str, timeout_sec=timeout_sec)
+
+    def is_alive(self, identifier: str | int) -> bool:
+        """Checks to see if a program is alive.
+
+        Checks to see if a program is alive on the shells enviroment. This can
+        be used to check on generic programs, or a specific program using
+        a pid.
+
+        Args:
+            identifier: string or int, Used to identify the program to check.
+                        if given an int then it is assumed to be a pid. If
+                        given a string then it will be used as a search key
+                        to compare on the running processes.
+        Returns:
+            True if a process was found running, false otherwise.
+        """
+        try:
+            if isinstance(identifier, str):
+                self.run(f"ps aux | grep -v grep | grep {identifier}")
+            elif isinstance(identifier, int):
+                self.signal(identifier, 0)
+            else:
+                raise ValueError("Bad type was given for identifier")
+
+            return True
+        except job.Error:
+            return False
+
+    def get_pids(self, identifier: str) -> Iterator[int]:
+        """Gets the pids of a program.
+
+        Searches for a program with a specific name and grabs the pids for all
+        programs that match.
+
+        Args:
+            identifier: A search term that identifies the program.
+
+        Returns: An array of all pids that matched the identifier, or None
+                  if no pids were found.
+        """
+        try:
+            result = self.run(f"ps aux | grep -v grep | grep {identifier}")
+        except job.Error as e:
+            if e.result.exit_status == 1:
+                # Grep returns exit status 1 when no lines are selected. This is
+                # an expected return code.
+                return
+            raise e
+
+        lines = result.stdout.splitlines()
+
+        # The expected output of the above command is like so:
+        # bob    14349  0.0  0.0  34788  5552 pts/2    Ss   Oct10   0:03 bash
+        # bob    52967  0.0  0.0  34972  5152 pts/4    Ss   Oct10   0:00 bash
+        # Where the format is:
+        # USER    PID  ...
+        for line in lines:
+            pieces = line.split()
+            try:
+                yield int(pieces[1])
+            except StopIteration:
+                return
+
+    def search_file(self, search_string: str, file_name: str) -> bool:
+        """Searches through a file for a string.
+
+        Args:
+            search_string: The string or pattern to look for.
+            file_name: The name of the file to search.
+
+        Returns:
+            True if the string or pattern was found, False otherwise.
+        """
+        try:
+            self.run(f"grep {shlex.quote(search_string)} {file_name}")
+            return True
+        except job.Error:
+            return False
+
+    def read_file(self, file_name: str) -> str:
+        """Reads a file through the shell.
+
+        Args:
+            file_name: The name of the file to read.
+
+        Returns:
+            A string of the files contents.
+        """
+        return self.run(f"cat {file_name}").stdout
+
+    def write_file(self, file_name: str, data: str) -> CompletedProcess:
+        """Writes a block of data to a file through the shell.
+
+        Args:
+            file_name: The name of the file to write to.
+            data: The string of data to write.
+        """
+        return self.run(f"echo {shlex.quote(data)} > {file_name}")
+
+    def append_file(self, file_name: str, data: str) -> CompletedProcess:
+        """Appends a block of data to a file through the shell.
+
+        Args:
+            file_name: The name of the file to write to.
+            data: The string of data to write.
+        """
+        return self.run(f"echo {shlex.quote(data)} >> {file_name}")
+
+    def touch_file(self, file_name: str) -> None:
+        """Creates a file through the shell.
+
+        Args:
+            file_name: The name of the file to create.
+        """
+        self.write_file(file_name, "")
+
+    def delete_file(self, file_name: str) -> None:
+        """Deletes a file through the shell.
+
+        Args:
+            file_name: The name of the file to delete.
+        """
+        try:
+            self.run(f"rm -r {file_name}")
+        except job.Error as e:
+            if "No such file or directory" in e.result.stderr:
+                return
+            raise e
+
+    def kill(self, identifier: str, timeout_sec: int = 10) -> None:
+        """Kills a program or group of programs through the shell.
+
+        Kills all programs that match an identifier through the shell. This
+        will send an increasing queue of kill signals to all programs
+        that match the identifier until either all are dead or the timeout
+        finishes.
+
+        Programs are guaranteed to be killed after running this command.
+
+        Args:
+            identifier: A string used to identify the program.
+            timeout_sec: The time to wait for all programs to die. Each signal
+                will take an equal portion of this time.
+        """
+        if isinstance(identifier, int):
+            pids = [identifier]
+        else:
+            pids = list(self.get_pids(identifier))
+
+        signal_queue = [signal.SIGINT, signal.SIGTERM, signal.SIGKILL]
+
+        signal_duration = timeout_sec / len(signal_queue)
+        for sig in signal_queue:
+            for pid in pids:
+                try:
+                    self.signal(pid, sig)
+                except job.Error:
+                    pass
+
+            start_time = time.time()
+            while pids and time.time() - start_time < signal_duration:
+                time.sleep(0.1)
+                pids = [pid for pid in pids if self.is_alive(pid)]
+
+            if not pids:
+                break
+
+    def signal(self, pid: int, sig: int) -> None:
+        """Sends a specific signal to a program.
+
+        Args:
+            pid: The process id of the program to kill.
+            sig: The signal to send.
+
+        Raises:
+            job.Error: Raised when the signal fail to reach
+                       the specified program.
+        """
+        self.run(f"kill -{sig} {pid}")
diff --git a/packages/antlion/controllers/utils_lib/host_utils.py b/packages/antlion/controllers/utils_lib/host_utils.py
new file mode 100644
index 0000000..1b66089
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/host_utils.py
@@ -0,0 +1,62 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import socket
+
+
def get_available_host_port():
    """Finds a semi-random available port.

    A race condition is still possible after the port number is returned, if
    another process happens to bind it.

    Returns:
        A port number that is unused on both TCP and UDP.
    """
    # Historically (2.6 kernels), probing a UDP socket first returned the
    # same port over and over, so probe TCP first and confirm on UDP.
    while True:
        candidate = _try_bind(0, socket.SOCK_STREAM, socket.IPPROTO_TCP)
        if not candidate:
            continue
        if _try_bind(candidate, socket.SOCK_DGRAM, socket.IPPROTO_UDP):
            return candidate
+
+
def is_port_available(port):
    """Checks if a given port number is available on the system.

    Args:
        port: An integer which is the port number to check.

    Returns:
        Truthy if the port is free on both TCP and UDP; falsy otherwise.
    """
    tcp_result = _try_bind(port, socket.SOCK_STREAM, socket.IPPROTO_TCP)
    # Short-circuit: only probe UDP when the TCP bind succeeded.
    return tcp_result and _try_bind(port, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
+
+
+def _try_bind(port, socket_type, socket_proto):
+    s = socket.socket(socket.AF_INET, socket_type, socket_proto)
+    try:
+        try:
+            s.bind(("", port))
+            # The result of getsockname() is protocol dependent, but for both
+            # IPv4 and IPv6 the second field is a port number.
+            return s.getsockname()[1]
+        except socket.error:
+            return None
+    finally:
+        s.close()
diff --git a/src/antlion/controllers/utils_lib/ssh/__init__.py b/packages/antlion/controllers/utils_lib/ssh/__init__.py
similarity index 100%
rename from src/antlion/controllers/utils_lib/ssh/__init__.py
rename to packages/antlion/controllers/utils_lib/ssh/__init__.py
diff --git a/packages/antlion/controllers/utils_lib/ssh/connection.py b/packages/antlion/controllers/utils_lib/ssh/connection.py
new file mode 100644
index 0000000..058147e
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/ssh/connection.py
@@ -0,0 +1,470 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import logging
+import os
+import re
+import shlex
+import shutil
+import tempfile
+import threading
+import time
+import uuid
+
+from mobly import logger
+
+from antlion.controllers.utils_lib import host_utils
+from antlion.controllers.utils_lib.ssh import formatter
+from antlion.libs.proc import job
+from antlion.runner import CompletedProcess, Runner
+
+
class Error(Exception):
    """Raised when an ssh operation fails (connection, DNS, permissions)."""
+
+
class CommandError(Exception):
    """An error occurred with the command.

    Attributes:
        result: The results of the ssh command that had the error.
    """

    def __init__(self, result):
        """
        Args:
            result: The result of the ssh command that created the problem.
        """
        self.result = result

    def __str__(self):
        return (
            f"cmd: {self.result.command}\n"
            f"stdout: {self.result.stdout}\n"
            f"stderr: {self.result.stderr}"
        )
+
+
# Bookkeeping record for an open ssh port-forward: the local and remote port
# numbers plus the forwarding ssh subprocess.
_Tunnel = collections.namedtuple("_Tunnel", ["local_port", "remote_port", "proc"])
+
+
+class SshConnection(Runner):
+    """Provides a connection to a remote machine through ssh.
+
+    Provides the ability to connect to a remote machine and execute a command
+    on it. The connection will try to establish a persistent connection When
+    a command is run. If the persistent connection fails it will attempt
+    to connect normally.
+    """
+
+    @property
+    def socket_path(self):
+        """Returns: The os path to the master socket file."""
+        if self._master_ssh_tempdir is None:
+            raise AttributeError(
+                "socket_path is not available yet; run setup_master_ssh() first"
+            )
+        return os.path.join(self._master_ssh_tempdir, "socket")
+
    def __init__(self, settings):
        """
        Args:
            settings: The ssh settings (hostname, credentials, timeouts) to
                use for this connection.
        """
        self._settings = settings
        # Turns raw commands into full `ssh ...` invocations.
        self._formatter = formatter.SshFormatter()
        # Guards master-ssh setup/teardown across threads.
        self._lock = threading.Lock()
        # Background ssh ControlMaster process; None until setup_master_ssh().
        self._master_ssh_proc = None
        # Temp dir holding the ControlMaster socket; None until setup.
        self._master_ssh_tempdir: str | None = None
        # Open _Tunnel records managed by create/close_ssh_tunnel.
        self._tunnels = list()

        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SshConnection | {self._settings.hostname}]",
            },
        )
+
    def __enter__(self):
        return self

    def __exit__(self, _, __, ___):
        # Release the master connection and any tunnels on context exit.
        self.close()

    def __del__(self):
        # Best-effort cleanup if the caller never called close().
        self.close()
+
    def setup_master_ssh(self, timeout_sec: int = 5):
        """Sets up the master ssh connection.

        Sets up the initial master ssh connection if it has not already been
        started.

        Args:
            timeout_sec: The time to wait for the master ssh connection to
                be made.

        Raises:
            Error: When setting up the master ssh connection fails.
        """
        with self._lock:
            if self._master_ssh_proc is not None:
                # A previous master exists: verify it is still healthy, i.e.
                # its socket still exists and the process has not exited.
                socket_path = self.socket_path
                if (
                    not os.path.exists(socket_path)
                    or self._master_ssh_proc.poll() is not None
                ):
                    self.log.debug(
                        "Master ssh connection to %s is down.", self._settings.hostname
                    )
                    self._cleanup_master_ssh()

            if self._master_ssh_proc is None:
                # Create a shared socket in a temp location.
                self._master_ssh_tempdir = tempfile.mkdtemp(prefix="ssh-master")

                # Setup flags and options for running the master ssh
                # -N: Do not execute a remote command.
                # ControlMaster: Spawn a master connection.
                # ControlPath: The master connection socket path.
                extra_flags = {"-N": None}
                extra_options = {
                    "ControlMaster": True,
                    "ControlPath": self.socket_path,
                    "BatchMode": True,
                }

                # Construct the command and start it.
                master_cmd = self._formatter.format_ssh_local_command(
                    self._settings, extra_flags=extra_flags, extra_options=extra_options
                )
                self.log.info("Starting master ssh connection.")
                self._master_ssh_proc = job.run_async(master_cmd)

                end_time = time.time() + timeout_sec

                # Poll for the control socket to appear; the while/else `else`
                # branch runs only when the deadline passed without a break.
                while time.time() < end_time:
                    if os.path.exists(self.socket_path):
                        break
                    time.sleep(0.2)
                else:
                    self._cleanup_master_ssh()
                    raise Error("Master ssh connection timed out.")
+
+    def run(
+        self,
+        command: str | list[str],
+        timeout_sec: int | None = 60,
+        ignore_status: bool = False,
+        env: dict[str, str] | None = None,
+        io_encoding: str = "utf-8",
+        attempts: int = 2,
+    ) -> CompletedProcess:
+        """Runs a remote command over ssh.
+
+        Will ssh to a remote host and run a command. This method will
+        block until the remote command is finished.
+
+        Args:
+            command: The command to execute over ssh.
+            timeout_sec: seconds to wait for command to finish.
+            ignore_status: True to ignore the exit code of the remote
+                           subprocess.  Note that if you do ignore status codes,
+                           you should handle non-zero exit codes explicitly.
+            env: environment variables to setup on the remote host.
+            io_encoding: unicode encoding of command output.
+            attempts: Number of attempts before giving up on command failures.
+
+        Returns:
+            Results of the ssh command.
+
+        Raises:
+            job.TimeoutError: When the remote command took to long to execute.
+            job.Error: When the ssh connection failed to be created.
+            CommandError: Ssh worked, but the command had an error executing.
+        """
+        if attempts < 1:
+            raise TypeError("attempts must be a positive, non-zero integer")
+        if env is None:
+            env = {}
+
+        try:
+            self.setup_master_ssh(self._settings.connect_timeout)
+        except Error:
+            self.log.warning(
+                "Failed to create master ssh connection, using "
+                "normal ssh connection."
+            )
+
+        extra_options: dict[str, str | bool] = {"BatchMode": True}
+        if self._master_ssh_proc:
+            extra_options["ControlPath"] = self.socket_path
+
+        if isinstance(command, list):
+            flat_command = shlex.join(command)
+        else:
+            flat_command = command
+
+        identifier = str(uuid.uuid4())
+        full_command = f'echo "CONNECTED: {identifier}"; {flat_command}'
+
+        terminal_command = self._formatter.format_command(
+            full_command, env, self._settings, extra_options=extra_options
+        )
+
+        dns_retry_count = 2
+        while True:
+            result = job.run(
+                terminal_command,
+                ignore_status=True,
+                timeout_sec=timeout_sec,
+                io_encoding=io_encoding,
+            )
+            output = result.stdout
+
+            # Check for a connected message to prevent false negatives.
+            valid_connection = re.search(
+                f"^CONNECTED: {identifier}", output, flags=re.MULTILINE
+            )
+            if valid_connection:
+                # Remove the first line that contains the connect message.
+                line_index = output.find("\n") + 1
+                if line_index == 0:
+                    line_index = len(output)
+                real_output = output[line_index:].encode(io_encoding)
+
+                result._raw_stdout = real_output
+                result._stdout_str = None
+
+                if result.exit_status and not ignore_status:
+                    raise job.Error(result)
+                return result
+
+            error_string = result.stderr
+
+            had_dns_failure = result.exit_status == 255 and re.search(
+                r"^ssh: .*: Name or service not known", error_string, flags=re.MULTILINE
+            )
+            if had_dns_failure:
+                dns_retry_count -= 1
+                if not dns_retry_count:
+                    raise Error("DNS failed to find host.", result)
+                self.log.debug("Failed to connect to host, retrying...")
+            else:
+                break
+
+        had_timeout = re.search(
+            r"^ssh: connect to host .* port .*: " r"Connection timed out\r$",
+            error_string,
+            flags=re.MULTILINE,
+        )
+        if had_timeout:
+            raise Error("Ssh timed out.", result)
+
+        permission_denied = "Permission denied" in error_string
+        if permission_denied:
+            raise Error("Permission denied.", result)
+
+        unknown_host = re.search(
+            r"ssh: Could not resolve hostname .*: " r"Name or service not known",
+            error_string,
+            flags=re.MULTILINE,
+        )
+        if unknown_host:
+            raise Error("Unknown host.", result)
+
+        self.log.error(f"An unknown error has occurred. Job result: {result}")
+        ping_output = job.run(
+            f"ping {self._settings.hostname} -c 3 -w 1", ignore_status=True
+        )
+        self.log.error(f"Ping result: {ping_output}")
+        if attempts > 1:
+            self._cleanup_master_ssh()
+            self.run(
+                command, timeout_sec, ignore_status, env, io_encoding, attempts - 1
+            )
+        raise Error("The job failed for unknown reasons.", result)
+
    def run_async(self, command, env=None) -> CompletedProcess:
        """Starts up a background command over ssh.

        Will ssh to a remote host and startup a command. This method will
        block until there is confirmation that the remote command has started.

        Args:
            command: The command to execute over ssh. Can be either a string
                     or a list.
            env: A dictionary of environment variables to setup on the remote
                 host.

        Returns:
            The result of the command to launch the background job. Its stdout
            contains the pid of the backgrounded command (from `$!`).

        Raises:
            Error: When the ssh connection fails.
            job.Error: When the launch command itself fails.
        """
        # Detach the command from the ssh session: redirect all stdio, run it
        # in the background, and print its remote pid via `$!`.
        return self.run(
            f"({command}) < /dev/null > /dev/null 2>&1 & echo -n $!", env=env
        )
+
+    def close(self):
+        """Clean up open connections to remote host.
+
+        Kills the master SSH connection (if one is active) and closes every
+        open tunnel.
+        """
+        self._cleanup_master_ssh()
+        # close_ssh_tunnel() removes the entry from self._tunnels, so keep
+        # closing the first remaining tunnel until the list drains.
+        while self._tunnels:
+            self.close_ssh_tunnel(self._tunnels[0].local_port)
+
+    def _cleanup_master_ssh(self):
+        """
+        Release all resources (process, temporary directory) used by an active
+        master SSH connection.
+        """
+        # If a master SSH connection is running, kill it.
+        if self._master_ssh_proc is not None:
+            self.log.debug("Nuking master_ssh_job.")
+            self._master_ssh_proc.kill()
+            self._master_ssh_proc.wait()
+            self._master_ssh_proc = None
+
+        # Remove the temporary directory for the master SSH socket.
+        if self._master_ssh_tempdir is not None:
+            self.log.debug("Cleaning master_ssh_tempdir.")
+            shutil.rmtree(self._master_ssh_tempdir)
+            self._master_ssh_tempdir = None
+
+    def create_ssh_tunnel(self, port, local_port=None):
+        """Create an ssh tunnel from local_port to port.
+
+        This securely forwards traffic from local_port on this machine to the
+        remote SSH host at port.
+
+        Args:
+            port: remote port on the host.
+            local_port: local forwarding port, or None to pick an available
+                        port.
+
+        Returns:
+            int, the local port of the tunnel: the requested local_port, a
+            newly allocated free port, or — when a tunnel to the same remote
+            port already exists — that tunnel's local port, which may differ
+            from the requested local_port.
+        """
+        if not local_port:
+            local_port = host_utils.get_available_host_port()
+        else:
+            # Reuse an existing tunnel to the same remote port, even if it is
+            # bound to a different local port than the one requested.
+            for tunnel in self._tunnels:
+                if tunnel.remote_port == port:
+                    return tunnel.local_port
+
+        extra_flags = {
+            "-n": None,  # Read from /dev/null for stdin
+            "-N": None,  # Do not execute a remote command
+            "-q": None,  # Suppress warnings and diagnostic commands
+            "-L": f"{local_port}:localhost:{port}",
+        }
+        extra_options = dict()
+        if self._master_ssh_proc:
+            # Multiplex over the master connection when one is running.
+            extra_options["ControlPath"] = self.socket_path
+        tunnel_cmd = self._formatter.format_ssh_local_command(
+            self._settings, extra_flags=extra_flags, extra_options=extra_options
+        )
+        self.log.debug("Full tunnel command: %s", tunnel_cmd)
+        # Exec the ssh process directly so that when we deliver signals, we
+        # deliver them straight to the child process.
+        tunnel_proc = job.run_async(tunnel_cmd)
+        self.log.debug(
+            "Started ssh tunnel, local = %d remote = %d, pid = %d",
+            local_port,
+            port,
+            tunnel_proc.pid,
+        )
+        self._tunnels.append(_Tunnel(local_port, port, tunnel_proc))
+        return local_port
+
+    def close_ssh_tunnel(self, local_port):
+        """Close a previously created ssh tunnel of a TCP port.
+
+        Args:
+            local_port: int port on localhost previously forwarded to the remote
+                        host.
+
+        Returns:
+            integer port number this port was forwarded to on the remote host or
+            None if no tunnel was found.
+        """
+        idx = None
+        for i, tunnel in enumerate(self._tunnels):
+            if tunnel.local_port == local_port:
+                idx = i
+                break
+        if idx is not None:
+            tunnel = self._tunnels.pop(idx)
+            tunnel.proc.kill()
+            tunnel.proc.wait()
+            return tunnel.remote_port
+        return None
+
+    def send_file(self, local_path, remote_path, ignore_status=False):
+        """Send a file from the local host to the remote host.
+
+        Args:
+            local_path: string path of file to send on local host.
+            remote_path: string path to copy file to on remote host.
+            ignore_status: Whether or not to ignore the command's exit_status.
+        """
+        # TODO: This may belong somewhere else: b/32572515
+        user_host = self._formatter.format_host_name(self._settings)
+        job.run(
+            f"scp {local_path} {user_host}:{remote_path}",
+            ignore_status=ignore_status,
+        )
+
+    def pull_file(self, local_path, remote_path, ignore_status=False):
+        """Send a file from remote host to local host
+
+        Args:
+            local_path: string path of file to recv on local host
+            remote_path: string path to copy file from on remote host.
+            ignore_status: Whether or not to ignore the command's exit_status.
+        """
+        user_host = self._formatter.format_host_name(self._settings)
+        job.run(
+            f"scp {user_host}:{remote_path} {local_path}",
+            ignore_status=ignore_status,
+        )
+
+    def find_free_port(self, interface_name="localhost"):
+        """Find an unused port on the remote host.
+
+        Note that this method is inherently racy, since it is impossible
+        to promise that the remote port will remain free.
+
+        Args:
+            interface_name: string name of the interface/address to bind
+                            against when checking whether a port is used.
+
+        Returns:
+            integer port number on remote interface that was free.
+        """
+        # TODO: This may belong somewhere else: b/3257251
+        # Bind port 0 on the remote host so the OS picks a free port, print
+        # it, and release it immediately.
+        free_port_cmd = (
+            'python -c "import socket; s=socket.socket(); '
+            "s.bind(('%s', 0)); print(s.getsockname()[1]); s.close()\""
+        ) % interface_name
+        port = int(self.run(free_port_cmd).stdout)
+        # Yield to the os to ensure the port gets cleaned up.
+        time.sleep(0.001)
+        return port
diff --git a/packages/antlion/controllers/utils_lib/ssh/formatter.py b/packages/antlion/controllers/utils_lib/ssh/formatter.py
new file mode 100644
index 0000000..eb2ed09
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/ssh/formatter.py
@@ -0,0 +1,202 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class SshFormatter(object):
+    """Handles formatting ssh commands.
+
+    Handler for formatting chunks of the ssh command to run.
+    """
+
+    def format_ssh_executable(self, settings):
+        """Format the executable name.
+
+        Formats the executable name as a string.
+
+        Args:
+            settings: The ssh settings being used.
+
+        Returns:
+            A string for the ssh executable name.
+        """
+        return settings.executable
+
+    def format_host_name(self, settings):
+        """Format hostname.
+
+        Formats the hostname to connect to.
+
+        Args:
+            settings: The ssh settings being used.
+
+        Returns:
+            A string of the connection host name to connect to.
+        """
+        return f"{settings.username}@{settings.hostname}"
+
+    def format_value(self, value):
+        """Formats a command line value.
+
+        Takes in a value and formats it so it can be safely used in the
+        command line.
+
+        Args:
+            value: The value to format.
+
+        Returns:
+            A string representation of the formatted value.
+        """
+        if isinstance(value, bool):
+            return "yes" if value else "no"
+
+        return str(value)
+
+    def format_options_list(self, options):
+        """Format the option list.
+
+        Formats a dictionary of options into a list of strings to be used
+        on the command line.
+
+        Args:
+            options: A dictionary of options.
+
+        Returns:
+            An iterator of strings that should go on the command line.
+        """
+        for option_name in options:
+            option = options[option_name]
+
+            yield "-o"
+            yield f"{option_name}={self.format_value(option)}"
+
+    def format_flag_list(self, flags):
+        """Format the flags list.
+
+        Formats a dictionary of flags into a list of strings to be used
+        on the command line.
+
+        Args:
+            flags: A dictonary of options.
+
+        Returns:
+            An iterator of strings that should be used on the command line.
+        """
+        for flag_name in flags:
+            flag = flags[flag_name]
+
+            yield flag_name
+            if flag is not None:
+                yield self.format_value(flag)
+
+    def format_ssh_local_command(self, settings, extra_flags={}, extra_options={}):
+        """Formats the local part of the ssh command.
+
+        Formats the local section of the ssh command. This is the part of the
+        command that will actual launch ssh on our local machine with the
+        specified settings.
+
+        Args:
+            settings: The ssh settings.
+            extra_flags: Extra flags to inlcude.
+            extra_options: Extra options to include.
+
+        Returns:
+            An array of strings that make up the command and its local
+            arguments.
+        """
+        options = settings.construct_ssh_options()
+        for extra_option_name in extra_options:
+            options[extra_option_name] = extra_options[extra_option_name]
+        options_list = list(self.format_options_list(options))
+
+        flags = settings.construct_ssh_flags()
+        for extra_flag_name in extra_flags:
+            flags[extra_flag_name] = extra_flags[extra_flag_name]
+        flags_list = list(self.format_flag_list(flags))
+
+        all_options = options_list + flags_list
+        host_name = self.format_host_name(settings)
+        executable = self.format_ssh_executable(settings)
+
+        base_command = [executable] + all_options + [host_name]
+
+        return base_command
+
+    def format_ssh_command(
+        self, remote_command, settings, extra_flags={}, extra_options={}
+    ):
+        """Formats the full ssh command.
+
+        Creates the full format for an ssh command.
+
+        Args:
+            remote_command: A string that represents the remote command to
+                            execute.
+            settings: The ssh settings to use.
+            extra_flags: Extra flags to include in the settings.
+            extra_options: Extra options to include in the settings.
+
+        Returns:
+            A list of strings that make up the total ssh command.
+        """
+        local_command = self.format_ssh_local_command(
+            settings, extra_flags, extra_options
+        )
+
+        local_command.append(remote_command)
+        return local_command
+
+    def format_remote_command(self, command, env):
+        """Formats the remote part of the ssh command.
+
+        Formatts the command that will run on the remote machine.
+
+        Args:
+            command: string, The command to be executed.
+            env: Enviroment variables to add to the remote envirment.
+
+        Returns:
+            A string that represents the command line to execute on the remote
+            machine.
+        """
+        if not env:
+            env_str = ""
+        else:
+            env_str = "export "
+            for name in env:
+                value = env[name]
+                env_str += f"{name}={str(value)} "
+            env_str += ";"
+
+        execution_line = f"{env_str} {command};"
+        return execution_line
+
+    def format_command(self, command, env, settings, extra_flags={}, extra_options={}):
+        """Formats a full command.
+
+        Formats the full command to run in order to run a command on a remote
+        machine.
+
+        Args:
+            command: The command to run on the remote machine. Can either be
+                     a string or a list.
+            env: The enviroment variables to include on the remote machine.
+            settings: The ssh settings to use.
+            extra_flags: Extra flags to include with the settings.
+            extra_options: Extra options to include with the settings.
+        """
+        remote_command = self.format_remote_command(command, env)
+        return self.format_ssh_command(
+            remote_command, settings, extra_flags, extra_options
+        )
diff --git a/packages/antlion/controllers/utils_lib/ssh/settings.py b/packages/antlion/controllers/utils_lib/ssh/settings.py
new file mode 100644
index 0000000..045b782
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/ssh/settings.py
@@ -0,0 +1,138 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Settings container for SSH connections.
+
+Defines SshSettings, which holds SSH connection options, and from_config(),
+which builds an SshSettings from an ACTS config dictionary and raises
+ValueError when a required value is missing or has the wrong type.
+"""
+
+from typing import Mapping
+
+
+class SshSettings(object):
+    """Contains settings for ssh.
+
+    Container for ssh connection settings.
+
+    Attributes:
+        username: The name of the user to log in as.
+        hostname: The name of the host to connect to.
+        executable: The ssh executable to use.
+        port: The port to connect through (usually 22).
+        host_file: The known host file to use.
+        connect_timeout: How long to wait on a connection before giving a
+                         timeout.
+        alive_interval: How long between ssh heartbeat signals to keep the
+                        connection alive.
+        identity_file: Path to the identity (private key) file passed to
+                       ssh via -i.
+        ssh_config: Optional path to an ssh config file, passed via -F.
+    """
+
+    def __init__(
+        self,
+        hostname: str,
+        username: str,
+        identity_file: str,
+        port: int = 22,
+        host_file: str = "/dev/null",
+        connect_timeout: int = 30,
+        alive_interval: int = 300,
+        executable: str = "/usr/bin/ssh",
+        ssh_config: str | None = None,
+    ):
+        self.username = username
+        self.hostname = hostname
+        self.executable = executable
+        self.port = port
+        self.host_file = host_file
+        self.connect_timeout = connect_timeout
+        self.alive_interval = alive_interval
+        self.identity_file = identity_file
+        self.ssh_config = ssh_config
+
+    def construct_ssh_options(self) -> dict[str, str | int | bool]:
+        """Construct the ssh options.
+
+        Constructs a dictionary of option that should be used with the ssh
+        command.
+
+        Returns:
+            A dictionary of option name to value.
+        """
+        current_options: dict[str, str | int | bool] = {}
+        current_options["StrictHostKeyChecking"] = False
+        current_options["UserKnownHostsFile"] = self.host_file
+        current_options["ConnectTimeout"] = self.connect_timeout
+        current_options["ServerAliveInterval"] = self.alive_interval
+        return current_options
+
+    def construct_ssh_flags(self) -> dict[str, None | str | int]:
+        """Construct the ssh flags.
+
+        Constructs what flags should be used in the ssh connection.
+
+        Returns:
+            A dictionary of flag name to value. If value is none then it is
+            treated as a binary flag.
+        """
+        current_flags: dict[str, None | str | int] = {}
+        current_flags["-a"] = None  # Disable agent forwarding
+        current_flags["-x"] = None  # Disable X11 forwarding
+        current_flags["-p"] = self.port
+        if self.identity_file:
+            current_flags["-i"] = self.identity_file
+        if self.ssh_config:
+            current_flags["-F"] = self.ssh_config
+        return current_flags
+
+
+def from_config(config: Mapping[str, str | int]) -> SshSettings:
+    ssh_binary_path = config.get("ssh_binary_path", "/usr/bin/ssh")
+    if type(ssh_binary_path) != str:
+        raise ValueError(f"ssh_binary_path must be a string, got {ssh_binary_path}")
+
+    user = config.get("user", None)
+    if type(user) != str:
+        raise ValueError(f"user must be a string, got {user}")
+
+    host = config.get("host", None)
+    if type(host) != str:
+        raise ValueError(f"host must be a string, got {host}")
+
+    port = config.get("port", 22)
+    if type(port) != int:
+        raise ValueError(f"port must be an integer, got {port}")
+
+    identity_file = config.get("identity_file", None)
+    if identity_file is None or type(identity_file) != str:
+        raise ValueError(f"identity_file must be a string, got {identity_file}")
+
+    ssh_config = config.get("ssh_config", None)
+    if ssh_config is not None and type(ssh_config) != str:
+        raise ValueError(f"ssh_config must be a string, got {ssh_config}")
+
+    connect_timeout = config.get("connect_timeout", 30)
+    if type(connect_timeout) != int:
+        raise ValueError(f"connect_timeout must be an integer, got {connect_timeout}")
+
+    return SshSettings(
+        host,
+        user,
+        identity_file,
+        port=port,
+        ssh_config=ssh_config,
+        connect_timeout=connect_timeout,
+        executable=ssh_binary_path,
+    )
diff --git a/packages/antlion/decorators.py b/packages/antlion/decorators.py
new file mode 100644
index 0000000..6ac7875
--- /dev/null
+++ b/packages/antlion/decorators.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import typing
+from threading import RLock
+from typing import Callable, Generic, TypeVar
+
+S = TypeVar("S")
+T = TypeVar("T")
+O = TypeVar("O")
+
+
+_NOT_FOUND = object()
+
+
+class cached_property(Generic[T]):
+    """A property whose value is computed then cached; deleter can be overridden.
+
+    Similar to functools.cached_property(), with the addition of deleter function that
+    can be overridden to provide custom clean up. The deleter function doesn't throw an
+    AttributeError if the value doesn't already exist.
+
+    Useful for properties that are tied to the lifetime of a device and need to be
+    recomputed upon reboot of said device.
+
+    Example:
+
+    ```
+    class LinuxDevice:
+        @cached_property
+        def ssh(self) -> SSH:
+            return SSH(self.ip)
+
+        @ssh.deleter
+        def ssh(self, ssh: SSH) -> None:
+            ssh.terminate_connections()
+    ```
+    """
+
+    def __init__(
+        self, func: Callable[[S], T], deleter: Callable[[S, T], None] | None = None
+    ) -> None:
+        # func computes the value on first access; _deleter (optional) is
+        # invoked with the cached value when the attribute is deleted.
+        self.func = func
+        self._deleter = deleter
+        self.name: str | None = None
+        self.__doc__ = func.__doc__
+        # Reentrant lock guarding first computation and deletion; shared by
+        # all instances of the owning class (one lock per descriptor).
+        self.lock = RLock()
+
+    def __set_name__(self, owner: O, name: str) -> None:
+        # Called by Python when the descriptor is bound to a class attribute;
+        # records the attribute name used as the cache key.
+        if self.name is None:
+            self.name = name
+        elif name != self.name:
+            raise TypeError(
+                "Cannot assign the same cached_property to two different names "
+                f"({self.name!r} and {name!r})."
+            )
+
+    def _cache(self, instance: S) -> dict[str, object]:
+        # The cache is simply the instance's __dict__; instance attributes
+        # shadow the (non-data half of the) descriptor lookup on later reads.
+        if self.name is None:
+            raise TypeError(
+                "Cannot use cached_property instance without calling __set_name__ on it."
+            )
+        try:
+            return instance.__dict__
+        except (
+            AttributeError
+        ):  # not all objects have __dict__ (e.g. class defines slots)
+            msg = (
+                f"No '__dict__' attribute on {type(instance).__name__!r} "
+                f"instance to cache {self.name!r} property."
+            )
+            raise TypeError(msg) from None
+
+    def __get__(self, instance: S, owner: O | None = None) -> T:
+        # First access computes and stores the value; later accesses return
+        # the cached copy without locking (fast path).
+        cache = self._cache(instance)
+        assert self.name is not None
+        val = cache.get(self.name, _NOT_FOUND)
+        if val is _NOT_FOUND:
+            with self.lock:
+                # check if another thread filled cache while we awaited lock
+                val = cache.get(self.name, _NOT_FOUND)
+                if val is _NOT_FOUND:
+                    val = self.func(instance)
+                    try:
+                        cache[self.name] = val
+                    except TypeError:
+                        msg = (
+                            f"The '__dict__' attribute on {type(instance).__name__!r} instance "
+                            f"does not support item assignment for caching {self.name!r} property."
+                        )
+                        raise TypeError(msg) from None
+                    return val
+        return typing.cast(T, val)
+
+    def __delete__(self, instance: S) -> None:
+        # Evict the cached value; no-op (no AttributeError) when nothing is
+        # cached. The custom deleter, if any, receives the evicted value.
+        cache = self._cache(instance)
+        assert self.name is not None
+        with self.lock:
+            val = cache.pop(self.name, _NOT_FOUND)
+            if val is _NOT_FOUND:
+                return
+            if self._deleter:
+                self._deleter(instance, typing.cast(T, val))
+
+    def deleter(self, deleter: Callable[[S, T], None]) -> cached_property:
+        # Mutate this descriptor (in case callers hold a reference to it) and
+        # also return a fresh descriptor carrying the same func/name/doc/lock,
+        # supporting the standard `@prop.deleter` redefinition idiom.
+        self._deleter = deleter
+        prop = type(self)(self.func, deleter)
+        prop.name = self.name
+        prop.__doc__ = self.__doc__
+        prop.lock = self.lock
+        return prop
diff --git a/packages/antlion/error.py b/packages/antlion/error.py
new file mode 100644
index 0000000..e4f0a3c
--- /dev/null
+++ b/packages/antlion/error.py
@@ -0,0 +1,41 @@
+"""This class is where error information will be stored.
+"""
+
+from mobly import signals
+
+
+class ActsError(signals.TestError):
+    """Base Acts Error"""
+
+    def __init__(self, *args, **kwargs):
+        class_name = self.__class__.__name__
+        self.error_doc = self.__class__.__doc__
+        self.error_code = getattr(ActsErrorCode, class_name, ActsErrorCode.UNKNOWN)
+        extras = dict(**kwargs, error_doc=self.error_doc, error_code=self.error_code)
+        details = args[0] if len(args) > 0 else ""
+        super().__init__(details, extras)
+
+
+class ActsErrorCode:
+    """Numeric error codes for ActsError subclasses.
+
+    ActsError.__init__ looks codes up by exception class name via getattr,
+    so each attribute name here must exactly match an exception class name.
+    """
+
+    # Framework Errors 0-999
+
+    UNKNOWN = 0
+
+    # This error code is used to implement unittests for this class.
+    ActsError = 100
+    AndroidDeviceError = 101
+
+    # Controllers Errors 1000-3999
+
+    Sl4aStartError = 1001
+    Sl4aApiError = 1002
+    Sl4aConnectionError = 1003
+    Sl4aProtocolError = 1004
+    Sl4aNotInstalledError = 1005
+    Sl4aRpcTimeoutError = 1006
+
+    # Util Errors 4000-9999
+
+    FastbootError = 9000
+    AdbError = 9001
+    AdbCommandError = 9002
diff --git a/src/antlion/event/__init__.py b/packages/antlion/event/__init__.py
similarity index 100%
rename from src/antlion/event/__init__.py
rename to packages/antlion/event/__init__.py
diff --git a/packages/antlion/event/decorators.py b/packages/antlion/event/decorators.py
new file mode 100644
index 0000000..42b6dca
--- /dev/null
+++ b/packages/antlion/event/decorators.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from antlion.event.subscription_handle import StaticSubscriptionHandle
+
+
+def subscribe_static(event_type, event_filter=None, order=0):
+    """A decorator that subscribes a static or module-level function.
+
+    The decorated function is wrapped in a StaticSubscriptionHandle; the
+    resulting handle must still be registered manually with the event bus.
+
+    Args:
+        event_type: The type of event to subscribe to.
+        event_filter: Optional filter function, passed through to the handle.
+        order: The subscription ordering value, passed through to the handle.
+
+    Returns:
+        A StaticSubscriptionHandle subclass used as the decorator.
+    """
+
+    class InnerSubscriptionHandle(StaticSubscriptionHandle):
+        def __init__(self, func):
+            super().__init__(event_type, func, event_filter=event_filter, order=order)
+
+    return InnerSubscriptionHandle
diff --git a/src/antlion/event/event.py b/packages/antlion/event/event.py
similarity index 100%
rename from src/antlion/event/event.py
rename to packages/antlion/event/event.py
diff --git a/packages/antlion/event/event_bus.py b/packages/antlion/event/event_bus.py
new file mode 100644
index 0000000..c9ec9f0
--- /dev/null
+++ b/packages/antlion/event/event_bus.py
@@ -0,0 +1,296 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import bisect
+import inspect
+import logging
+from threading import RLock
+
+from antlion.event.event_subscription import EventSubscription
+from antlion.event.subscription_handle import SubscriptionHandle
+
+
+class _EventBus(object):
+    """
+    Attributes:
+        _subscriptions: A dictionary of {EventType: list<EventSubscription>}.
+        _registration_id_map: A dictionary of
+                             {RegistrationID: EventSubscription}
+        _subscription_lock: The lock to prevent concurrent removal or addition
+                            to events.
+    """
+
+    def __init__(self):
+        self._subscriptions = {}
+        self._registration_id_map = {}
+        self._subscription_lock = RLock()
+
+    def register(self, event_type, func, filter_fn=None, order=0):
+        """Subscribes the given function to the event type given.
+
+        Args:
+            event_type: The type of the event to subscribe to.
+            func: The function to call when the event is posted.
+            filter_fn: An optional function to be called before calling the
+                       subscribed func. If this function returns falsy, then the
+                       function will not be invoked.
+            order: The order that the subscription should run in. Lower values
+                   run first, with the default value set to 0. In the case of a
+                   tie between two subscriptions of the same event type, the
+                   subscriber added first executes first. In the case of a tie
+                   between two subscribers of a different type, the type of the
+                   subscription that is more specific goes first (i.e.
+                   BaseEventType will execute after ChildEventType if they share
+                   the same order).
+
+        Returns:
+            A registration ID.
+        """
+        subscription = EventSubscription(
+            event_type, func, event_filter=filter_fn, order=order
+        )
+        return self.register_subscription(subscription)
+
+    def register_subscriptions(self, subscriptions):
+        """Registers all subscriptions to the event bus.
+
+        Args:
+            subscriptions: an iterable that returns EventSubscriptions
+
+        Returns:
+            The list of registration IDs.
+        """
+        registration_ids = []
+        for subscription in subscriptions:
+            registration_ids.append(self.register_subscription(subscription))
+
+        return registration_ids
+
+    def register_subscription(self, subscription):
+        """Registers the given subscription to the event bus.
+
+        Args:
+            subscription: An EventSubscription object
+
+        Returns:
+            A registration ID.
+        """
+        with self._subscription_lock:
+            if subscription.event_type in self._subscriptions.keys():
+                subscription_list = self._subscriptions[subscription.event_type]
+                subscription_list.append(subscription)
+                subscription_list.sort(key=lambda x: x.order)
+            else:
+                subscription_list = list()
+                bisect.insort(subscription_list, subscription)
+                self._subscriptions[subscription.event_type] = subscription_list
+
+            registration_id = id(subscription)
+            self._registration_id_map[registration_id] = subscription
+
+        return registration_id
+
+    def post(self, event, ignore_errors=False):
+        """Posts an event to its subscribers.
+
+        Args:
+            event: The event object to send to the subscribers.
+            ignore_errors: Deliver to all subscribers, ignoring any errors.
+        """
+        listening_subscriptions = []
+        for current_type in inspect.getmro(type(event)):
+            if current_type not in self._subscriptions.keys():
+                continue
+            for subscription in self._subscriptions[current_type]:
+                listening_subscriptions.append(subscription)
+
+        # The subscriptions will be collected in sorted runs of sorted order.
+        # Running timsort here is the optimal way to sort this list.
+        listening_subscriptions.sort(key=lambda x: x.order)
+        for subscription in listening_subscriptions:
+            try:
+                subscription.deliver(event)
+            except Exception:
+                if ignore_errors:
+                    logging.exception(
+                        "An exception occurred while handling " "an event."
+                    )
+                    continue
+                raise
+
+    def unregister(self, registration_id):
+        """Unregisters an EventSubscription.
+
+        Args:
+            registration_id: the Subscription or registration_id to unsubscribe.
+        """
+        if type(registration_id) is SubscriptionHandle:
+            subscription = registration_id.subscription
+            registration_id = id(registration_id.subscription)
+        elif type(registration_id) is EventSubscription:
+            subscription = registration_id
+            registration_id = id(registration_id)
+        elif registration_id in self._registration_id_map.keys():
+            subscription = self._registration_id_map[registration_id]
+        elif type(registration_id) is not int:
+            raise ValueError(
+                'Subscription ID "%s" is not a valid ID. This value'
+                "must be an integer ID returned from subscribe()." % registration_id
+            )
+        else:
+            # The value is a "valid" id, but is not subscribed. It's possible
+            # another thread has unsubscribed this value.
+            logging.warning(
+                "Attempted to unsubscribe %s, but the matching "
+                "subscription cannot be found." % registration_id
+            )
+            return False
+
+        event_type = subscription.event_type
+        with self._subscription_lock:
+            self._registration_id_map.pop(registration_id, None)
+            if (
+                event_type in self._subscriptions
+                and subscription in self._subscriptions[event_type]
+            ):
+                self._subscriptions[event_type].remove(subscription)
+        return True
+
+    def unregister_all(self, from_list=None, from_event=None):
+        """Removes all event subscriptions.
+
+        Args:
+            from_list: Unregisters all events from a given list.
+            from_event: Unregisters all events of a given event type.
+        """
+        if from_list is None:
+            from_list = list(self._registration_id_map.values())
+
+        for subscription in from_list:
+            if from_event is None or subscription.event_type == from_event:
+                self.unregister(subscription)
+
+
+_event_bus = _EventBus()
+
+
+def register(event_type, func, filter_fn=None, order=0):
+    """Subscribes the given function to the event type given.
+
+    Args:
+        event_type: The type of the event to subscribe to.
+        func: The function to call when the event is posted.
+        filter_fn: An optional function to be called before calling the subscribed
+                   func. If this function returns falsy, then the function will
+                   not be invoked.
+        order: The order that the subscription should run in. Lower values run
+               first, with the default value set to 0. In the case of a tie
+               between two subscriptions of the same event type, the
+               subscriber added first executes first. In the case of a tie
+               between two subscribers of a different type, the type of the
+               subscription that is more specific goes first (i.e. BaseEventType
+               will execute after ChildEventType if they share the same order).
+
+    Returns:
+        A registration ID.
+    """
+    return _event_bus.register(event_type, func, filter_fn=filter_fn, order=order)
+
+
+def register_subscriptions(subscriptions):
+    """Registers all subscriptions to the event bus.
+
+    Args:
+        subscriptions: an iterable that returns EventSubscriptions
+
+    Returns:
+        The list of registration IDs.
+    """
+    return _event_bus.register_subscriptions(subscriptions)
+
+
+def register_subscription(subscription):
+    """Registers the given subscription to the event bus.
+
+    Args:
+        subscription: An EventSubscription object
+
+    Returns:
+        A registration ID.
+    """
+    return _event_bus.register_subscription(subscription)
+
+
+def post(event, ignore_errors=False):
+    """Posts an event to its subscribers.
+
+    Args:
+        event: The event object to send to the subscribers.
+        ignore_errors: Deliver to all subscribers, ignoring any errors.
+    """
+    _event_bus.post(event, ignore_errors)
+
+
+def unregister(registration_id):
+    """Unregisters an EventSubscription.
+
+    Args:
+        registration_id: the Subscription or registration_id to unsubscribe.
+    """
+    # null check for the corner case where the _event_bus is destroyed before
+    # the subscribers unregister. In such case there is nothing else to
+    # be done.
+    if _event_bus is None:
+        return True
+    return _event_bus.unregister(registration_id)
+
+
+def unregister_all(from_list=None, from_event=None):
+    """Removes all event subscriptions.
+
+    Args:
+        from_list: Unregisters all events from a given list.
+        from_event: Unregisters all events of a given event type.
+    """
+    return _event_bus.unregister_all(from_list=from_list, from_event=from_event)
+
+
+class listen_for(object):
+    """A context-manager class (with statement) for listening to an event within
+    a given section of code.
+
+    Usage:
+
+    with listen_for(EventType, event_listener):
+        func_that_posts_event()  # Will call event_listener
+
+    func_that_posts_event()  # Will not call event_listener
+
+    """
+
+    def __init__(self, event_type, func, filter_fn=None, order=0):
+        self.event_type = event_type
+        self.func = func
+        self.filter_fn = filter_fn
+        self.order = order
+        self.registration_id = None
+
+    def __enter__(self):
+        self.registration_id = _event_bus.register(
+            self.event_type, self.func, filter_fn=self.filter_fn, order=self.order
+        )
+
+    def __exit__(self, *_):
+        _event_bus.unregister(self.registration_id)
diff --git a/packages/antlion/event/event_subscription.py b/packages/antlion/event/event_subscription.py
new file mode 100644
index 0000000..ee8720c
--- /dev/null
+++ b/packages/antlion/event/event_subscription.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class EventSubscription(object):
+    """A class that defines the way a function is subscribed to an event.
+
+    Attributes:
+        event_type: The type of the event.
+        _func: The subscribed function.
+        _event_filter: A lambda that returns True if an event should be passed
+                       to the subscribed function.
+        order: The order value in which this subscription should be called.
+    """
+
+    def __init__(self, event_type, func, event_filter=None, order=0):
+        self._event_type = event_type
+        self._func = func
+        self._event_filter = event_filter
+        self.order = order
+
+    @property
+    def event_type(self):
+        return self._event_type
+
+    def deliver(self, event):
+        """Delivers an event to the subscriber.
+
+        This function will not deliver the event if the event filter rejects the
+        event.
+
+        Args:
+            event: The event to send to the subscriber.
+        """
+        if self._event_filter and not self._event_filter(event):
+            return
+        self._func(event)
diff --git a/packages/antlion/event/subscription_handle.py b/packages/antlion/event/subscription_handle.py
new file mode 100644
index 0000000..3c6a0cc
--- /dev/null
+++ b/packages/antlion/event/subscription_handle.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from antlion.event.event_subscription import EventSubscription
+
+
+class SubscriptionHandle(object):
+    """The object created by a method decorated with an event decorator."""
+
+    def __init__(self, event_type, func, event_filter=None, order=0):
+        self._event_type = event_type
+        self._func = func
+        self._event_filter = event_filter
+        self._order = order
+        self._subscription = None
+        self._owner = None
+
+    @property
+    def subscription(self):
+        if self._subscription:
+            return self._subscription
+        self._subscription = EventSubscription(
+            self._event_type,
+            self._func,
+            event_filter=self._event_filter,
+            order=self._order,
+        )
+        return self._subscription
+
+    def __get__(self, instance, owner):
+        # If our owner has been initialized, or we do not have an instance owner,
+        # return self.
+        if self._owner is not None or instance is None:
+            return self
+
+        # Otherwise, we create a new SubscriptionHandle that will only be used
+        # for the instance that owns this SubscriptionHandle.
+        ret = SubscriptionHandle(
+            self._event_type, self._func, self._event_filter, self._order
+        )
+        ret._owner = instance
+        ret._func = ret._wrap_call(ret._func)
+        for attr, value in owner.__dict__.items():
+            if value is self:
+                setattr(instance, attr, ret)
+                break
+        return ret
+
+    def _wrap_call(self, func):
+        def _wrapped_call(*args, **kwargs):
+            if self._owner is None:
+                return func(*args, **kwargs)
+            else:
+                return func(self._owner, *args, **kwargs)
+
+        return _wrapped_call
+
+    def __call__(self, *args, **kwargs):
+        return self._func(*args, **kwargs)
+
+
+class StaticSubscriptionHandle(SubscriptionHandle):
+    """A SubscriptionHandle for static methods."""
diff --git a/packages/antlion/keys.py b/packages/antlion/keys.py
new file mode 100644
index 0000000..b545d44
--- /dev/null
+++ b/packages/antlion/keys.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum
+
+"""This module has the global key values that are used across framework
+modules.
+"""
+
+
+class Config(enum.Enum):
+    """Enum values for test config related lookups."""
+
+    # Keys used to look up values from test config files.
+    # These keys define the wording of test configs and their internal
+    # references.
+    key_log_path = "logpath"
+    key_testbeds_under_test = "testbeds_under_test"
+    key_testbed = "testbed"
+    key_testbed_name = "name"
+    # configpath is the directory. key_config_full_path is the file path.
+    key_config_path = "configpath"
+    key_config_full_path = "config_full_path"
+    key_test_paths = "testpaths"
+    key_port = "Port"
+    key_address = "Address"
+    key_test_case_iterations = "test_case_iterations"
+    key_test_failure_tracebacks = "test_failure_tracebacks"
+    # Config names for controllers packaged in ACTS.
+    key_access_point = "AccessPoint"
+    key_android_device = "AndroidDevice"
+    key_attenuator = "Attenuator"
+    key_bluetooth_pts_device = "BluetoothPtsDevice"
+    key_fuchsia_device = "FuchsiaDevice"
+    key_iperf_client = "IPerfClient"
+    key_iperf_server = "IPerfServer"
+    key_openwrt_ap = "OpenWrtAP"
+    key_packet_capture = "PacketCapture"
+    key_packet_sender = "PacketSender"
+    key_pdu = "PduDevice"
+    key_sniffer = "Sniffer"
+    # Internal keys, used internally, not exposed to user's config files.
+    ikey_user_param = "user_params"
+    ikey_testbed_name = "testbed_name"
+    ikey_logger = "log"
+    ikey_logpath = "log_path"
+    ikey_summary_writer = "summary_writer"
+    # module name of controllers packaged in ACTS.
+    m_key_access_point = "access_point"
+    m_key_android_device = "android_device"
+    m_key_attenuator = "attenuator"
+    m_key_bluetooth_pts_device = "bluetooth_pts_device"
+    m_key_fuchsia_device = "fuchsia_device"
+    m_key_iperf_client = "iperf_client"
+    m_key_iperf_server = "iperf_server"
+    m_key_openwrt_ap = "openwrt_ap"
+    m_key_packet_capture = "packet_capture"
+    m_key_packet_sender = "packet_sender"
+    m_key_pdu = "pdu"
+    m_key_sniffer = "sniffer"
+
+    # A list of keys whose values in configs should not be passed to test
+    # classes without unpacking first.
+    reserved_keys = (key_testbed, key_log_path, key_test_paths)
+
+    # Controller names packaged with ACTS.
+    builtin_controller_names = [
+        key_access_point,
+        key_android_device,
+        key_attenuator,
+        key_bluetooth_pts_device,
+        key_fuchsia_device,
+        key_iperf_client,
+        key_iperf_server,
+        key_openwrt_ap,
+        key_packet_capture,
+        key_packet_sender,
+        key_pdu,
+        key_sniffer,
+    ]
+
+
+def get_name_by_value(value):
+    for name, member in Config.__members__.items():
+        if member.value == value:
+            return name
+    return None
+
+
+def get_module_name(name_in_config):
+    """Translates the name of a controller in config file to its module name."""
+    return value_to_value(name_in_config, "m_%s")
+
+
+def value_to_value(ref_value, pattern):
+    """Translates the value of a key to the value of its corresponding key. The
+    corresponding key is chosen based on the variable name pattern.
+    """
+    ref_key_name = get_name_by_value(ref_value)
+    if not ref_key_name:
+        return None
+    target_key_name = pattern % ref_key_name
+    try:
+        return getattr(Config, target_key_name).value
+    except AttributeError:
+        return None
diff --git a/src/antlion/libs/__init__.py b/packages/antlion/libs/__init__.py
similarity index 100%
rename from src/antlion/libs/__init__.py
rename to packages/antlion/libs/__init__.py
diff --git a/src/antlion/libs/logging/__init__.py b/packages/antlion/libs/logging/__init__.py
similarity index 100%
rename from src/antlion/libs/logging/__init__.py
rename to packages/antlion/libs/logging/__init__.py
diff --git a/packages/antlion/libs/logging/log_stream.py b/packages/antlion/libs/logging/log_stream.py
new file mode 100644
index 0000000..47c33d0
--- /dev/null
+++ b/packages/antlion/libs/logging/log_stream.py
@@ -0,0 +1,425 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import os
+import sys
+from logging import FileHandler, Handler, StreamHandler
+from logging.handlers import RotatingFileHandler
+
+from antlion import context
+from antlion.context import ContextLevel
+from antlion.event import event_bus
+from antlion.event.decorators import subscribe_static
+
+
+# yapf: disable
+class LogStyles:
+    NONE         = 0x00
+    LOG_DEBUG    = 0x01
+    LOG_INFO     = 0x02
+    LOG_WARNING  = 0x04
+    LOG_ERROR    = 0x08
+    LOG_CRITICAL = 0x10
+
+    DEFAULT_LEVELS = LOG_DEBUG + LOG_INFO + LOG_ERROR
+    ALL_LEVELS = LOG_DEBUG + LOG_INFO + LOG_WARNING + LOG_ERROR + LOG_CRITICAL
+
+    MONOLITH_LOG  = 0x0100
+    TESTCLASS_LOG = 0x0200
+    TESTCASE_LOG  = 0x0400
+    TO_STDOUT     = 0x0800
+    TO_ACTS_LOG   = 0x1000
+    ROTATE_LOGS   = 0x2000
+
+    ALL_FILE_LOGS = MONOLITH_LOG + TESTCLASS_LOG + TESTCASE_LOG
+
+    LEVEL_NAMES = {
+        LOG_DEBUG: 'debug',
+        LOG_INFO: 'info',
+        LOG_WARNING: 'warning',
+        LOG_ERROR: 'error',
+        LOG_CRITICAL: 'critical',
+    }
+
+    LOG_LEVELS = [
+        LOG_DEBUG,
+        LOG_INFO,
+        LOG_WARNING,
+        LOG_ERROR,
+        LOG_CRITICAL,
+    ]
+
+    LOG_LOCATIONS = [
+        TO_STDOUT,
+        TO_ACTS_LOG,
+        MONOLITH_LOG,
+        TESTCLASS_LOG,
+        TESTCASE_LOG
+    ]
+
+    LEVEL_TO_NO = {
+        LOG_DEBUG: logging.DEBUG,
+        LOG_INFO: logging.INFO,
+        LOG_WARNING: logging.WARNING,
+        LOG_ERROR: logging.ERROR,
+        LOG_CRITICAL: logging.CRITICAL,
+    }
+
+    LOCATION_TO_CONTEXT_LEVEL = {
+        MONOLITH_LOG: ContextLevel.ROOT,
+        TESTCLASS_LOG: ContextLevel.TESTCLASS,
+        TESTCASE_LOG: ContextLevel.TESTCASE
+    }
+# yapf: enable
+
+_log_streams = dict()
+_null_handler = logging.NullHandler()
+
+
+@subscribe_static(context.NewContextEvent)
+def _update_handlers(event):
+    for log_stream in _log_streams.values():
+        log_stream.update_handlers(event)
+
+
+event_bus.register_subscription(_update_handlers.subscription)
+
+
+def create_logger(
+    name,
+    log_name=None,
+    base_path="",
+    subcontext="",
+    log_styles=LogStyles.NONE,
+    stream_format=None,
+    file_format=None,
+):
+    """Creates a Python Logger object with the given attributes.
+
+    Creation through this method will automatically manage the logger in the
+    background for test-related events, such as TestCaseBegin and TestCaseEnd
+    Events.
+
+    Args:
+        name: The name of the LogStream. Used as the file name prefix.
+        log_name: The name of the underlying logger. Use LogStream name as
+            default.
+        base_path: The base path used by the logger.
+        subcontext: Location of logs relative to the test context path.
+        log_styles: An integer or array of integers that are the sum of
+            corresponding flag values in LogStyles. Examples include:
+
+            >>> LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG
+
+            >>> LogStyles.ALL_LEVELS + LogStyles.MONOLITH_LOG
+
+            >>> [LogStyles.DEFAULT_LEVELS + LogStyles.MONOLITH_LOG,
+            >>>  LogStyles.LOG_ERROR + LogStyles.TO_ACTS_LOG]
+        stream_format: Format used for log output to stream
+        file_format: Format used for log output to files
+    """
+    if name in _log_streams:
+        _log_streams[name].cleanup()
+    log_stream = _LogStream(
+        name, log_name, base_path, subcontext, log_styles, stream_format, file_format
+    )
+    _set_logger(log_stream)
+    return log_stream.logger
+
+
+def _set_logger(log_stream):
+    _log_streams[log_stream.name] = log_stream
+    return log_stream
+
+
+class AlsoToLogHandler(Handler):
+    """Logs a message at a given level also to another logger.
+
+    Used for logging messages at a high enough level to the main log, or another
+    logger.
+    """
+
+    def __init__(self, to_logger=None, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._log = logging.getLogger(to_logger)
+
+    def emit(self, record):
+        self._log.log(record.levelno, record.getMessage())
+
+
+class MovableFileHandler(FileHandler):
+    """FileHandler implementation that allows the output file to be changed
+    during operation.
+    """
+
+    def set_file(self, file_name):
+        """Set the target output file to file_name.
+
+        Args:
+            file_name: path to the new output file
+        """
+        self.baseFilename = os.path.abspath(file_name)
+        if self.stream is not None:
+            new_stream = self._open()
+            # An atomic operation redirects the output and closes the old file
+            os.dup2(new_stream.fileno(), self.stream.fileno())
+            self.stream = new_stream
+
+
+class MovableRotatingFileHandler(RotatingFileHandler):
+    """RotatingFileHandler implementation that allows the output file to be
+    changed during operation. Rotated files will automatically adopt the newest
+    output path.
+    """
+
+    set_file = MovableFileHandler.set_file
+
+
+class InvalidStyleSetError(Exception):
+    """Raised when the given LogStyles are an invalid set."""
+
+
+class _LogStream(object):
+    """A class that sets up a logging.Logger object.
+
+    The LogStream class creates a logging.Logger object. LogStream is also
+    responsible for managing the logger when events take place, such as
+    TestCaseEndedEvents and TestCaseBeginEvents.
+
+    Attributes:
+        name: The name of the LogStream.
+        logger: The logger created by this LogStream.
+        base_path: The base path used by the logger. Use logging.log_path
+            as default.
+        subcontext: Location of logs relative to the test context path.
+        stream_format: Format used for log output to stream
+        file_format: Format used for log output to files
+    """
+
+    def __init__(
+        self,
+        name,
+        log_name=None,
+        base_path="",
+        subcontext="",
+        log_styles=LogStyles.NONE,
+        stream_format=None,
+        file_format=None,
+    ):
+        """Creates a LogStream.
+
+        Args:
+            name: The name of the LogStream. Used as the file name prefix.
+            log_name: The name of the underlying logger. Use LogStream name
+                as default.
+            base_path: The base path used by the logger. Use logging.log_path
+                as default.
+            subcontext: Location of logs relative to the test context path.
+            log_styles: An integer or array of integers that are the sum of
+                corresponding flag values in LogStyles. Examples include:
+
+                >>> LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG
+
+                >>> LogStyles.ALL_LEVELS + LogStyles.MONOLITH_LOG
+
+                >>> [LogStyles.DEFAULT_LEVELS + LogStyles.MONOLITH_LOG,
+                >>>  LogStyles.LOG_ERROR + LogStyles.TO_ACTS_LOG]
+            stream_format: Format used for log output to stream
+            file_format: Format used for log output to files
+        """
+        self.name = name
+        if log_name is not None:
+            self.logger = logging.getLogger(log_name)
+        else:
+            self.logger = logging.getLogger(name)
+        # Add a NullHandler to suppress unwanted console output
+        self.logger.addHandler(_null_handler)
+        self.logger.propagate = False
+        self.base_path = base_path or getattr(logging, "log_path", "/tmp/acts_logs")
+        self.subcontext = subcontext
+        context.TestContext.add_base_output_path(self.logger.name, self.base_path)
+        context.TestContext.add_subcontext(self.logger.name, self.subcontext)
+        self.stream_format = stream_format
+        self.file_format = file_format
+        self._testclass_handlers = []
+        self._testcase_handlers = []
+        if not isinstance(log_styles, list):
+            log_styles = [log_styles]
+        self.__validate_styles(log_styles)
+        for log_style in log_styles:
+            self.__handle_style(log_style)
+
+    @staticmethod
+    def __validate_styles(_log_styles_list):
+        """Determines if the given list of styles is valid.
+
+        Terminology:
+            Log-level: any of [DEBUG, INFO, WARNING, ERROR, CRITICAL].
+            Log Location: any of [MONOLITH_LOG, TESTCLASS_LOG,
+                                  TESTCASE_LOG, TO_STDOUT, TO_ACTS_LOG].
+
+        Styles are invalid when any of the below criteria are met:
+            A log-level is not set within an element of the list.
+            A log location is not set within an element of the list.
+            A log-level, log location pair appears twice within the list.
+            A log-level has both TESTCLASS and TESTCASE locations set
+                within the list.
+            ROTATE_LOGS is set without MONOLITH_LOG,
+                TESTCLASS_LOG, or TESTCASE_LOG.
+
+        Raises:
+            InvalidStyleSetError if the given style cannot be achieved.
+        """
+
+        def invalid_style_error(message):
+            raise InvalidStyleSetError(
+                "{LogStyle Set: %s} %s" % (_log_styles_list, message)
+            )
+
+        # Store the log locations that have already been set per level.
+        levels_dict = {}
+        for log_style in _log_styles_list:
+            for level in LogStyles.LOG_LEVELS:
+                if log_style & level:
+                    levels_dict[level] = levels_dict.get(level, LogStyles.NONE)
+                    # Check that a log-level, log location pair has not yet
+                    # been set.
+                    for log_location in LogStyles.LOG_LOCATIONS:
+                        if log_style & log_location:
+                            if log_location & levels_dict[level]:
+                                invalid_style_error(
+                                    "The log location %s for log level %s has "
+                                    "been set multiple times" % (log_location, level)
+                                )
+                            else:
+                                levels_dict[level] |= log_location
+                    # Check that for a given log-level, not more than one
+                    # of MONOLITH_LOG, TESTCLASS_LOG, TESTCASE_LOG is set.
+                    locations = levels_dict[level] & LogStyles.ALL_FILE_LOGS
+                    valid_locations = [
+                        LogStyles.TESTCASE_LOG,
+                        LogStyles.TESTCLASS_LOG,
+                        LogStyles.MONOLITH_LOG,
+                        LogStyles.NONE,
+                    ]
+                    if locations not in valid_locations:
+                        invalid_style_error(
+                            "More than one of MONOLITH_LOG, TESTCLASS_LOG, "
+                            "TESTCASE_LOG is set for log level %s." % level
+                        )
+            if log_style & LogStyles.ALL_LEVELS == 0:
+                invalid_style_error(f"LogStyle {log_style} needs to set a log level.")
+            if log_style & ~LogStyles.ALL_LEVELS == 0:
+                invalid_style_error(
+                    f"LogStyle {log_style} needs to set a log location."
+                )
+            if log_style & LogStyles.ROTATE_LOGS and not log_style & (
+                LogStyles.MONOLITH_LOG
+                | LogStyles.TESTCLASS_LOG
+                | LogStyles.TESTCASE_LOG
+            ):
+                invalid_style_error(
+                    "LogStyle %s has ROTATE_LOGS set, but does "
+                    "not specify a log type." % log_style
+                )
+
+    @staticmethod
+    def __create_rotating_file_handler(filename):
+        """Generates a callable to create an appropriate RotatingFileHandler."""
+        # Magic number explanation: 10485760 == 10MB
+        return MovableRotatingFileHandler(filename, maxBytes=10485760, backupCount=5)
+
+    @staticmethod
+    def __get_file_handler_creator(log_style):
+        """Gets the callable to create the correct FileLogHandler."""
+        create_file_handler = MovableFileHandler
+        if log_style & LogStyles.ROTATE_LOGS:
+            create_file_handler = _LogStream.__create_rotating_file_handler
+        return create_file_handler
+
+    @staticmethod
+    def __get_lowest_log_level(log_style):
+        """Returns the lowest log level's LogStyle for the given log_style."""
+        for log_level in LogStyles.LOG_LEVELS:
+            if log_level & log_style:
+                return log_level
+        return LogStyles.NONE
+
+    def __get_current_output_dir(self, depth=ContextLevel.TESTCASE):
+        """Gets the current output directory from the context system. Make the
+        directory if it doesn't exist.
+
+        Args:
+            depth: The desired level of the output directory. For example,
+                the TESTCLASS level would yield the directory associated with
+                the current test class context, even if the test is currently
+                within a test case.
+        """
+        curr_context = context.get_current_context(depth)
+        return curr_context.get_full_output_path(self.logger.name)
+
+    def __create_handler(self, creator, level, location):
+        """Creates the FileHandler.
+
+        Args:
+            creator: The callable that creates the FileHandler
+            level: The logging level (INFO, DEBUG, etc.) for this handler.
+            location: The log location (MONOLITH, TESTCLASS, TESTCASE) for this
+                handler.
+
+        Returns: A FileHandler
+        """
+        directory = self.__get_current_output_dir(
+            LogStyles.LOCATION_TO_CONTEXT_LEVEL[location]
+        )
+        base_name = f"{self.name}_{LogStyles.LEVEL_NAMES[level]}.txt"
+        handler = creator(os.path.join(directory, base_name))
+        handler.setLevel(LogStyles.LEVEL_TO_NO[level])
+        if self.file_format:
+            handler.setFormatter(self.file_format)
+        return handler
+
+    def __handle_style(self, log_style):
+        """Creates the handlers described in the given log_style."""
+        handler_creator = self.__get_file_handler_creator(log_style)
+
+        # Handle streaming logs to STDOUT or the ACTS Logger
+        if log_style & (LogStyles.TO_ACTS_LOG | LogStyles.TO_STDOUT):
+            lowest_log_level = self.__get_lowest_log_level(log_style)
+
+            if log_style & LogStyles.TO_ACTS_LOG:
+                handler = AlsoToLogHandler()
+            else:  # LogStyles.TO_STDOUT:
+                handler = StreamHandler(sys.stdout)
+                if self.stream_format:
+                    handler.setFormatter(self.stream_format)
+
+            handler.setLevel(LogStyles.LEVEL_TO_NO[lowest_log_level])
+            self.logger.addHandler(handler)
+
+        # Handle streaming logs to log-level files
+        for log_level in LogStyles.LOG_LEVELS:
+            log_location = log_style & LogStyles.ALL_FILE_LOGS
+            if not (log_style & log_level and log_location):
+                continue
+
+            handler = self.__create_handler(handler_creator, log_level, log_location)
+            self.logger.addHandler(handler)
+
+            if log_style & LogStyles.TESTCLASS_LOG:
+                self._testclass_handlers.append(handler)
+            if log_style & LogStyles.TESTCASE_LOG:
+                self._testcase_handlers.append(handler)
diff --git a/src/antlion/libs/ota/__init__.py b/packages/antlion/libs/ota/__init__.py
similarity index 100%
rename from src/antlion/libs/ota/__init__.py
rename to packages/antlion/libs/ota/__init__.py
diff --git a/src/antlion/libs/ota/ota_runners/__init__.py b/packages/antlion/libs/ota/ota_runners/__init__.py
similarity index 100%
rename from src/antlion/libs/ota/ota_runners/__init__.py
rename to packages/antlion/libs/ota/ota_runners/__init__.py
diff --git a/packages/antlion/libs/ota/ota_runners/ota_runner.py b/packages/antlion/libs/ota/ota_runners/ota_runner.py
new file mode 100644
index 0000000..848290a
--- /dev/null
+++ b/packages/antlion/libs/ota/ota_runners/ota_runner.py
@@ -0,0 +1,225 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
import time
from zipfile import ZipFile

# Time (in seconds) to wait for the SL4A service to finish setting up after an
# install attempt before checking whether the install succeeded.
# Fix: these were previously bare string literals *above* the assignments,
# which are no-op statements, not documentation.
SL4A_SERVICE_SETUP_TIME = 5

# Path of the metadata file within an OTA package zip.
OTA_PACKAGE_METADATA_PATH = "META-INF/com/android/metadata"


class OtaError(Exception):
    """Raised when an error in the OTA Update process occurs."""


class InvalidOtaUpdateError(OtaError):
    """Raised when the update from one version to another is not valid."""


class OtaRunner(object):
    """The base class for all OTA Update Runners.

    Subclasses provide the package locations (get_ota_package, get_sl4a_apk)
    and track whether another update may run (can_update).
    """

    def __init__(self, ota_tool, android_device):
        self.ota_tool = ota_tool
        self.android_device = android_device
        self.serial = self.android_device.serial

    def _update(self):
        """Runs the full update flow on the device.

        Stops device services, runs the OTA tool, waits for reboot,
        re-installs SL4A (unless skipped), restarts services, and runs the
        tool's cleanup.

        Raises:
            OtaError: if the device's build fingerprint did not change.
        """
        post_build_id = self.get_post_build_id()
        log = self.android_device.log
        old_info = self.android_device.adb.getprop("ro.build.fingerprint")
        log.info("Starting Update. Beginning build info: %s", old_info)
        log.info("Stopping services.")
        self.android_device.stop_services()
        log.info("Beginning tool.")
        self.ota_tool.update(self)
        log.info("Tool finished. Waiting for boot completion.")
        self.android_device.wait_for_boot_completion()
        new_info = self.android_device.adb.getprop("ro.build.fingerprint")
        if not old_info or old_info == new_info:
            raise OtaError(
                "The device was not updated to a new build. "
                "Previous build: %s. Current build: %s. "
                "Expected build: %s" % (old_info, new_info, post_build_id)
            )
        log.info("Boot completed. Rooting adb.")
        self.android_device.root_adb()
        log.info("Root complete.")
        if self.android_device.skip_sl4a:
            self.android_device.log.info("Skipping SL4A install.")
        else:
            # The install may flake; retry up to three times, giving the SL4A
            # service time to settle between attempts.
            for _ in range(3):
                self.android_device.log.info(
                    'Re-installing SL4A from "%s".', self.get_sl4a_apk()
                )
                self.android_device.adb.install(
                    f"-r -g {self.get_sl4a_apk()}", ignore_status=True
                )
                time.sleep(SL4A_SERVICE_SETUP_TIME)
                if self.android_device.is_sl4a_installed():
                    break
        log.info("Starting services.")
        self.android_device.start_services()
        self.android_device.update_sdk_api_level()
        log.info("Services started. Running ota tool cleanup.")
        self.ota_tool.cleanup(self)
        log.info("Cleanup complete.")

    def get_ota_package_metadata(self, requested_field):
        """Returns a value from the OTA package's metadata file.

        Args:
            requested_field: the name of the metadata field

        Returns:
            The value for the requested field, or None if the field (or the
            metadata file itself) cannot be found.
        """
        # Fix: use a context manager so the zip's file handle is not leaked.
        with ZipFile(self.get_ota_package(), "r") as ota_zip:
            if OTA_PACKAGE_METADATA_PATH not in ota_zip.namelist():
                return None
            field_bytes = requested_field.encode("utf-8")
            with ota_zip.open(OTA_PACKAGE_METADATA_PATH) as metadata:
                for line in metadata.readlines():
                    # Fix: match the key exactly. The previous startswith()
                    # check also matched longer keys sharing the prefix (e.g.
                    # asking for "post-build" matched "post-build-incremental"
                    # and returned a garbage value).
                    key, sep, value = line.partition(b"=")
                    if sep and key == field_bytes:
                        return value.decode("utf-8").strip()
        return None

    def validate_update(self):
        """Raises an error if updating to the next build is not valid.

        Raises:
            InvalidOtaUpdateError if the ota version is not valid, or cannot be
                validated.
        """
        # The timestamp the current device build was created at.
        cur_img_timestamp = self.android_device.adb.getprop("ro.build.date.utc")
        ota_img_timestamp = self.get_ota_package_metadata("post-timestamp")

        if ota_img_timestamp is None:
            raise InvalidOtaUpdateError(
                "Unable to find the timestamp for the OTA build."
            )

        try:
            if int(ota_img_timestamp) <= int(cur_img_timestamp):
                cur_fingerprint = self.android_device.adb.getprop(
                    "ro.bootimage.build.fingerprint"
                )
                ota_fingerprint = self.get_post_build_id()
                raise InvalidOtaUpdateError(
                    "The OTA image comes from an earlier build than the "
                    "source build. Current build: Time: %s -- %s, "
                    "OTA build: Time: %s -- %s"
                    % (
                        cur_img_timestamp,
                        cur_fingerprint,
                        ota_img_timestamp,
                        ota_fingerprint,
                    )
                )
        except ValueError as e:
            # Fix: the current and OTA timestamps were swapped in this
            # message; also chain the original ValueError for debuggability.
            raise InvalidOtaUpdateError(
                "Unable to parse timestamps. Current timestamp: %s, OTA "
                "timestamp: %s" % (cur_img_timestamp, ota_img_timestamp)
            ) from e

    def get_post_build_id(self):
        """Returns the post-build ID found within the OTA package metadata.

        Returns None if the post-build ID cannot be found (the previous
        docstring claimed a raise that never happened).
        """
        return self.get_ota_package_metadata("post-build")

    def can_update(self):
        """Whether or not an update package is available for the device.

        Subclasses must override this method.
        """
        # Fix: the original *returned* a NotImplementedError instance, which
        # is truthy — callers checking can_update() on the base class would
        # silently proceed instead of failing fast.
        raise NotImplementedError()

    def get_ota_package(self):
        """Returns the path to the current OTA package. Subclass must override."""
        raise NotImplementedError()

    def get_sl4a_apk(self):
        """Returns the path to the current SL4A apk. Subclass must override."""
        raise NotImplementedError()
+
+
class SingleUseOtaRunner(OtaRunner):
    """An OtaRunner that may only perform a single update.

    Calling update() a second time raises an OtaError. Callers can avoid the
    error by consulting can_update() before calling update().
    """

    def __init__(self, ota_tool, android_device, ota_package, sl4a_apk):
        super().__init__(ota_tool, android_device)
        self._ota_package = ota_package
        self._sl4a_apk = sl4a_apk
        # Flips to True on the first (and only) update attempt.
        self._called = False

    def can_update(self):
        """True while the single allowed update has not yet been attempted."""
        return not self._called

    def update(self):
        """Starts the update process."""
        if not self.can_update():
            raise OtaError(
                "A SingleUseOtaTool instance cannot update a device multiple times."
            )
        self._called = True
        self._update()

    def get_ota_package(self):
        """Returns the single configured OTA package path."""
        return self._ota_package

    def get_sl4a_apk(self):
        """Returns the single configured SL4A apk path."""
        return self._sl4a_apk
+
+
class MultiUseOtaRunner(OtaRunner):
    """An OtaRunner backed by a queue of update packages.

    update() may be called once per provided package; any further call raises
    an OtaError. Callers can avoid the error by checking can_update() before
    calling update().
    """

    def __init__(self, ota_tool, android_device, ota_packages, sl4a_apks):
        super().__init__(ota_tool, android_device)
        self._ota_packages = ota_packages
        self._sl4a_apks = sl4a_apks
        # Index of the next package to apply; doubles as the count of
        # completed updates.
        self.current_update_number = 0

    def can_update(self):
        """True while unapplied packages remain in the queue."""
        return self.current_update_number != len(self._ota_packages)

    def update(self):
        """Applies the next queued OTA package to the device."""
        if not self.can_update():
            raise OtaError(
                "This MultiUseOtaRunner has already updated all "
                "given packages onto the phone."
            )
        self._update()
        self.current_update_number += 1

    def get_ota_package(self):
        """Returns the OTA package currently queued for installation."""
        return self._ota_packages[self.current_update_number]

    def get_sl4a_apk(self):
        """Returns the SL4A apk paired with the current OTA package."""
        return self._sl4a_apks[self.current_update_number]
diff --git a/packages/antlion/libs/ota/ota_runners/ota_runner_factory.py b/packages/antlion/libs/ota/ota_runners/ota_runner_factory.py
new file mode 100644
index 0000000..f5b09f4
--- /dev/null
+++ b/packages/antlion/libs/ota/ota_runners/ota_runner_factory.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from antlion.libs.ota.ota_runners import ota_runner
+from antlion.libs.ota.ota_tools import adb_sideload_ota_tool, ota_tool_factory
+
# Cache of OtaRunners keyed by AndroidDevice, so that a device is never
# assigned more than one runner (see create_from_package).
_bound_devices = {}

# Defaults used when the ACTS config does not name an OTA tool/command.
DEFAULT_OTA_TOOL = adb_sideload_ota_tool.AdbSideloadOtaTool.__name__
DEFAULT_OTA_COMMAND = "adb"
+
+
class OtaConfigError(Exception):
    """Raised when there is a problem in the test configuration file."""
+
+
def create_from_configs(config, android_device):
    """Creates a new OtaRunner for the given AndroidDevice from the ACTS config.

    After an OtaTool is assigned to a device, another OtaTool cannot be created
    for that device. This will prevent OTA Update tests that accidentally flash
    the same build onto a device more than once.

    Args:
        config: the ACTS config user_params.
        android_device: The device to run the OTA Update on.

    Returns:
        An OtaRunner responsible for updating the given device.

    Raises:
        OtaConfigError: if required config values are missing or malformed.
    """
    # Default to adb sideload when the config does not name a tool.
    try:
        ota_tool_class_name = get_ota_value_from_config(
            config, "ota_tool", android_device
        )
    except OtaConfigError:
        ota_tool_class_name = DEFAULT_OTA_TOOL

    if ota_tool_class_name not in config:
        # Fix: compare strings with !=, not "is not". A name read from the
        # config is not guaranteed to be the same string *object* as the
        # module default, so the identity check only worked by interning luck.
        if ota_tool_class_name != DEFAULT_OTA_TOOL:
            raise OtaConfigError(
                "If the ota_tool is overloaded, the path to the tool must be "
                'added to the ACTS config file under {"OtaToolName": '
                '"path/to/tool"} (in this case, {"%s": "path/to/tool"}.'
                % ota_tool_class_name
            )
        command = DEFAULT_OTA_COMMAND
    else:
        command = config[ota_tool_class_name]
        if isinstance(command, list):
            # The tool location may be given as a single-element list.
            if len(command) == 1:
                command = command[0]
            else:
                raise OtaConfigError(
                    'Config value for "%s" must be either a string or a list '
                    "of exactly one element" % ota_tool_class_name
                )

    ota_package = get_ota_value_from_config(config, "ota_package", android_device)
    ota_sl4a = get_ota_value_from_config(config, "ota_sl4a", android_device)
    if type(ota_sl4a) is not type(ota_package):
        raise OtaConfigError(
            "The ota_package and ota_sl4a must either both be strings, or "
            'both be lists. Device with serial "%s" has mismatched types.'
            % android_device.serial
        )
    return create(ota_package, ota_sl4a, android_device, ota_tool_class_name, command)
+
+
def create(
    ota_package,
    ota_sl4a,
    android_device,
    ota_tool_class_name=DEFAULT_OTA_TOOL,
    command=DEFAULT_OTA_COMMAND,
    use_cached_runners=True,
):
    """Builds an OtaRunner from explicit values rather than an ACTS config.

    Args:
        ota_package: A string or list of strings corresponding to the
            update.zip package location(s) for running an OTA update.
        ota_sl4a: A string or list of strings corresponding to the
            sl4a.apk package location(s) for running an OTA update.
        ota_tool_class_name: The class name for the desired ota_tool
        command: The command line tool name for the updater
        android_device: The AndroidDevice to run the OTA Update on.
        use_cached_runners: Whether or not to use runners cached by previous
            create calls.

    Returns:
        An OtaRunner with the given properties from the arguments.
    """
    tool = ota_tool_factory.create(ota_tool_class_name, command)
    return create_from_package(
        ota_package, ota_sl4a, android_device, tool, use_cached_runners
    )
+
+
def create_from_package(
    ota_package, ota_sl4a, android_device, ota_tool, use_cached_runners=True
):
    """Creates (or returns the cached) OtaRunner for the given device.

    Args:
        ota_package: A string or list of strings corresponding to the
            update.zip package location(s) for running an OTA update.
        ota_sl4a: A string or list of strings corresponding to the
            sl4a.apk package location(s) for running an OTA update.
        ota_tool: The OtaTool to be paired with the returned OtaRunner
        android_device: The AndroidDevice to run the OTA Update on.
        use_cached_runners: Whether or not to use runners cached by previous
            create calls.

    Returns:
        An OtaRunner with the given properties from the arguments.

    Raises:
        TypeError: if ota_package/ota_sl4a have mismatched or unsupported
            types.
    """
    if android_device in _bound_devices and use_cached_runners:
        # Fix: the format string expects a device argument, but the original
        # call passed none, making the logging call itself fail to format.
        logging.warning(
            "Android device %s has already been assigned an "
            "OtaRunner. Returning previously created runner.",
            android_device.serial,
        )
        return _bound_devices[android_device]

    if type(ota_package) is not type(ota_sl4a):
        raise TypeError(
            "The ota_package and ota_sl4a must either both be strings, or "
            'both be lists. Device with serial "%s" has requested mismatched '
            "types." % android_device.serial
        )

    if isinstance(ota_package, str):
        runner = ota_runner.SingleUseOtaRunner(
            ota_tool, android_device, ota_package, ota_sl4a
        )
    elif isinstance(ota_package, list):
        runner = ota_runner.MultiUseOtaRunner(
            ota_tool, android_device, ota_package, ota_sl4a
        )
    else:
        raise TypeError(
            'The "ota_package" value in the acts config must be '
            "either a list or a string."
        )

    # Remember the runner so later calls return the same instance.
    _bound_devices[android_device] = runner
    return runner
+
+
def get_ota_value_from_config(config, key, android_device):
    """Looks up a device-specific OTA config value.

    When the config's "ota_map" maps this device's serial to a name, the value
    is read from "<key>_<name>"; otherwise the plain key is used.

    Args:
        config: The ACTS config
        key: The base key desired (ota_tool, ota_sl4a, or ota_package)
        android_device: An AndroidDevice

    Returns: The value at the specified key.
    Throws: OtaConfigError if the value cannot be determined from the config.
    """
    suffix = ""
    if "ota_map" in config and android_device.serial in config["ota_map"]:
        suffix = f"_{config['ota_map'][android_device.serial]}"

    ota_package_key = f"{key}{suffix}"
    if ota_package_key in config:
        return config[ota_package_key]

    if suffix:
        raise OtaConfigError(
            "Asked for an OTA Update without specifying a required value. "
            '"ota_map" has entry {"%s": "%s"}, but there is no '
            'corresponding entry {"%s":"/path/to/file"} found within the '
            "ACTS config." % (android_device.serial, suffix[1:], ota_package_key)
        )
    raise OtaConfigError(
        "Asked for an OTA Update without specifying a required value. "
        '"ota_map" does not exist or have a key for serial "%s", and '
        'the default value entry "%s" cannot be found within the ACTS '
        "config." % (android_device.serial, ota_package_key)
    )
diff --git a/src/antlion/libs/ota/ota_tools/__init__.py b/packages/antlion/libs/ota/ota_tools/__init__.py
similarity index 100%
rename from src/antlion/libs/ota/ota_tools/__init__.py
rename to packages/antlion/libs/ota/ota_tools/__init__.py
diff --git a/packages/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py b/packages/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
new file mode 100644
index 0000000..ad9e883
--- /dev/null
+++ b/packages/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from antlion.libs.ota.ota_tools.ota_tool import OtaTool
+
+# OTA Packages can be upwards of 1 GB. This may take some time to transfer over
+# USB 2.0.
+PUSH_TIMEOUT = 10 * 60
+
+
class AdbSideloadOtaTool(OtaTool):
    """Updates an AndroidDevice using adb sideload."""

    def __init__(self, ignored_command):
        # The command argument is deliberately unused: the ACTS adb version is
        # used to prevent differing adb versions from constantly killing adbd.
        super().__init__(ignored_command)

    def update(self, ota_runner):
        """Sideloads the runner's OTA package onto the runner's device."""
        device = ota_runner.android_device
        logging.info("Rooting adb")
        device.root_adb()
        logging.info("Rebooting to sideload")
        device.adb.reboot("sideload")
        device.adb.wait_for_sideload()
        logging.info("Sideloading ota package")
        package_path = ota_runner.get_ota_package()
        logging.info(f'Running adb sideload with package "{package_path}"')
        device.adb.sideload(package_path, timeout=PUSH_TIMEOUT)
        logging.info("Sideload complete. Waiting for device to come back up.")
        device.adb.wait_for_recovery()
        device.reboot(stop_at_lock_screen=True)
        logging.info("Device is up. Update complete.")
diff --git a/src/antlion/libs/ota/ota_tools/ota_tool.py b/packages/antlion/libs/ota/ota_tools/ota_tool.py
similarity index 100%
rename from src/antlion/libs/ota/ota_tools/ota_tool.py
rename to packages/antlion/libs/ota/ota_tools/ota_tool.py
diff --git a/packages/antlion/libs/ota/ota_tools/ota_tool_factory.py b/packages/antlion/libs/ota/ota_tools/ota_tool_factory.py
new file mode 100644
index 0000000..0eff707
--- /dev/null
+++ b/packages/antlion/libs/ota/ota_tools/ota_tool_factory.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion.libs.ota.ota_tools.adb_sideload_ota_tool import AdbSideloadOtaTool
+from antlion.libs.ota.ota_tools.update_device_ota_tool import UpdateDeviceOtaTool
+
# Maps an OtaTool class name to a factory taking the command-line tool
# location. The classes themselves are the factories; the previous
# one-argument lambda wrappers were redundant.
_CONSTRUCTORS = {
    AdbSideloadOtaTool.__name__: AdbSideloadOtaTool,
    UpdateDeviceOtaTool.__name__: UpdateDeviceOtaTool,
}

# Cache of already-constructed tools, keyed by class name (see create()).
_constructed_tools = {}
+
+
def create(ota_tool_class, command):
    """Returns an OtaTool with the given class name.

    If the tool has already been created, the existing instance will be
    returned.

    Args:
        ota_tool_class: the class/type of the tool you wish to use.
        command: the command line tool being used.

    Returns:
        An OtaTool.

    Raises:
        KeyError: if ota_tool_class does not name a known OtaTool.
    """
    cached_tool = _constructed_tools.get(ota_tool_class)
    if cached_tool is not None:
        return cached_tool

    if ota_tool_class not in _CONSTRUCTORS:
        raise KeyError(
            "Given Ota Tool class name does not match a known "
            'name. Found "%s". Expected any of %s. If this tool '
            "does exist, add it to the _CONSTRUCTORS dict in this "
            "module." % (ota_tool_class, _CONSTRUCTORS.keys())
        )

    new_tool = _CONSTRUCTORS[ota_tool_class](command)
    _constructed_tools[ota_tool_class] = new_tool
    return new_tool
diff --git a/packages/antlion/libs/ota/ota_tools/update_device_ota_tool.py b/packages/antlion/libs/ota/ota_tools/update_device_ota_tool.py
new file mode 100644
index 0000000..5e4f709
--- /dev/null
+++ b/packages/antlion/libs/ota/ota_tools/update_device_ota_tool.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import shutil
+import tempfile
+
+from antlion import utils
+from antlion.libs.ota.ota_tools import ota_tool
+from antlion.libs.proc import job
+
+# OTA Packages can be upwards of 1 GB. This may take some time to transfer over
+# USB 2.0. A/B devices must also complete the update in the background.
+UPDATE_TIMEOUT = 60 * 60
+UPDATE_LOCATION = "/data/ota_package/update.zip"
+
+
class UpdateDeviceOtaTool(ota_tool.OtaTool):
    """Runs an OTA Update with system/update_engine/scripts/update_device.py."""

    def __init__(self, command):
        super().__init__(command)

        # The given command points at a zip that bundles update_device.py;
        # unpack it to a temp dir and run the script from there.
        self.unzip_path = tempfile.mkdtemp()
        utils.unzip_maintain_permissions(self.command, self.unzip_path)
        self.command = os.path.join(self.unzip_path, "update_device.py")

    def update(self, ota_runner):
        """Applies the runner's OTA package via update_device.py."""
        logging.info("Forcing adb to be in root mode.")
        ota_runner.android_device.root_adb()
        update_command = (
            f"python3 {self.command} -s {ota_runner.serial} "
            f"{ota_runner.get_ota_package()}"
        )
        logging.info(f"Running {update_command}")
        result = job.run(update_command, timeout_sec=UPDATE_TIMEOUT)
        logging.info(f"Output: {result.stdout}")

        logging.info("Rebooting device for update to go live.")
        ota_runner.android_device.reboot(stop_at_lock_screen=True)
        logging.info("Reboot sent.")

    def __del__(self):
        """Delete the unzipped update_device folder before ACTS exits."""
        shutil.rmtree(self.unzip_path)
diff --git a/packages/antlion/libs/ota/ota_updater.py b/packages/antlion/libs/ota/ota_updater.py
new file mode 100644
index 0000000..6db9649
--- /dev/null
+++ b/packages/antlion/libs/ota/ota_updater.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from antlion import utils
+from antlion.libs.ota.ota_runners import ota_runner_factory
+
# Maps each AndroidDevice to the OtaRunner assigned to it by initialize().
# Fix: this was previously a bare string literal above the assignment, which
# is a no-op statement, not documentation.
ota_runners = {}
+
+
def initialize(user_params, android_devices):
    """Initialize OtaRunners for each device.

    Args:
        user_params: The user_params from the ACTS config.
        android_devices: The android_devices in the test.
    """
    for device in android_devices:
        ota_runners[device] = ota_runner_factory.create_from_configs(
            user_params, device
        )
+
+
def _check_initialization(android_device):
    """Raises if the given device was never initialized for OTA updates.

    Args:
        android_device: the device to look up.

    Raises:
        KeyError: if ota_updater.initialize() was not called for this device.
    """
    if android_device not in ota_runners:
        # Fix: the original message was missing a space after "call" and
        # rendered as "...forget to callota_updater.initialize()?".
        raise KeyError(
            'Android Device with serial "%s" has not been '
            "initialized for OTA Updates. Did you forget to call "
            "ota_updater.initialize()?" % android_device.serial
        )
+
+
def update(android_device, ignore_update_errors=False):
    """Update a given AndroidDevice.

    Args:
        android_device: The device to update
        ignore_update_errors: Whether or not to ignore update errors such as
           no more updates available for a given device. Default is false.

    Raises:
        OtaError if ignore_update_errors is false and the OtaRunner has run out
        of packages to update the phone with.
    """
    _check_initialization(android_device)
    # Validation errors always propagate; only the update itself is guarded.
    ota_runners[android_device].validate_update()
    try:
        ota_runners[android_device].update()
    except Exception as e:
        if ignore_update_errors:
            return
        android_device.log.error(e)
        android_device.take_bug_report("ota_update", utils.get_current_epoch_time())
        # Fix: bare raise re-raises the active exception without appending
        # this frame to the traceback the way "raise e" does.
        raise
+
+
def can_update(android_device):
    """Whether or not a device can be updated."""
    _check_initialization(android_device)
    runner = ota_runners[android_device]
    return runner.can_update()
diff --git a/src/antlion/libs/proc/__init__.py b/packages/antlion/libs/proc/__init__.py
similarity index 100%
rename from src/antlion/libs/proc/__init__.py
rename to packages/antlion/libs/proc/__init__.py
diff --git a/packages/antlion/libs/proc/job.py b/packages/antlion/libs/proc/job.py
new file mode 100644
index 0000000..4d9eab6
--- /dev/null
+++ b/packages/antlion/libs/proc/job.py
@@ -0,0 +1,213 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import subprocess
+import time
+
+from antlion.runner import CompletedProcess
+
+
+class Error(Exception):
+    """Raised when a command fails; fatal to the test unless caught."""
+
+    def __init__(self, result):
+        super().__init__(result)
+        # Keep the full Result so callers can inspect stdout/stderr/exit code.
+        self.result: Result = result
+
+
+class TimeoutError(Error):
+    """Thrown when a BackgroundJob times out on wait.
+
+    NOTE: this intentionally shadows the builtin ``TimeoutError`` within this
+    module; callers catching it must reference ``job.TimeoutError``.
+    """
+
+
+class Result(CompletedProcess):
+    """Command execution result.
+
+    Contains information on subprocess execution after it has exited.
+
+    Attributes:
+        command: An array containing the command and all arguments that
+                 was executed.
+        exit_status: Integer exit code of the process.
+        stdout_raw: The raw bytes output from standard out.
+        stderr_raw: The raw bytes output from standard error
+        duration: How long the process ran for.
+        did_timeout: True if the program timed out and was killed.
+    """
+
+    def __init__(
+        self,
+        command: str | list[str],
+        stdout: bytes,
+        stderr: bytes,
+        exit_status: int,
+        duration: float = 0,
+        did_timeout: bool = False,
+        encoding: str = "utf-8",
+    ) -> None:
+        """
+        Args:
+            command: The command that was run. This will be a list containing
+                     the executed command and all args.
+            stdout: The raw bytes that standard output gave.
+            stderr: The raw bytes that standard error gave.
+            exit_status: The exit status of the command.
+            duration: How long the command ran.
+            did_timeout: True if the command timed out.
+            encoding: The encoding standard that the program uses.
+        """
+        self.command = command
+        self.exit_status = exit_status
+        self._raw_stdout = stdout
+        self._raw_stderr = stderr
+        self._stdout_str: str | None = None
+        self._stderr_str: str | None = None
+        self._encoding = encoding
+        self.duration = duration
+        self.did_timeout = did_timeout
+
+    @property
+    def stdout(self) -> str:
+        """String representation of standard output."""
+        if not self._stdout_str:
+            self._stdout_str = self._raw_stdout.decode(
+                encoding=self._encoding, errors="replace"
+            )
+            self._stdout_str = self._stdout_str.strip()
+        return self._stdout_str
+
+    @property
+    def stderr(self) -> str:
+        """String representation of standard error."""
+        if not self._stderr_str:
+            self._stderr_str = self._raw_stderr.decode(
+                encoding=self._encoding, errors="replace"
+            )
+            self._stderr_str = self._stderr_str.strip()
+        return self._stderr_str
+
+    @property
+    def returncode(self) -> int:
+        return self.exit_status
+
+    def __repr__(self) -> str:
+        if self.did_timeout:
+            prefix = f"Command timed out"
+        else:
+            prefix = f"Command exited with {self.exit_status}"
+
+        return (
+            f'{prefix} after {self.duration}s: {" ".join(self.command)}\n'
+            f"stdout: {self._raw_stdout}\n"
+            f"stderr: {self._raw_stderr}"
+        )
+
+
+def run(
+    command: str | list[str],
+    timeout_sec: float | None = 60,
+    ignore_status: bool = False,
+    env: dict[str, str] | None = None,
+    io_encoding: str = "utf-8",
+) -> Result:
+    """Execute a command in a subprocess and return its output.
+
+    Commands can be either shell commands (given as strings) or the
+    path and arguments to an executable (given as a list).  This function
+    will block until the subprocess finishes or times out.
+
+    Args:
+        command: The command to execute.
+        timeout_sec: number seconds to wait for command to finish.
+        ignore_status: True to ignore the exit code of the remote
+                       subprocess.  Note that if you do ignore status codes,
+                       you should handle non-zero exit codes explicitly.
+        env: environment variables to setup on the remote host.
+        io_encoding: unicode encoding of command output.
+
+    Returns:
+        Result of the ssh command.
+
+    Raises:
+        job.TimeoutError: When the remote command took to long to execute.
+        Error: When the command had an error executing and ignore_status==False.
+    """
+    start_time = time.time()
+    proc = subprocess.Popen(
+        command,
+        env=env,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        shell=not isinstance(command, list),
+    )
+    # Wait on the process terminating
+    timed_out = False
+    out = bytes()
+    err = bytes()
+    try:
+        (out, err) = proc.communicate(timeout=timeout_sec)
+    except subprocess.TimeoutExpired:
+        timed_out = True
+        proc.kill()
+        proc.wait()
+
+    result = Result(
+        command=command,
+        stdout=out,
+        stderr=err,
+        exit_status=proc.returncode,
+        duration=time.time() - start_time,
+        encoding=io_encoding,
+        did_timeout=timed_out,
+    )
+    logging.debug(result)
+
+    if timed_out:
+        raise TimeoutError(result)
+
+    if not ignore_status and proc.returncode != 0:
+        raise Error(result)
+
+    return result
+
+
+def run_async(command, env=None):
+    """Execute a command in a subproccess asynchronously.
+
+    It is the callers responsibility to kill/wait on the resulting
+    subprocess.Popen object.
+
+    Commands can be either shell commands (given as strings) or the
+    path and arguments to an executable (given as a list).  This function
+    will not block.
+
+    Args:
+        command: The command to execute. Can be either a string or a list.
+        env: dict enviroment variables to setup on the remote host.
+
+    Returns:
+        A subprocess.Popen object representing the created subprocess.
+
+    """
+    proc = subprocess.Popen(
+        command,
+        env=env,
+        preexec_fn=os.setpgrp,
+        shell=not isinstance(command, list),
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+    )
+    logging.debug("command %s started with pid %s", command, proc.pid)
+    return proc
diff --git a/packages/antlion/libs/proc/process.py b/packages/antlion/libs/proc/process.py
new file mode 100644
index 0000000..40ff342
--- /dev/null
+++ b/packages/antlion/libs/proc/process.py
@@ -0,0 +1,277 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import shlex
+import signal
+import subprocess
+import sys
+import time
+from threading import Thread
+
+_on_windows = sys.platform == "win32"
+
+
+class ProcessError(Exception):
+    """Raised when invalid operations are run on a Process.
+
+    For example: starting a Process that has already started, or stopping a
+    Process that is already being stopped.
+    """
+
+
+class Process(object):
+    """A Process object used to run various commands.
+
+    Attributes:
+        _command: The initial command to run.
+        _subprocess_kwargs: The kwargs to send to Popen for more control over
+                            execution.
+        _process: The subprocess.Popen object currently executing a process.
+        _listening_thread: The thread that is listening for the process to stop.
+        _redirection_thread: The thread that is redirecting process output.
+        _on_output_callback: The callback to call when output is received.
+        _on_terminate_callback: The callback to call when the process terminates
+                                without stop() being called first.
+        _started: Whether or not start() was called.
+        _stopped: Whether or not stop() was called.
+    """
+
+    def __init__(self, command, **kwargs):
+        """Creates a Process object.
+
+        Note that this constructor does not begin the process. To start the
+        process, use Process.start().
+        """
+        # Split command string into list if shell=True is not specified
+        self._use_shell = kwargs.get("shell", False)
+        if not self._use_shell and isinstance(command, str):
+            command = shlex.split(command)
+        self._command = command
+        self._subprocess_kwargs = kwargs
+        if _on_windows:
+            self._subprocess_kwargs["creationflags"] = (
+                subprocess.CREATE_NEW_PROCESS_GROUP
+            )
+        else:
+            self._subprocess_kwargs["start_new_session"] = True
+        self._process = None
+
+        self._listening_thread = None
+        self._redirection_thread = None
+        self._on_output_callback = lambda *args, **kw: None
+        self._binary_output = False
+        self._on_terminate_callback = lambda *args, **kw: ""
+
+        self._started = False
+        self._stopped = False
+
+    def set_on_output_callback(self, on_output_callback, binary=False):
+        """Sets the on_output_callback function.
+
+        Args:
+            on_output_callback: The function to be called when output is sent to
+                the output. The output callback has the following signature:
+
+                >>> def on_output_callback(output_line):
+                >>>     return None
+
+            binary: If True, read the process output as raw binary.
+        Returns:
+            self
+        """
+        self._on_output_callback = on_output_callback
+        self._binary_output = binary
+        return self
+
+    def set_on_terminate_callback(self, on_terminate_callback):
+        """Sets the on_self_terminate callback function.
+
+        Args:
+            on_terminate_callback: The function to be called when the process
+                has terminated on its own. The callback has the following
+                signature:
+
+                >>> def on_self_terminate_callback(popen_process):
+                >>>     return 'command to run' or None
+
+                If a string is returned, the string returned will be the command
+                line used to run the command again. If None is returned, the
+                process will end without restarting.
+
+        Returns:
+            self
+        """
+        self._on_terminate_callback = on_terminate_callback
+        return self
+
+    def start(self):
+        """Starts the process's execution."""
+        if self._started:
+            raise ProcessError("Process has already started.")
+        self._started = True
+        self._process = None
+
+        self._listening_thread = Thread(target=self._exec_loop)
+        self._listening_thread.start()
+
+        time_up_at = time.time() + 1
+
+        while self._process is None:
+            if time.time() > time_up_at:
+                raise OSError("Unable to open process!")
+
+        self._stopped = False
+
+    @staticmethod
+    def _get_timeout_left(timeout, start_time):
+        return max(0.1, timeout - (time.time() - start_time))
+
+    def is_running(self):
+        """Checks that the underlying Popen process is still running
+
+        Returns:
+            True if the process is running.
+        """
+        return self._process is not None and self._process.poll() is None
+
+    def _join_threads(self):
+        """Waits for the threads associated with the process to terminate."""
+        if self._listening_thread is not None:
+            self._listening_thread.join()
+            self._listening_thread = None
+
+        if self._redirection_thread is not None:
+            self._redirection_thread.join()
+            self._redirection_thread = None
+
+    def _kill_process(self):
+        """Kills the underlying process/process group. Implementation is
+        platform-dependent."""
+        if _on_windows:
+            subprocess.check_call(f"taskkill /F /T /PID {self._process.pid}")
+        else:
+            self.signal(signal.SIGKILL)
+
+    def wait(self, kill_timeout=60.0):
+        """Waits for the process to finish execution.
+
+        If the process has reached the kill_timeout, the process will be killed
+        instead.
+
+        Note: the on_self_terminate callback will NOT be called when calling
+        this function.
+
+        Args:
+            kill_timeout: The amount of time to wait until killing the process.
+        """
+        if self._stopped:
+            raise ProcessError("Process is already being stopped.")
+        self._stopped = True
+
+        try:
+            self._process.wait(kill_timeout)
+        except subprocess.TimeoutExpired:
+            self._kill_process()
+        finally:
+            self._join_threads()
+            self._started = False
+
+    def signal(self, sig):
+        """Sends a signal to the process.
+
+        Args:
+            sig: The signal to be sent.
+        """
+        if _on_windows:
+            raise ProcessError("Unable to call Process.signal on windows.")
+
+        pgid = os.getpgid(self._process.pid)
+        os.killpg(pgid, sig)
+
+    def stop(self):
+        """Stops the process.
+
+        This command is effectively equivalent to kill, but gives time to clean
+        up any related work on the process, such as output redirection.
+
+        Note: the on_self_terminate callback will NOT be called when calling
+        this function.
+        """
+        self.wait(0)
+
+    def _redirect_output(self):
+        """Redirects the output from the command into the on_output_callback."""
+        if self._binary_output:
+            while True:
+                data = self._process.stdout.read(1024)
+
+                if not data:
+                    return
+                else:
+                    self._on_output_callback(data)
+        else:
+            while True:
+                line = self._process.stdout.readline().decode("utf-8", errors="replace")
+
+                if not line:
+                    return
+                else:
+                    # Output the line without trailing \n and whitespace.
+                    self._on_output_callback(line.rstrip())
+
+    @staticmethod
+    def __start_process(command, **kwargs):
+        """A convenient wrapper function for starting the process."""
+        acts_logger = logging.getLogger()
+        acts_logger.debug('Starting command "%s" with kwargs %s', command, kwargs)
+        return subprocess.Popen(command, **kwargs)
+
+    def _exec_loop(self):
+        """Executes Popen in a loop.
+
+        When Popen terminates without stop() being called,
+        self._on_terminate_callback() will be called. The returned value from
+        _on_terminate_callback will then be used to determine if the loop should
+        continue and start up the process again. See set_on_terminate_callback()
+        for more information.
+        """
+        command = self._command
+        while True:
+            self._process = self.__start_process(
+                command,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.STDOUT,
+                bufsize=1,
+                **self._subprocess_kwargs,
+            )
+            self._redirection_thread = Thread(target=self._redirect_output)
+            self._redirection_thread.start()
+            self._process.wait()
+
+            if self._stopped:
+                logging.debug("The process for command %s was stopped.", command)
+                break
+            else:
+                logging.debug("The process for command %s terminated.", command)
+                # Wait for all output to be processed before sending
+                # _on_terminate_callback()
+                self._redirection_thread.join()
+                logging.debug("Beginning on_terminate_callback for %s.", command)
+                retry_value = self._on_terminate_callback(self._process)
+                if retry_value:
+                    if not self._use_shell and isinstance(retry_value, str):
+                        retry_value = shlex.split(retry_value)
+                    command = retry_value
+                else:
+                    break
diff --git a/packages/antlion/logger.py b/packages/antlion/logger.py
new file mode 100755
index 0000000..24ec118
--- /dev/null
+++ b/packages/antlion/logger.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import logging
+import re
+from types import TracebackType
+
+log_line_timestamp_len = 23
+logline_timestamp_re = re.compile("\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d\d\d")
+
+
+def _parse_logline_timestamp(t):
+    """Parses a logline timestamp into a tuple.
+
+    Args:
+        t: Timestamp in logline format ("%Y-%m-%d %H:%M:%S.%f" at millisecond
+            precision).
+
+    Returns:
+        A tuple of string date and time elements in the order of year, month,
+        day, hour, minute, second, millisecond.
+    """
+    date, time = t.split(" ")
+    year, month, day = date.split("-")
+    h, m, s = time.split(":")
+    s, ms = s.split(".")
+    return year, month, day, h, m, s, ms
+
+
+def is_valid_logline_timestamp(timestamp):
+    if len(timestamp) == log_line_timestamp_len:
+        if logline_timestamp_re.match(timestamp):
+            return True
+    return False
+
+
+def logline_timestamp_comparator(t1, t2):
+    """Comparator for timestamps in logline format.
+
+    Args:
+        t1: Timestamp in logline format.
+        t2: Timestamp in logline format.
+
+    Returns:
+        -1 if t1 < t2; 1 if t1 > t2; 0 if t1 == t2.
+    """
+    dt1 = _parse_logline_timestamp(t1)
+    dt2 = _parse_logline_timestamp(t2)
+    for u1, u2 in zip(dt1, dt2):
+        if u1 < u2:
+            return -1
+        elif u1 > u2:
+            return 1
+    return 0
+
+
+def _get_timestamp(time_format, delta=None):
+    t = datetime.datetime.now()
+    if delta:
+        t = t + datetime.timedelta(seconds=delta)
+    return t.strftime(time_format)[:-3]
+
+
+def epoch_to_log_line_timestamp(epoch_time):
+    """Converts an epoch timestamp in ms to log line timestamp format, which
+    is readable for humans.
+
+    Args:
+        epoch_time: integer, an epoch timestamp in ms.
+
+    Returns:
+        A string that is the corresponding timestamp in log line timestamp
+        format.
+    """
+    s, ms = divmod(epoch_time, 1000)
+    d = datetime.datetime.fromtimestamp(s)
+    return d.strftime("%Y-%m-%d %H:%M:%S.") + str(ms)
+
+
+def get_log_line_timestamp(delta=None):
+    """Returns a timestamp in the format used by log lines.
+
+    Default is current time. If a delta is set, the return value will be
+    the current time offset by delta seconds.
+
+    Args:
+        delta: Number of seconds to offset from current time; can be negative.
+
+    Returns:
+        A timestamp in log line format with an offset.
+    """
+    return _get_timestamp("%Y-%m-%d %H:%M:%S.%f", delta)
+
+
+def get_log_file_timestamp(delta=None):
+    """Returns a timestamp in the format used for log file names.
+
+    Default is current time. If a delta is set, the return value will be
+    the current time offset by delta seconds.
+
+    Args:
+        delta: Number of seconds to offset from current time; can be negative.
+
+    Returns:
+        A timestamp in log file name format with an offset.
+    """
+    return _get_timestamp("%Y-%m-%d_%H-%M-%S-%f", delta)
+
+
+def normalize_log_line_timestamp(log_line_timestamp):
+    """Replace special characters in log line timestamp with normal characters.
+
+    Args:
+        log_line_timestamp: A string in the log line timestamp format. Obtained
+            with get_log_line_timestamp.
+
+    Returns:
+        A string representing the same time as input timestamp, but without
+        special characters.
+    """
+    norm_tp = log_line_timestamp.replace(" ", "_")
+    norm_tp = norm_tp.replace(":", "-")
+    return norm_tp
+
+
+class LogLevel:
+    """Sets the logging level threshold for logger within this context.
+
+    Logging messages which are equal or less severe than level will be ignored.
+    See https://docs.python.org/3/library/logging.html#levels for a list of
+    levels.
+    """
+
+    def __init__(
+        self, logger: logging.Logger | logging.LoggerAdapter, level: int
+    ) -> None:
+        self._logger = logger
+        if isinstance(logger, logging.Logger):
+            self._old_level = logger.level
+        else:
+            self._old_level = logger.logger.level
+        self._new_level = level
+
+    def __enter__(self) -> logging.Logger | logging.LoggerAdapter:
+        self._logger.setLevel(self._new_level)
+        return self._logger
+
+    def __exit__(
+        self,
+        _exit_type: type[BaseException] | None,
+        _exit_value: BaseException | None,
+        _exit_traceback: TracebackType | None,
+    ) -> None:
+        self._logger.setLevel(self._old_level)
diff --git a/packages/antlion/net.py b/packages/antlion/net.py
new file mode 100644
index 0000000..e4a1851
--- /dev/null
+++ b/packages/antlion/net.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import errno
+import socket
+import time
+
+
+def wait_for_port(host: str, port: int, timeout_sec: int = 5) -> None:
+    """Wait for the host to start accepting connections on the port.
+
+    Some services take some time to start. Call this after launching the service
+    to avoid race conditions.
+
+    Args:
+        host: IP of the running service.
+        port: Port of the running service.
+        timeout_sec: Seconds to wait until raising TimeoutError
+
+    Raises:
+        TimeoutError: when timeout_sec has expired without a successful
+            connection to the service
+    """
+    last_error: OSError | None = None
+    timeout = time.perf_counter() + timeout_sec
+
+    while True:
+        try:
+            time_left = max(timeout - time.perf_counter(), 0)
+            with socket.create_connection((host, port), timeout=time_left):
+                return
+        except ConnectionRefusedError as e:
+            # Occurs when the host is online but not ready to accept connections
+            # yet; wait to see if the host becomes ready.
+            last_error = e
+        except TimeoutError as e:
+            last_error = e
+        except OSError as e:
+            if e.errno == errno.EHOSTUNREACH:
+                # No route to host. Occurs when the interface to the host is
+                # torn down; wait to see if the interface comes back.
+                last_error = e
+            else:
+                # Unexpected error
+                raise e
+
+        if time.perf_counter() >= timeout:
+            raise TimeoutError(
+                f"Waited over {timeout_sec}s for the service to start "
+                f"accepting connections at {host}:{port}"
+            ) from last_error
diff --git a/src/antlion/__init__.py b/packages/antlion/py.typed
similarity index 100%
copy from src/antlion/__init__.py
copy to packages/antlion/py.typed
diff --git a/packages/antlion/runner.py b/packages/antlion/runner.py
new file mode 100644
index 0000000..7634d1d
--- /dev/null
+++ b/packages/antlion/runner.py
@@ -0,0 +1,76 @@
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+from typing import Protocol
+
+
+class Runner(Protocol):
+    """Structural interface for a command runner (e.g. local or SSH)."""
+
+    # Logger used by implementations for command tracing.
+    log: logging.LoggerAdapter
+
+    def run(
+        self,
+        command: str | list[str],
+        timeout_sec: int | None = None,
+        ignore_status: bool = False,
+    ) -> CompletedProcess:
+        """Run command with arguments.
+
+        Args:
+            command: Command to execute
+            timeout_sec: Seconds to wait for command to finish; None waits
+                indefinitely.
+            ignore_status: Ignore the exit code of command. Non-zero exit codes
+                need to be handled manually.
+
+        Returns:
+            Result of the completed command.
+
+        Raises:
+            subprocess.CalledProcessError: when the process exits with a non-zero status
+            subprocess.TimeoutExpired: when the timeout expires while waiting
+                for a child process
+        """
+        ...
+
+    def run_async(self, command: str) -> CompletedProcess:
+        """Run command asynchronously.
+
+        Args:
+            command: Command to execute
+
+        Returns:
+            Results of the dispatched command.
+        """
+        ...
+
+
+class CompletedProcess(Protocol):
+    """Structural interface for the result of an executed command.
+
+    Mirrors the read-only surface of ``subprocess.CompletedProcess`` so
+    implementations from different runners are interchangeable.
+    """
+
+    @property
+    def returncode(self) -> int:
+        """Exit status."""
+        ...
+
+    @property
+    def stdout(self) -> str:
+        """Output stream."""
+        ...
+
+    @property
+    def stderr(self) -> str:
+        """Error output stream."""
+        ...
diff --git a/src/antlion/test_utils/OWNERS b/packages/antlion/test_utils/OWNERS
similarity index 100%
rename from src/antlion/test_utils/OWNERS
rename to packages/antlion/test_utils/OWNERS
diff --git a/src/antlion/test_utils/__init__.py b/packages/antlion/test_utils/__init__.py
similarity index 100%
rename from src/antlion/test_utils/__init__.py
rename to packages/antlion/test_utils/__init__.py
diff --git a/src/antlion/test_utils/abstract_devices/__init__.py b/packages/antlion/test_utils/abstract_devices/__init__.py
similarity index 100%
rename from src/antlion/test_utils/abstract_devices/__init__.py
rename to packages/antlion/test_utils/abstract_devices/__init__.py
diff --git a/packages/antlion/test_utils/abstract_devices/wlan_device.py b/packages/antlion/test_utils/abstract_devices/wlan_device.py
new file mode 100644
index 0000000..e1c959f
--- /dev/null
+++ b/packages/antlion/test_utils/abstract_devices/wlan_device.py
@@ -0,0 +1,510 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import enum
+from typing import Protocol, runtime_checkable
+
+from honeydew.typing.wlan import (
+    ClientStatusConnected,
+    ClientStatusConnecting,
+    ClientStatusIdle,
+    ConnectionState,
+    RequestStatus,
+)
+from mobly.records import TestResultRecord
+
+from antlion.controllers import iperf_client
+from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.ap_lib.hostapd_security import SecurityMode
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
+    WlanPolicyControllerError,
+)
+from antlion.controllers.iperf_client import IPerfClientBase
+from antlion.controllers.pdu import PduDevice
+from antlion.test_utils.wifi import wifi_test_utils as awutils
+from antlion.utils import PingResult, adb_shell_ping
+
+FUCHSIA_VALID_SECURITY_TYPES = {"none", "wep", "wpa", "wpa2", "wpa3"}
+DEFAULT_ASSOCIATE_TIMEOUT_SEC = 30
+
+
+@runtime_checkable
+class SupportsWLAN(Protocol):
+    """A generic WLAN device."""
+
+    @property
+    def identifier(self) -> str:
+        """Unique identifier for this device."""
+        ...
+
+    def take_bug_report(self, record: TestResultRecord) -> None:
+        """Takes a bug report on the device and stores it on the host.
+
+        Will store the bug report in the output directory for the currently running
+        test, as specified by `record`.
+
+        Args:
+            record: Information about the current running test.
+        """
+        ...
+
+    def associate(
+        self,
+        target_ssid: str,
+        target_pwd: str | None = None,
+        key_mgmt: str | None = None,
+        check_connectivity: bool = True,
+        hidden: bool = False,
+        target_security: SecurityMode = SecurityMode.OPEN,
+    ) -> bool:
+        """Associate to a target network.
+
+        Args:
+            target_ssid: SSID to associate to.
+            target_pwd: Password for the SSID, if necessary.
+            key_mgmt: The hostapd wpa_key_mgmt, if specified.
+            check_connectivity: Whether to check for internet connectivity.
+            hidden: Whether the network is hidden.
+            target_security: Target security for network, used to
+                save the network in policy connects (see wlan_policy_lib)
+        Returns:
+            True if successfully connected to WLAN, False if not.
+        """
+        ...
+
+    def disconnect(self) -> None:
+        """Disconnect from all WLAN networks."""
+        ...
+
+    def get_default_wlan_test_interface(self) -> str:
+        """Name of default WLAN interface to use for testing."""
+        ...
+
+    def is_connected(self, ssid: str | None = None) -> bool:
+        """Determines if wlan_device is connected to wlan network.
+
+        Args:
+            ssid: If specified, check if device is connected to a specific network.
+
+        Returns:
+            True if connected to requested network; or if ssid not specified, True if
+            connected to any network; otherwise, False.
+        """
+        ...
+
+    def create_iperf_client(self, test_interface: str | None = None) -> IPerfClientBase:
+        """Create an iPerf3 client on this device.
+
+        Args:
+            test_interface: Name of test interface. Defaults to first found wlan client
+                interface.
+
+        Returns:
+            IPerfClient object
+        """
+        ...
+
+    def get_wlan_interface_id_list(self) -> list[int]:
+        """List available WLAN interfaces.
+
+        Returns:
+            A list of wlan interface IDs.
+        """
+        ...
+
+    def destroy_wlan_interface(self, iface_id: int) -> None:
+        """Destroy the specified WLAN interface.
+
+        Args:
+            iface_id: ID of the interface to destroy.
+        """
+        ...
+
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 25,
+        additional_ping_params: str | None = None,
+    ) -> PingResult:
+        """Pings from a device to an IP address or hostname
+
+        Args:
+            dest_ip: IP or hostname to ping
+            count: How many icmp packets to send
+            interval: Milliseconds to wait between pings
+            timeout: Milliseconds to wait before having the icmp packet timeout
+            size: Size of the icmp packet in bytes
+            additional_ping_params: Command option flags to append to the command string
+
+        Returns:
+            A PingResult with the results of the ping. Notable fields
+            include:
+                exit_status: Exit code of the ping command (0 on success).
+                rtt_min_ms: The minimum round trip time of the ping.
+                rtt_max_ms: The maximum round trip time of the ping.
+                rtt_avg_ms: The average round trip time of the ping.
+                stdout: The standard out of the ping command.
+                stderr: The standard error of the ping command.
+        """
+        ...
+
+    def hard_power_cycle(self, pdus: list[PduDevice]) -> None:
+        """Reboot a device abruptly without notification.
+
+        Args:
+            pdus: All testbed PDUs
+        """
+        ...
+
+    def feature_is_present(self, feature: str) -> bool:
+        """Check if a WLAN feature is present.
+
+        Args:
+            feature: WLAN feature to query
+
+        Returns:
+            True if `feature` is present; otherwise, False.
+        """
+        ...
+
+    def wifi_toggle_state(self, state: bool | None) -> None:
+        """Toggle the state of Wi-Fi.
+
+        Args:
+            state: Wi-Fi state to set to. If None, opposite of the current state.
+        """
+        ...
+
+    def reset_wifi(self) -> None:
+        """Clears all saved Wi-Fi networks on a device.
+
+        This will turn Wi-Fi on.
+        """
+        ...
+
+    def turn_location_off_and_scan_toggle_off(self) -> None:
+        """Turn off Wi-Fi location scans."""
+        ...
+
+
+class AndroidWlanDevice(SupportsWLAN):
+    """Android device that supports WLAN."""
+
+    def __init__(self, android_device: AndroidDevice) -> None:
+        self.device = android_device
+
+    @property
+    def identifier(self) -> str:
+        return self.device.serial
+
+    def wifi_toggle_state(self, state: bool | None) -> None:
+        awutils.wifi_toggle_state(self.device, state)
+
+    def reset_wifi(self) -> None:
+        awutils.reset_wifi(self.device)
+
+    def take_bug_report(self, record: TestResultRecord) -> None:
+        self.device.take_bug_report(record.test_name, record.begin_time)
+
+    def turn_location_off_and_scan_toggle_off(self) -> None:
+        awutils.turn_location_off_and_scan_toggle_off(self.device)
+
+    def associate(
+        self,
+        target_ssid: str,
+        target_pwd: str | None = None,
+        key_mgmt: str | None = None,
+        check_connectivity: bool = True,
+        hidden: bool = False,
+        target_security: SecurityMode = SecurityMode.OPEN,
+    ) -> bool:
+        network = {"SSID": target_ssid, "hiddenSSID": hidden}
+        if target_pwd:
+            network["password"] = target_pwd
+        if key_mgmt:
+            network["security"] = key_mgmt
+        try:
+            awutils.connect_to_wifi_network(
+                self.device,
+                network,
+                check_connectivity=check_connectivity,
+                hidden=hidden,
+            )
+            return True
+        except Exception as e:
+            self.device.log.info(f"Failed to associate ({e})")
+            return False
+
+    def disconnect(self) -> None:
+        awutils.turn_location_off_and_scan_toggle_off(self.device)
+
+    def get_wlan_interface_id_list(self) -> list[int]:
+        raise NotImplementedError("get_wlan_interface_id_list is not implemented")
+
+    def get_default_wlan_test_interface(self) -> str:
+        return "wlan0"
+
+    def destroy_wlan_interface(self, iface_id: int) -> None:
+        raise NotImplementedError("destroy_wlan_interface is not implemented")
+
+    def is_connected(self, ssid: str | None = None) -> bool:
+        wifi_info = self.device.droid.wifiGetConnectionInfo()
+        if ssid:
+            return "BSSID" in wifi_info and wifi_info["SSID"] == ssid
+        return "BSSID" in wifi_info
+
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 25,
+        additional_ping_params: str | None = None,
+    ) -> PingResult:
+        success = adb_shell_ping(self.device, dest_ip, count=count, timeout=timeout)
+        return PingResult(
+            exit_status=0 if success else 1,
+            # TODO: Implement the rest if needed for any tests
+            stdout="",
+            stderr="",
+            transmitted=None,
+            received=None,
+            time_ms=None,
+            rtt_min_ms=None,
+            rtt_avg_ms=None,
+            rtt_max_ms=None,
+            rtt_mdev_ms=None,
+        )
+
+    def hard_power_cycle(self, pdus: list[PduDevice]) -> None:
+        raise NotImplementedError("hard_power_cycle is not implemented")
+
+    def create_iperf_client(self, test_interface: str | None = None) -> IPerfClientBase:
+        if not test_interface:
+            test_interface = self.get_default_wlan_test_interface()
+
+        return iperf_client.IPerfClientOverAdb(
+            android_device=self.device, test_interface=test_interface
+        )
+
+    def feature_is_present(self, feature: str) -> bool:
+        raise NotImplementedError("feature_is_present is not implemented")
+
+
+class AssociationMode(enum.Enum):
+    """Defines which FIDLs to use for WLAN association and disconnect."""
+
+    DRIVER = 1
+    """Call WLAN core FIDLs to provide all association and disconnect."""
+    POLICY = 2
+    """Call WLAN policy FIDLs to provide all association and disconnect."""
+
+
+class FuchsiaWlanDevice(SupportsWLAN):
+    """Fuchsia device that supports WLAN."""
+
+    def __init__(self, fuchsia_device: FuchsiaDevice, mode: AssociationMode):
+        self.device = fuchsia_device
+        self.device.configure_wlan()
+        self.association_mode = mode
+
+    @property
+    def identifier(self) -> str:
+        return self.device.ip
+
+    def wifi_toggle_state(self, state: bool | None) -> None:
+        pass
+
+    def reset_wifi(self) -> None:
+        pass
+
+    def take_bug_report(self, _: TestResultRecord) -> None:
+        self.device.take_bug_report()
+
+    def turn_location_off_and_scan_toggle_off(self) -> None:
+        pass
+
+    def associate(
+        self,
+        target_ssid: str,
+        target_pwd: str | None = None,
+        key_mgmt: str | None = None,
+        check_connectivity: bool = True,
+        hidden: bool = False,
+        target_security: SecurityMode = SecurityMode.OPEN,
+        timeout_sec: int = DEFAULT_ASSOCIATE_TIMEOUT_SEC,
+    ) -> bool:
+        match self.association_mode:
+            case AssociationMode.DRIVER:
+                ssid_bss_desc_map = self.device.sl4f.wlan_lib.scan_for_bss_info()
+
+                bss_descs_for_ssid = ssid_bss_desc_map.get(target_ssid, None)
+                if not bss_descs_for_ssid or len(bss_descs_for_ssid) < 1:
+                    self.device.log.error(
+                        "Scan failed to find a BSS description for target_ssid "
+                        f"{target_ssid}"
+                    )
+                    return False
+
+                return self.device.sl4f.wlan_lib.connect(
+                    target_ssid, target_pwd, bss_descs_for_ssid[0]
+                )
+            case AssociationMode.POLICY:
+                try:
+                    self.device.sl4f.wlan_policy_lib.save_network(
+                        target_ssid,
+                        target_security.fuchsia_security_type(),
+                        target_pwd=target_pwd,
+                    )
+                    status = self.device.sl4f.wlan_policy_lib.connect(
+                        target_ssid,
+                        target_security.fuchsia_security_type(),
+                    )
+                    if status is RequestStatus.ACKNOWLEDGED:
+                        self.device.wlan_policy_controller.wait_for_network_state(
+                            target_ssid,
+                            ConnectionState.CONNECTED,
+                            timeout_sec=timeout_sec,
+                        )
+                    else:
+                        self.device.log.debug(
+                            f"Received request status: {status} while trying to "
+                            f"connect to ssid: {target_ssid}."
+                        )
+                        return False
+
+                    return True
+                except WlanPolicyControllerError as e:
+                    self.device.log.error(
+                        f"Failed to save and connect to {target_ssid} with "
+                        f"error: {e}"
+                    )
+                    return False
+
+    def disconnect(self) -> None:
+        """Function to disconnect from a Fuchsia WLAN device.
+        Asserts if disconnect was not successful.
+        """
+        match self.association_mode:
+            case AssociationMode.DRIVER:
+                self.device.sl4f.wlan_lib.disconnect()
+            case AssociationMode.POLICY:
+                self.device.sl4f.wlan_policy_lib.remove_all_networks()
+                self.device.wlan_policy_controller.wait_for_no_connections()
+
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 25,
+        additional_ping_params: str | None = None,
+    ) -> PingResult:
+        return self.device.ping(
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
+
+    def get_wlan_interface_id_list(self) -> list[int]:
+        return self.device.sl4f.wlan_lib.get_iface_id_list()
+
+    def get_default_wlan_test_interface(self) -> str:
+        if self.device.wlan_client_test_interface_name is None:
+            raise TypeError("Expected wlan_client_test_interface_name to be str")
+        return self.device.wlan_client_test_interface_name
+
+    def destroy_wlan_interface(self, iface_id: int) -> None:
+        self.device.sl4f.wlan_lib.destroy_iface(iface_id)
+
+    def is_connected(self, ssid: str | None = None) -> bool:
+        result = self.device.sl4f.wlan_lib.status()
+        match result:
+            case ClientStatusIdle():
+                self.device.log.info("Client status idle")
+                return False
+            case ClientStatusConnecting():
+                ssid_bytes = bytearray(result.ssid).decode(
+                    encoding="utf-8", errors="replace"
+                )
+                self.device.log.info(f"Client status connecting to ssid: {ssid_bytes}")
+                return False
+            case ClientStatusConnected():
+                ssid_bytes = bytearray(result.ssid).decode(
+                    encoding="utf-8", errors="replace"
+                )
+                self.device.log.info(f"Client connected to ssid: {ssid_bytes}")
+                if ssid is None:
+                    return True
+                return ssid == ssid_bytes
+            case _:
+                raise ValueError(
+                    "Status did not return a valid status response: " f"{result}"
+                )
+
+    def hard_power_cycle(self, pdus: list[PduDevice]) -> None:
+        self.device.reboot(reboot_type="hard", testbed_pdus=pdus)
+
+    def create_iperf_client(self, test_interface: str | None = None) -> IPerfClientBase:
+        if not test_interface:
+            test_interface = self.get_default_wlan_test_interface()
+
+        # A package server is necessary to acquire the iperf3 client for
+        # some builds.
+        self.device.start_package_server()
+
+        return iperf_client.IPerfClientOverSsh(
+            ssh_provider=self.device.ssh,
+            test_interface=test_interface,
+        )
+
+    def feature_is_present(self, feature: str) -> bool:
+        return feature in self.device.wlan_features
+
+
+def create_wlan_device(
+    hardware_device: FuchsiaDevice | AndroidDevice,
+    associate_mode: AssociationMode,
+) -> SupportsWLAN:
+    """Creates a generic WLAN device based on the type of hardware device.
+
+    Args:
+        hardware_device: A WLAN hardware device supported by antlion.
+        associate_mode: Which FIDLs to use for association and disconnect.
+    """
+    device: SupportsWLAN
+    if isinstance(hardware_device, FuchsiaDevice):
+        device = FuchsiaWlanDevice(hardware_device, associate_mode)
+    elif isinstance(hardware_device, AndroidDevice):
+        device = AndroidWlanDevice(hardware_device)
+    else:
+        raise ValueError(
+            f"Unable to create WLAN device for type {type(hardware_device)}"
+        )
+
+    assert isinstance(device, SupportsWLAN)
+    return device
diff --git a/packages/antlion/test_utils/abstract_devices/wmm_transceiver.py b/packages/antlion/test_utils/abstract_devices/wmm_transceiver.py
new file mode 100644
index 0000000..8609807
--- /dev/null
+++ b/packages/antlion/test_utils/abstract_devices/wmm_transceiver.py
@@ -0,0 +1,682 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import multiprocessing
+import time
+from datetime import datetime
+from multiprocessing.managers import DictProxy
+from typing import Any, Mapping
+from uuid import UUID, uuid4
+
+from mobly import logger, signals
+
+from antlion import utils
+from antlion.controllers import iperf_client, iperf_server
+from antlion.controllers.access_point import AccessPoint
+from antlion.test_utils.abstract_devices.wlan_device import SupportsWLAN
+from antlion.validation import MapValidator
+
+AC_VO = "AC_VO"
+AC_VI = "AC_VI"
+AC_BE = "AC_BE"
+AC_BK = "AC_BK"
+
+# TODO(fxb/61421): Add tests to check all DSCP classes are mapped to the correct
+# AC (there are many that aren't included here). Requires implementation of
+# sniffer.
+DEFAULT_AC_TO_TOS_TAG_MAP = {AC_VO: "0xC0", AC_VI: "0x80", AC_BE: "0x0", AC_BK: "0x20"}
+UDP = "udp"
+TCP = "tcp"
+DEFAULT_IPERF_PORT = 5201
+DEFAULT_STREAM_TIME = 10
+DEFAULT_IP_ADDR_TIMEOUT = 15
+PROCESS_JOIN_TIMEOUT = 60
+AVAILABLE = True
+UNAVAILABLE = False
+
+
+class WmmTransceiverError(signals.ControllerError):
+    pass
+
+
+def create(
+    config: Mapping[str, Any],
+    identifier: str | None = None,
+    wlan_devices: list[SupportsWLAN] | None = None,
+    access_points: list[AccessPoint] | None = None,
+):
+    """Creates a WmmTransceiver from a config.
+
+    Args:
+        config: Config parameters for the transceiver. Contains:
+            - iperf_config: dict, the config to use for creating IPerfClients and
+                IPerfServers (excluding port).
+            - port_range_start: int, the lower bound of the port range to use for
+                creating IPerfServers. Defaults to 5201.
+            - wlan_device: string, the identifier of the wlan_device used for this
+                WmmTransceiver (optional)
+
+        identifier: Identifier for the WmmTransceiver. Must be provided either as arg or
+            in the config.
+        wlan_devices: WLAN devices from which to get the wlan_device, if any, used as
+            this transceiver
+        access_points: Access points from which to get the access_point, if any, used as
+            this transceiver
+    """
+    try:
+        iperf_config = config["iperf_config"]
+    except KeyError as err:
+        raise WmmTransceiverError(
+            f"Parameter not provided as func arg, nor found in config: {err}"
+        )
+
+    if not identifier:
+        # If identifier is not provided as func arg, it must be provided via
+        # config file.
+        identifier = MapValidator(config).get(str, "identifier")
+
+    if wlan_devices is None:
+        wlan_devices = []
+
+    if access_points is None:
+        access_points = []
+
+    port_range_start = config.get("port_range_start", DEFAULT_IPERF_PORT)
+
+    wd = None
+    ap = None
+    if "wlan_device" in config:
+        wd = _find_wlan_device(config["wlan_device"], wlan_devices)
+    elif "access_point" in config:
+        ap = _find_access_point(config["access_point"], access_points)
+
+    return WmmTransceiver(
+        iperf_config,
+        identifier,
+        wlan_device=wd,
+        access_point=ap,
+        port_range_start=port_range_start,
+    )
+
+
+def _find_wlan_device(
+    wlan_device_identifier: str, wlan_devices: list[SupportsWLAN]
+) -> SupportsWLAN:
+    """Returns WLAN device based on string identifier (e.g. ip, serial, etc.)
+
+    Args:
+        wlan_device_identifier: Identifier for the desired WLAN device
+        wlan_devices: WLAN devices to search through
+
+    Returns:
+        A WLAN device matching wlan_device_identifier
+
+    Raises:
+        WmmTransceiverError, if no WLAN device matches wlan_device_identifier
+    """
+    for wd in wlan_devices:
+        if wlan_device_identifier == wd.identifier:
+            return wd
+    raise WmmTransceiverError(
+        f'No WLAN device with identifier "{wlan_device_identifier}"'
+    )
+
+
+def _find_access_point(
+    access_point_ip: str, access_points: list[AccessPoint]
+) -> AccessPoint:
+    """Returns AccessPoint based on string ip address
+
+    Args:
+        access_point_ip: Control plane IP address of the desired AP
+        access_points: Access points to search through
+
+    Returns:
+        Access point with hostname matching access_point_ip
+
+    Raises:
+        WmmTransceiverError, if no access point matches access_point_ip
+    """
+    for ap in access_points:
+        if ap.ssh_settings.hostname == access_point_ip:
+            return ap
+    raise WmmTransceiverError(f"No AccessPoint with ip: {access_point_ip}")
+
+
+class WmmTransceiver(object):
+    """Object for handling WMM tagged streams between devices"""
+
+    def __init__(
+        self,
+        iperf_config,
+        identifier,
+        wlan_device=None,
+        access_point=None,
+        port_range_start=5201,
+    ):
+        self.identifier = identifier
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: (
+                    f"[WmmTransceiver | {self.identifier}]"
+                    if self.identifier
+                    else "[WmmTransceiver]"
+                ),
+            },
+        )
+        # WLAN device or AccessPoint, that is used as the transceiver. Only one
+        # will be set. This helps consolidate association, setup, teardown, etc.
+        self.wlan_device = wlan_device
+        self.access_point = access_point
+
+        # Parameters used to create IPerfClient and IPerfServer objects on
+        # device
+        self._iperf_config = iperf_config
+        self._test_interface = self._iperf_config.get("test_interface")
+        self._port_range_start = port_range_start
+        self._next_server_port = port_range_start
+
+        # Maps IPerfClients, used for streams from this device, to True if
+        # available, False if reserved
+        self._iperf_clients = {}
+
+        # Maps IPerfServers, used to receive streams from other devices, to True
+        # if available, False if reserved
+        self._iperf_servers = {}
+
+        # Maps ports of servers, which are provided to other transceivers, to
+        # the actual IPerfServer objects
+        self._iperf_server_ports = {}
+
+        # Maps stream UUIDs to IPerfClients reserved for that stream's use
+        self._reserved_clients = {}
+
+        # Maps stream UUIDs to (WmmTransceiver, IPerfServer) tuples, where the
+        # server is reserved on the transceiver for that stream's use
+        self._reserved_servers = {}
+
+        # Maps with shared memory functionality to be used across the parallel
+        # streams. active_streams holds UUIDs of streams that are currently
+        # running on this device (mapped to True, since there is no
+        # multiprocessing set). stream_results maps UUIDs of streams completed
+        # on this device to IPerfResult results for that stream.
+        self._manager = multiprocessing.Manager()
+        self._active_streams = self._manager.dict()
+        self._stream_results = self._manager.dict()
+
+        # Holds parameters for streams that are prepared to run asynchronously
+        # (i.e. resources have been allocated). Maps UUIDs of the future streams
+        # to a dict, containing the stream parameters.
+        self._pending_async_streams = {}
+
+        # Set of UUIDs of asynchronous streams that have at least started, but
+        # have not had their resources reclaimed yet
+        self._ran_async_streams = set()
+
+        # Set of parallel stream processes, which can be joined if completed
+        # successfully, or terminated and joined in the event of an error
+        self._running_processes = set()
+
+    def run_synchronous_traffic_stream(self, stream_parameters, subnet):
+        """Runs a traffic stream with IPerf3 between two WmmTransceivers and
+        saves the results.
+
+        Args:
+            stream_parameters: dict, containing parameters to used for the
+                stream. See _parse_stream_parameters for details.
+            subnet: string, the subnet of the network to use for the stream
+
+        Returns:
+            uuid: UUID object, identifier of the stream
+        """
+        (
+            receiver,
+            access_category,
+            bandwidth,
+            stream_time,
+        ) = self._parse_stream_parameters(stream_parameters)
+        uuid = uuid4()
+
+        (client, server_ip, server_port) = self._get_stream_resources(
+            uuid, receiver, subnet
+        )
+
+        self._validate_server_address(server_ip, uuid)
+
+        self.log.info(
+            f"Running synchronous stream to {receiver.identifier} WmmTransceiver"
+        )
+        self._run_traffic(
+            uuid,
+            client,
+            server_ip,
+            server_port,
+            self._active_streams,
+            self._stream_results,
+            access_category=access_category,
+            bandwidth=bandwidth,
+            stream_time=stream_time,
+        )
+
+        self._return_stream_resources(uuid)
+        return uuid
+
+    def prepare_asynchronous_stream(self, stream_parameters, subnet):
+        """Reserves resources and saves configs for upcoming asynchronous
+        traffic streams, so they can be started nearly simultaneously.
+
+        Args:
+            stream_parameters: dict, containing parameters to used for the
+                stream. See _parse_stream_parameters for details.
+            subnet: string, the subnet of the network to use for the stream
+
+        Returns:
+            uuid: UUID object, identifier of the stream
+        """
+        (receiver, access_category, bandwidth, time) = self._parse_stream_parameters(
+            stream_parameters
+        )
+        uuid = uuid4()
+
+        (client, server_ip, server_port) = self._get_stream_resources(
+            uuid, receiver, subnet
+        )
+
+        self._validate_server_address(server_ip, uuid)
+
+        pending_stream_config = {
+            "client": client,
+            "server_ip": server_ip,
+            "server_port": server_port,
+            "access_category": access_category,
+            "bandwidth": bandwidth,
+            "time": time,
+        }
+
+        self._pending_async_streams[uuid] = pending_stream_config
+        self.log.info(f"Stream to {receiver.identifier} WmmTransceiver prepared.")
+        return uuid
+
+    def start_asynchronous_streams(self, start_time=None):
+        """Starts pending asynchronous streams between two WmmTransceivers as
+        parallel processes.
+
+        Args:
+            start_time: float, time, seconds since epoch, at which to start the
+                stream (for better synchronicity). If None, start immediately.
+        """
+        for uuid in self._pending_async_streams:
+            pending_stream_config = self._pending_async_streams[uuid]
+            client = pending_stream_config["client"]
+            server_ip = pending_stream_config["server_ip"]
+            server_port = pending_stream_config["server_port"]
+            access_category = pending_stream_config["access_category"]
+            bandwidth = pending_stream_config["bandwidth"]
+            time = pending_stream_config["time"]
+
+            process = multiprocessing.Process(
+                target=self._run_traffic,
+                args=[
+                    uuid,
+                    client,
+                    server_ip,
+                    server_port,
+                    self._active_streams,
+                    self._stream_results,
+                ],
+                kwargs={
+                    "access_category": access_category,
+                    "bandwidth": bandwidth,
+                    "stream_time": time,
+                    "start_time": start_time,
+                },
+            )
+
+            # This needs to be set here to ensure its marked active before
+            # it even starts.
+            self._active_streams[uuid] = True
+            process.start()
+            self._ran_async_streams.add(uuid)
+            self._running_processes.add(process)
+
+        self._pending_async_streams.clear()
+
+    def cleanup_asynchronous_streams(self, timeout=PROCESS_JOIN_TIMEOUT):
+        """Releases reservations on resources (IPerfClients and IPerfServers)
+        that were held for asynchronous streams, both pending and finished.
+        Attempts to join any running processes, logging an error if timeout is
+        exceeded.
+
+        Args:
+            timeout: time, in seconds, to wait for each running process, if any,
+                to join
+        """
+        self.log.info("Cleaning up any asynchronous streams.")
+
+        # Releases resources for any streams that were prepared but never run
+        for uuid in self._pending_async_streams:
+            self.log.error(f"Pending asynchronous stream {uuid} never ran. Cleaning.")
+            self._return_stream_resources(uuid)
+        self._pending_async_streams.clear()
+
+        # Attempts to join any running streams, terminating them after timeout
+        # if necessary.
+        while self._running_processes:
+            process = self._running_processes.pop()
+            process.join(timeout)
+            if process.is_alive():
+                self.log.error(
+                    f"Stream process failed to join in {timeout} seconds. Terminating."
+                )
+                process.terminate()
+                process.join()
+        self._active_streams.clear()
+
+        # Release resources for any finished streams
+        while self._ran_async_streams:
+            uuid = self._ran_async_streams.pop()
+            self._return_stream_resources(uuid)
+
+    def get_results(self, uuid):
+        """Retrieves a streams IPerfResults from stream_results
+
+        Args:
+            uuid: UUID object, identifier of the stream
+
+        Returns:
+            IPerfResult for the stream, or None if no result has been
+            recorded for uuid (stream still running or never ran).
+        """
+        return self._stream_results.get(uuid, None)
+
+    def destroy_resources(self):
+        """Stops all IPerfServers and clears every client/server record,
+        returning the transceiver's resource pools to their initial state.
+
+        NOTE(review): does not join running stream processes; callers appear
+        expected to run cleanup_asynchronous_streams first -- confirm.
+        """
+        for server in self._iperf_servers:
+            server.stop()
+        self._iperf_servers.clear()
+        self._iperf_server_ports.clear()
+        self._iperf_clients.clear()
+        # Reset port allocation so newly created servers reuse the range.
+        self._next_server_port = self._port_range_start
+        self._stream_results.clear()
+
+    @property
+    def has_active_streams(self):
+        """True if any stream is currently marked active on this device."""
+        return bool(self._active_streams)
+
+    # Helper Functions
+
+    def _run_traffic(
+        self,
+        uuid: UUID,
+        client: iperf_client.IPerfClientBase,
+        server_ip: str,
+        server_port: int,
+        active_streams: DictProxy[Any, Any],
+        stream_results: DictProxy[Any, Any],
+        access_category: str | None = None,
+        bandwidth: int | None = None,
+        stream_time: int = DEFAULT_STREAM_TIME,
+        start_time: float | None = None,
+    ):
+        """Runs an iperf3 stream.
+
+        1. Adds stream UUID to active_streams
+        2. Runs stream
+        3. Saves results to stream_results
+        4. Removes stream UUID from active_streams
+
+        Args:
+            uuid: Identifier for stream
+            client: IPerfClient object on device
+            server_ip: IP address of IPerfServer for stream
+            server_port: port of the IPerfServer for stream
+            active_streams: holds stream UUIDs of active streams on the device
+            stream_results: maps stream UUIDs of streams to IPerfResult objects
+            access_category: WMM access category to use with iperf (AC_BK, AC_BE, AC_VI,
+                AC_VO). Unset if None.
+            bandwidth: Bandwidth in mbps to use with iperf. Implies UDP. Unlimited if
+                None.
+            stream_time: Time in seconds, to run iperf stream
+            start_time: Time, seconds since epoch, at which to start the stream (for
+                better synchronicity). If None, start immediately.
+        """
+        # Redundant when launched via the async path (the parent marks the
+        # stream active before Process.start()), but covers direct invocation.
+        active_streams[uuid] = True
+
+        ac_flag = ""
+        bandwidth_flag = ""
+        time_flag = f"-t {stream_time}"
+
+        if access_category:
+            # Map the WMM access category to a TOS value for iperf's -S flag.
+            ac_flag = f" -S {DEFAULT_AC_TO_TOS_TAG_MAP[access_category]}"
+
+        if bandwidth:
+            # A bandwidth cap implies UDP (-u); value is in Mbits/sec.
+            bandwidth_flag = f" -u -b {bandwidth}M"
+
+        # -i 1: report every second; -J: JSON output for IPerfResult parsing.
+        iperf_flags = f"-p {server_port} -i 1 {time_flag}{ac_flag}{bandwidth_flag} -J"
+        if not start_time:
+            start_time = time.time()
+        time_str = datetime.fromtimestamp(start_time).strftime("%H:%M:%S.%f")
+        self.log.info(
+            "At %s, starting %s second stream to %s:%s with (AC: %s, Bandwidth: %s)"
+            % (
+                time_str,
+                stream_time,
+                server_ip,
+                server_port,
+                access_category,
+                bandwidth if bandwidth else "Unlimited",
+            )
+        )
+
+        # If present, wait for stream start time. (start_time is always truthy
+        # here since it defaults to now above.) Deliberate busy-wait rather
+        # than sleep, to keep multi-stream starts tightly synchronized.
+        if start_time:
+            current_time = time.time()
+            while current_time < start_time:
+                current_time = time.time()
+        path = client.start(server_ip, iperf_flags, f"{uuid}")
+        stream_results[uuid] = iperf_server.IPerfResult(
+            path, reporting_speed_units="mbps"
+        )
+
+        # Mark the stream finished so cleanup can release its resources.
+        active_streams.pop(uuid)
+
+    def _get_stream_resources(self, uuid, receiver, subnet):
+        """Reserves an IPerfClient and IPerfServer for a stream.
+
+        Args:
+            uuid: UUID object, identifier of the stream
+            receiver: WmmTransceiver object, which will be the streams receiver
+            subnet: string, subnet of test network, to retrieve the appropriate
+                server address
+
+        Returns:
+            (IPerfClient, string, int) representing the client, server address,
+            and server port to use for the stream
+        """
+        # The client runs on this transceiver; the server is reserved on (and
+        # owned by) the receiving transceiver.
+        client = self._get_client(uuid)
+        server_ip, server_port = self._get_server(receiver, uuid, subnet)
+        return (client, server_ip, server_port)
+
+    def _return_stream_resources(self, uuid):
+        """Releases reservations on a streams IPerfClient and IPerfServer, so
+        they can be used by a future stream.
+
+        Args:
+            uuid: UUID object, identifier of the stream
+
+        Raises:
+            EnvironmentError: if the stream is still marked active
+            KeyError: if no reservation exists for uuid
+        """
+        if uuid in self._active_streams:
+            raise EnvironmentError(f"Resource still being used by stream {uuid}")
+        # The server belongs to the receiving transceiver; return it there.
+        (receiver, server_port) = self._reserved_servers.pop(uuid)
+        receiver._release_server(server_port)
+        client = self._reserved_clients.pop(uuid)
+        self._iperf_clients[client] = AVAILABLE
+
+    def _get_client(self, uuid):
+        """Retrieves and reserves IPerfClient for use in a stream. If none are
+        available, a new one is created.
+
+        Args:
+            uuid: UUID object, identifier for stream, used to link client to
+                stream for teardown
+
+        Returns:
+            IPerfClient on device
+        """
+        reserved_client = None
+        for client in self._iperf_clients:
+            if self._iperf_clients[client] == AVAILABLE:
+                reserved_client = client
+                break
+        else:
+            # for/else: runs only when the loop found no available client
+            # (i.e. did not break), in which case a new client is created.
+            reserved_client = iperf_client.create([self._iperf_config])[0]
+
+        self._iperf_clients[reserved_client] = UNAVAILABLE
+        self._reserved_clients[uuid] = reserved_client
+        return reserved_client
+
+    def _get_server(self, receiver, uuid, subnet):
+        """Retrieves the address and port of a reserved IPerfServer object from
+        the receiver object for use in a stream.
+
+        Args:
+            receiver: WmmTransceiver, to get an IPerfServer from
+            uuid: UUID, identifier for stream, used to link server to stream
+                for teardown
+            subnet: string, subnet of test network, to retrieve the appropriate
+                server address
+
+        Returns:
+            (string, int) representing the IPerfServer address and port
+        """
+        # Reservation happens on the receiver; record it locally so
+        # _return_stream_resources knows where to return the server.
+        (server_ip, server_port) = receiver._reserve_server(subnet)
+        self._reserved_servers[uuid] = (receiver, server_port)
+        return (server_ip, server_port)
+
+    def _reserve_server(self, subnet):
+        """Reserves an available IPerfServer for use in a stream from another
+        WmmTransceiver. If none are available, a new one is created.
+
+        Args:
+            subnet: string, subnet of test network, to retrieve the appropriate
+                server address
+
+        Returns:
+            (string, int) representing the IPerfServer address and port
+        """
+        reserved_server = None
+        for server in self._iperf_servers:
+            if self._iperf_servers[server] == AVAILABLE:
+                reserved_server = server
+                break
+        else:
+            iperf_server_config = self._iperf_config
+            iperf_server_config.update({"port": self._next_server_port})
+            self._next_server_port += 1
+            reserved_server = iperf_server.create([iperf_server_config])[0]
+            self._iperf_server_ports[reserved_server.port] = reserved_server
+
+        self._iperf_servers[reserved_server] = UNAVAILABLE
+        reserved_server.start()
+        end_time = time.time() + DEFAULT_IP_ADDR_TIMEOUT
+        while time.time() < end_time:
+            if self.wlan_device:
+                addresses = utils.get_interface_ip_addresses(
+                    self.wlan_device.device, self._test_interface
+                )
+            else:
+                addresses = reserved_server.get_interface_ip_addresses(
+                    self._test_interface
+                )
+            for addr in addresses["ipv4_private"]:
+                if utils.ip_in_subnet(addr, subnet):
+                    return (addr, reserved_server.port)
+        raise AttributeError(
+            f"Reserved server has no ipv4 address in the {subnet} subnet"
+        )
+
+    def _release_server(self, server_port):
+        """Releases reservation on IPerfServer, which was held for a stream
+        from another WmmTransceiver.
+
+        Args:
+            server_port: int, the port of the IPerfServer being returned,
+                since the port is its identifying characteristic
+        """
+        server = self._iperf_server_ports[server_port]
+        # The server stays in the pool; it is restarted on its next
+        # reservation (see _reserve_server).
+        server.stop()
+        self._iperf_servers[server] = AVAILABLE
+
+    def _validate_server_address(self, server_ip, uuid, timeout=60):
+        """Verifies server address can be pinged before attempting to run
+        traffic, since iperf is unforgiving when the server is unreachable.
+
+        Args:
+            server_ip: string, ip address of the iperf server
+            uuid: string, uuid of the stream to use this server
+            timeout: int, time in seconds to wait for server to respond to pings
+
+        Raises:
+            WmmTransceiverError, if, after timeout, server ip is unreachable.
+        """
+        self.log.info(f"Verifying server address ({server_ip}) is reachable.")
+        end_time = time.time() + timeout
+        while time.time() < end_time:
+            if self.can_ping(server_ip):
+                break
+            else:
+                self.log.debug(
+                    "Could not ping server address (%s). Retrying in 1 second."
+                    % (server_ip)
+                )
+                time.sleep(1)
+        else:
+            # while/else: reached only when the deadline expired without a
+            # successful ping (no break). Release the stream's resources
+            # before failing so they can be reused.
+            self._return_stream_resources(uuid)
+            raise WmmTransceiverError(f"IPerfServer address ({server_ip}) unreachable.")
+
+    def can_ping(self, dest_ip):
+        """Utilizes can_ping function in wlan_device or access_point device to
+        ping dest_ip
+
+        Args:
+            dest_ip: string, ip address to ping
+
+        Returns:
+            True, if dest address is reachable
+            False, otherwise
+        """
+        # A transceiver is backed by either a wlan_device or an access_point;
+        # delegate to whichever is present.
+        if self.wlan_device:
+            return self.wlan_device.can_ping(dest_ip)
+        else:
+            return self.access_point.can_ping(dest_ip)
+
+    def _parse_stream_parameters(self, stream_parameters):
+        """Parses stream_parameters from dictionary.
+
+        Args:
+            stream_parameters: dict of stream parameters
+                'receiver': WmmTransceiver, the receiver for the stream
+                'access_category': String, the access category to use for the
+                    stream. Unset if None.
+                'bandwidth': int, bandwidth in mbps for the stream. If set,
+                    implies UDP. If unset, implies TCP and unlimited bandwidth.
+                'time': int, time in seconds to run stream.
+
+        Returns:
+            (receiver, access_category, bandwidth, time) as
+            (WmmTransceiver, String, int, int)
+        """
+        receiver = stream_parameters["receiver"]
+        access_category = stream_parameters.get("access_category", None)
+        bandwidth = stream_parameters.get("bandwidth", None)
+        time = stream_parameters.get("time", DEFAULT_STREAM_TIME)
+        return (receiver, access_category, bandwidth, time)
diff --git a/src/antlion/test_utils/wifi/p2p/__init__.py b/packages/antlion/test_utils/dhcp/__init__.py
similarity index 100%
rename from src/antlion/test_utils/wifi/p2p/__init__.py
rename to packages/antlion/test_utils/dhcp/__init__.py
diff --git a/packages/antlion/test_utils/dhcp/base_test.py b/packages/antlion/test_utils/dhcp/base_test.py
new file mode 100644
index 0000000..1add93f
--- /dev/null
+++ b/packages/antlion/test_utils/dhcp/base_test.py
@@ -0,0 +1,279 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+from dataclasses import dataclass
+from ipaddress import IPv4Address, IPv4Network
+
+from mobly import asserts, signals
+from mobly.config_parser import TestRunConfig
+
+from antlion import utils
+from antlion.controllers.access_point import AccessPoint, setup_ap
+from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.ap_lib import dhcp_config, hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+
+@dataclass
+class APParams:
+    """Parameters of an access point brought up by Dhcpv4InteropFixture."""
+
+    id: str  # identifier returned by setup_ap for the configured SSID
+    ssid: str  # network name the DUT associates to
+    security: Security  # hostapd security configuration (mode, password, ciphers)
+    ip: IPv4Address  # router address on the AP's configured subnet
+    network: IPv4Network  # the AP's configured test subnet
+
+
+class Dhcpv4InteropFixture(base_test.WifiBaseTest):
+    """Test helpers for validating DHCPv4 Interop
+
+    Test Bed Requirement:
+    * One Android device or Fuchsia device
+    * One Access Point
+    """
+
+    def __init__(self, configs: TestRunConfig) -> None:
+        super().__init__(configs)
+        self.log = logging.getLogger()
+        self.fuchsia_device: FuchsiaDevice | None = None
+        self.access_point: AccessPoint = self.access_points[0]
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.fuchsia_device, self.dut = self.get_dut_type(
+                FuchsiaDevice, AssociationMode.POLICY
+            )
+        elif device_type == "android_devices":
+            _, self.dut = self.get_dut_type(AndroidDevice, AssociationMode.POLICY)
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
+
+    def setup_class(self) -> None:
+        super().setup_class()
+        self.access_point.stop_all_aps()
+
+    def setup_test(self) -> None:
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockAcquireBright()
+                ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+
+    def teardown_test(self) -> None:
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockRelease()
+                ad.droid.goToSleepNow()
+        self.dut.turn_location_off_and_scan_toggle_off()
+        self.dut.disconnect()
+        self.dut.reset_wifi()
+        self.access_point.stop_all_aps()
+
+    def connect(self, ap_params: APParams) -> None:
+        asserts.assert_true(
+            self.dut.associate(
+                ap_params.ssid,
+                target_pwd=ap_params.security.password,
+                target_security=ap_params.security.security_mode,
+            ),
+            "Failed to connect.",
+        )
+
+    def setup_ap(self) -> APParams:
+        """Generates a hostapd config and sets up the AP with that config.
+
+        Does not run a DHCP server.
+
+        Returns:
+            APParams for the newly setup AP.
+        """
+        ssid = utils.rand_ascii_str(20)
+        security = Security(
+            security_mode=SecurityMode.WPA2,
+            password=generate_random_password(length=20),
+            wpa_cipher="CCMP",
+            wpa2_cipher="CCMP",
+        )
+
+        ap_ids = setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            mode=hostapd_constants.MODE_11N_MIXED,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            n_capabilities=[],
+            ac_capabilities=[],
+            force_wmm=True,
+            ssid=ssid,
+            security=security,
+        )
+
+        if len(ap_ids) > 1:
+            raise Exception("Expected only one SSID on AP")
+
+        configured_subnets = self.access_point.get_configured_subnets()
+        if len(configured_subnets) > 1:
+            raise Exception("Expected only one subnet on AP")
+        router_ip = configured_subnets[0].router
+        network = configured_subnets[0].network
+
+        self.access_point.stop_dhcp()
+
+        return APParams(
+            id=ap_ids[0],
+            ssid=ssid,
+            security=security,
+            ip=router_ip,
+            network=network,
+        )
+
+    def get_device_ipv4_addr(
+        self, interface: str | None = None, timeout_sec: float = 20.0
+    ) -> IPv4Address:
+        """Checks if device has an ipv4 private address.
+
+        Only supported on Fuchsia.
+
+        Args:
+            interface: name of interface from which to get ipv4 address.
+            timeout: seconds to wait until raising ConnectionError
+
+        Raises:
+            ConnectionError, if DUT does not have an ipv4 address after all
+            timeout.
+
+        Returns:
+            The device's IP address
+        """
+        if self.fuchsia_device is None:
+            # TODO(http://b/292289291): Add get_(ipv4|ipv6)_addr to SupportsIP.
+            raise TypeError(
+                "TODO(http://b/292289291): get_device_ipv4_addr only supports "
+                "FuchsiaDevice"
+            )
+
+        self.log.debug("Fetching updated WLAN interface list")
+        if interface is None:
+            interface = self.dut.get_default_wlan_test_interface()
+        self.log.info(
+            "Checking if DUT has received an ipv4 addr on iface %s. Will retry for %s "
+            "seconds." % (interface, timeout_sec)
+        )
+        timeout_sec = time.time() + timeout_sec
+        while time.time() < timeout_sec:
+            ip_addrs = self.fuchsia_device.get_interface_ip_addresses(interface)
+
+            if len(ip_addrs["ipv4_private"]) > 0:
+                ip = ip_addrs["ipv4_private"][0]
+                self.log.info(f"DUT has an ipv4 address: {ip}")
+                return IPv4Address(ip)
+            else:
+                self.log.debug(
+                    "DUT does not yet have an ipv4 address...retrying in 1 " "second."
+                )
+                time.sleep(1)
+        else:
+            raise ConnectionError("DUT failed to get an ipv4 address.")
+
+    def run_test_case_expect_dhcp_success(
+        self, dhcp_parameters: dict[str, str], dhcp_options: dict[str, int | str]
+    ) -> None:
+        """Starts the AP and DHCP server, and validates that the client
+        connects and obtains an address.
+
+        Args:
+            dhcp_parameters: a dictionary of DHCP parameters
+            dhcp_options: a dictionary of DHCP options
+        """
+        ap_params = self.setup_ap()
+        subnet_conf = dhcp_config.Subnet(
+            subnet=ap_params.network,
+            router=ap_params.ip,
+            additional_parameters=dhcp_parameters,
+            additional_options=dhcp_options,
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
+
+        self.log.debug("DHCP Configuration:\n" + dhcp_conf.render_config_file() + "\n")
+
+        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+        self.connect(ap_params=ap_params)
+
+        # Typical log lines look like:
+        # dhcpd[26695]: DHCPDISCOVER from f8:0f:f9:3d:ce:d1 via wlan1
+        # dhcpd[26695]: DHCPOFFER on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
+        # dhcpd[26695]: DHCPREQUEST for 192.168.9.2 (192.168.9.1) from f8:0f:f9:3d:ce:d1 via wlan1
+        # dhcpd[26695]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
+
+        try:
+            ip = self.get_device_ipv4_addr()
+        except ConnectionError:
+            dhcp_logs = self.access_point.get_dhcp_logs()
+            self.log.warn(dhcp_logs)
+            asserts.fail(f"DUT failed to get an IP address")
+
+        # Get updates to DHCP logs
+        dhcp_logs = self.access_point.get_dhcp_logs()
+        if dhcp_logs is None:
+            raise signals.TestFailure("No DHCP logs")
+
+        expected_string = f"DHCPDISCOVER from"
+        asserts.assert_equal(
+            dhcp_logs.count(expected_string),
+            1,
+            f'Incorrect count of DHCP Discovers ("{expected_string}") in logs:\n'
+            + dhcp_logs
+            + "\n",
+        )
+
+        expected_string = f"DHCPOFFER on {ip}"
+        asserts.assert_equal(
+            dhcp_logs.count(expected_string),
+            1,
+            f'Incorrect count of DHCP Offers ("{expected_string}") in logs:\n'
+            + dhcp_logs
+            + "\n",
+        )
+
+        expected_string = f"DHCPREQUEST for {ip}"
+        asserts.assert_true(
+            dhcp_logs.count(expected_string) >= 1,
+            f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
+            + dhcp_logs
+            + "\n",
+        )
+
+        expected_string = f"DHCPACK on {ip}"
+        asserts.assert_true(
+            dhcp_logs.count(expected_string) >= 1,
+            f'Incorrect count of DHCP Acks ("{expected_string}") in logs: '
+            + dhcp_logs
+            + "\n",
+        )
+
+        self.log.info(f"Attempting to ping {ap_params.ip}...")
+        ping_result = self.dut.ping(str(ap_params.ip), count=2)
+        asserts.assert_true(
+            ping_result.success,
+            f"DUT failed to ping router at {ap_params.ip}: {ping_result}",
+        )
diff --git a/src/antlion/test_utils/fuchsia/__init__.py b/packages/antlion/test_utils/fuchsia/__init__.py
similarity index 100%
rename from src/antlion/test_utils/fuchsia/__init__.py
rename to packages/antlion/test_utils/fuchsia/__init__.py
diff --git a/packages/antlion/test_utils/fuchsia/utils.py b/packages/antlion/test_utils/fuchsia/utils.py
new file mode 100644
index 0000000..87fc0fd
--- /dev/null
+++ b/packages/antlion/test_utils/fuchsia/utils.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from antlion.controllers.fuchsia_lib.ssh import SSHError
+
+
+def http_file_download_by_curl(
+    fd,
+    url,
+    out_path="/tmp/",
+    curl_loc="/bin/curl",
+    remove_file_after_check=True,
+    timeout=3600,
+    limit_rate=None,
+    additional_args=None,
+    retry=3,
+):
+    """Download http file by ssh curl.
+
+    Args:
+        fd: Fuchsia Device Object.
+        url: The url that file to be downloaded from.
+        out_path: Optional. Where to download file to.
+            out_path is /tmp by default.
+        curl_loc: Location of curl binary on fd.
+        remove_file_after_check: Whether to remove the downloaded file after
+            check.
+        timeout: timeout for file download to complete.
+        limit_rate: download rate in bps. None, if do not apply rate limit.
+        additional_args: Any additional args for curl.
+        retry: the retry request times provided in curl command.
+
+    Returns:
+        True if the file was downloaded and exists on the device;
+        False on any download or SSH failure.
+    """
+    file_directory, file_name = _generate_file_directory_and_file_name(url, out_path)
+    file_path = os.path.join(file_directory, file_name)
+    curl_cmd = curl_loc
+    if limit_rate:
+        curl_cmd += f" --limit-rate {limit_rate}"
+    if retry:
+        curl_cmd += f" --retry {retry}"
+    if additional_args:
+        curl_cmd += f" {additional_args}"
+    # Output is shell-redirected into file_path on the device.
+    curl_cmd += f" --url {url} > {file_path}"
+
+    fd.log.info(f"Download {url} to {file_path} by ssh command {curl_cmd}")
+    try:
+        fd.ssh.run(curl_cmd, timeout_sec=timeout)
+        if _check_file_existence(fd, file_path):
+            fd.log.info(f"{url} is downloaded to {file_path} successfully")
+            return True
+
+        fd.log.warning(f"Fail to download {url}")
+        return False
+    except SSHError as e:
+        fd.log.warning(f'Command "{curl_cmd}" failed with error {e}')
+        return False
+    except Exception as e:
+        # Deliberate best-effort: any unexpected failure is logged and
+        # reported as an unsuccessful download rather than propagated.
+        fd.log.error(f"Download {url} failed with unexpected exception {e}")
+        return False
+    finally:
+        if remove_file_after_check:
+            fd.log.info(f"Remove the downloaded file {file_path}")
+            # Best-effort cleanup; ignore failure to remove the file.
+            try:
+                fd.ssh.run(f"rm {file_path}")
+            except SSHError:
+                pass
+
+
+def _generate_file_directory_and_file_name(url, out_path):
+    """Splits the file from the url and specifies the appropriate location of
+       where to store the downloaded file.
+
+    If out_path ends with "/" it is treated as a directory and the file name
+    is taken from the url; otherwise out_path is treated as the full
+    destination path, and its basename overrides the url-derived file name.
+
+    Args:
+        url: A url to the file that is going to be downloaded.
+        out_path: The location of where to store the file that is downloaded.
+
+    Returns:
+        file_directory: The directory of where to store the downloaded file.
+        file_name: The name of the file that is being downloaded.
+    """
+    file_name = url.split("/")[-1]
+    if not out_path:
+        file_directory = "/tmp/"
+    elif not out_path.endswith("/"):
+        # out_path names the destination file itself, not a directory.
+        file_directory, file_name = os.path.split(out_path)
+    else:
+        file_directory = out_path
+    return file_directory, file_name
+
+
+def _check_file_existence(fd, file_path):
+    """Check file existence by file_path, using `ls` over ssh.
+
+    Args:
+        fd: A fuchsia device
+        file_path: Where to store the file on the fuchsia device.
+
+    Returns:
+        True if the file exists on the device, False if it does not.
+
+    Raises:
+        SSHError: for ssh failures other than the file not existing.
+    """
+    try:
+        fd.ssh.run(f'ls -al "{file_path}"')
+        fd.log.debug(f"File {file_path} exists.")
+        return True
+    except SSHError as e:
+        # ls failing with this message is the expected "not found" signal;
+        # any other ssh failure is re-raised.
+        if "No such file or directory" in e.result.stderr:
+            fd.log.debug(f"File {file_path} does not exist.")
+            return False
+        raise e
diff --git a/packages/antlion/test_utils/fuchsia/wmm_test_cases.py b/packages/antlion/test_utils/fuchsia/wmm_test_cases.py
new file mode 100644
index 0000000..48eb8ce
--- /dev/null
+++ b/packages/antlion/test_utils/fuchsia/wmm_test_cases.py
@@ -0,0 +1,1326 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Internal Traffic Differentiation
# Each case below runs two upstream (STAUT -> AP) streams with different
# access categories and validates the relative bandwidth they achieve.
test_internal_traffic_diff_VO_VI = {
    "phase_1": {
        "stream_VO": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.87, "rel_tolerance": 0.03}
            ],
        },
        "stream_VI": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 1.0,
            "validation": [{"operator": "<", "phase": "phase_1", "stream": "stream_VO"}],
        },
    }
}

test_internal_traffic_diff_VO_BE = {
    "phase_1": {
        "stream_VO": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BE": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 1.0,
            "validation": [{"operator": "<", "phase": "phase_1", "stream": "stream_VO"}],
        },
    }
}

test_internal_traffic_diff_VO_BK = {
    "phase_1": {
        "stream_VO": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BK": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 1.0,
            "validation": [{"operator": "<", "phase": "phase_1", "stream": "stream_VO"}],
        },
    }
}

test_internal_traffic_diff_VI_BE = {
    "phase_1": {
        "stream_VI": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BE": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 1.0,
            "validation": [{"operator": "<", "phase": "phase_1", "stream": "stream_VI"}],
        },
    }
}

test_internal_traffic_diff_VI_BK = {
    "phase_1": {
        "stream_VI": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BK": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 1.0,
            "validation": [{"operator": "<", "phase": "phase_1", "stream": "stream_VI"}],
        },
    }
}

test_internal_traffic_diff_BE_BK = {
    "phase_1": {
        "stream_BE": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.81, "rel_tolerance": 0.03}
            ],
        },
        "stream_BK": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 1.0,
            "validation": [{"operator": "<", "phase": "phase_1", "stream": "stream_BE"}],
        },
    }
}
+# External Traffic Differentiation
+
+# Single station, STAUT transmits high priority
# Single station, STAUT transmitting the higher-priority stream; the AP
# transmits the lower-priority stream in the opposite direction.
test_external_traffic_diff_staut_VO_ap_VI = {
    "phase_1": {
        "stream_VO_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.87, "rel_tolerance": 0.03}
            ],
        },
        "stream_VI_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_VI", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_VO_staut_to_ap"}
            ],
        },
    }
}

test_external_traffic_diff_staut_VO_ap_BE = {
    "phase_1": {
        "stream_VO_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BE_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BE", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_VO_staut_to_ap"}
            ],
        },
    }
}

test_external_traffic_diff_staut_VO_ap_BK = {
    "phase_1": {
        "stream_VO_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BK_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BK", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_VO_staut_to_ap"}
            ],
        },
    }
}

test_external_traffic_diff_staut_VI_ap_BE = {
    "phase_1": {
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BE_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BE", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_VI_staut_to_ap"}
            ],
        },
    }
}

test_external_traffic_diff_staut_VI_ap_BK = {
    "phase_1": {
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BK_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BK", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_VI_staut_to_ap"}
            ],
        },
    }
}

test_external_traffic_diff_staut_BE_ap_BK = {
    "phase_1": {
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.81, "rel_tolerance": 0.03}
            ],
        },
        "stream_BK_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BK", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_BE_staut_to_ap"}
            ],
        },
    }
}
+
+# Single station, STAUT transmits low priority
# Single station, STAUT transmitting the lower-priority stream; the AP
# transmits the higher-priority stream in the opposite direction.
test_external_traffic_diff_staut_VI_ap_VO = {
    "phase_1": {
        "stream_VO_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.87, "rel_tolerance": 0.03}
            ],
        },
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_VO_ap_to_staut"}
            ],
        },
    }
}

test_external_traffic_diff_staut_BE_ap_VO = {
    "phase_1": {
        "stream_VO_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_VO_ap_to_staut"}
            ],
        },
    }
}

test_external_traffic_diff_staut_BK_ap_VO = {
    "phase_1": {
        "stream_VO_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BK_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_VO_ap_to_staut"}
            ],
        },
    }
}

test_external_traffic_diff_staut_BE_ap_VI = {
    "phase_1": {
        "stream_VI_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_VI_ap_to_staut"}
            ],
        },
    }
}

test_external_traffic_diff_staut_BK_ap_VI = {
    "phase_1": {
        "stream_VI_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.03}
            ],
        },
        "stream_BK_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_VI_ap_to_staut"}
            ],
        },
    }
}

test_external_traffic_diff_staut_BK_ap_BE = {
    "phase_1": {
        "stream_BE_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.81, "rel_tolerance": 0.03}
            ],
        },
        "stream_BK_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 1.0,
            "validation": [
                {"operator": "<", "phase": "phase_1", "stream": "stream_BE_ap_to_staut"}
            ],
        },
    }
}
+
+# Dual Internal/External Traffic Differentiation
+
# Dual cases: the STAUT transmits a high- and a low-priority stream while the
# AP also transmits a stream of the lower access category.
test_dual_traffic_diff_staut_VO_VI_ap_VI = {
    "phase_1": {
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.5,
            "validation": [
                {"operator": "==", "phase": "phase_1",
                 "stream": "stream_VI_ap_to_staut", "max_bw_rel_tolerance": 0.15}
            ],
        },
        "stream_VO_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.81, "rel_tolerance": 0.01}
            ],
        },
        "stream_VI_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.5,
        },
    }
}

test_dual_traffic_diff_staut_VO_BE_ap_BE = {
    "phase_1": {
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.5,
            "validation": [
                {"operator": "==", "phase": "phase_1",
                 "stream": "stream_BE_ap_to_staut", "max_bw_rel_tolerance": 0.15}
            ],
        },
        "stream_VO_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.01}
            ],
        },
        "stream_BE_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.5,
        },
    }
}

test_dual_traffic_diff_staut_VO_BK_ap_BK = {
    "phase_1": {
        "stream_BK_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 0.5,
            "validation": [
                {"operator": "==", "phase": "phase_1",
                 "stream": "stream_BK_ap_to_staut", "max_bw_rel_tolerance": 0.15}
            ],
        },
        "stream_VO_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.01}
            ],
        },
        "stream_BK_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BK", "max_bandwidth_percentage": 0.5,
        },
    }
}

test_dual_traffic_diff_staut_VI_BE_ap_BE = {
    "phase_1": {
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.5,
            "validation": [
                {"operator": "==", "phase": "phase_1",
                 "stream": "stream_BE_ap_to_staut", "max_bw_rel_tolerance": 0.15}
            ],
        },
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.01}
            ],
        },
        "stream_BE_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.5,
        },
    }
}

test_dual_traffic_diff_staut_VI_BK_ap_BK = {
    "phase_1": {
        "stream_BK_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 0.5,
            "validation": [
                {"operator": "==", "phase": "phase_1",
                 "stream": "stream_BK_ap_to_staut", "max_bw_rel_tolerance": 0.15}
            ],
        },
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.89, "rel_tolerance": 0.01}
            ],
        },
        "stream_BK_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BK", "max_bandwidth_percentage": 0.5,
        },
    }
}

test_dual_traffic_diff_staut_BE_BK_ap_BK = {
    "phase_1": {
        "stream_BK_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 0.5,
            "validation": [
                {"operator": "==", "phase": "phase_1",
                 "stream": "stream_BK_ap_to_staut", "max_bw_rel_tolerance": 0.15}
            ],
        },
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.85,
            "validation": [
                {"operator": ">=", "bandwidth_percentage": 0.81, "rel_tolerance": 0.01}
            ],
        },
        "stream_BK_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BK", "max_bandwidth_percentage": 0.5,
        },
    }
}
+
+# ACM Bit Conformance Tests (Single station, as WFA test below uses two)
# Single-station ACM-bit case: one AC_VI stream is compared against two
# concurrent AC_BE streams from the same STAUT.
test_acm_bit_on_VI = {
    "phase_1": {
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.6,
            # TODO: the two checks below should technically be combined
            # with an "or" rather than requiring both.
            "validation": [
                {"operator": "<", "phase": "phase_1",
                 "stream": "stream_BE_staut_to_ap_1",
                 "bandwidth_percentage": 1.15, "rel_tolerance": 0.05},
                {"operator": "<", "phase": "phase_1",
                 "stream": "stream_BE_staut_to_ap_2",
                 "bandwidth_percentage": 1.15, "rel_tolerance": 0.05},
            ],
        },
        "stream_BE_staut_to_ap_1": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.6,
        },
        "stream_BE_staut_to_ap_2": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.6,
        },
    }
}
+
+# AC Parameter Modification Tests (Single station, as WFA test below uses two)
# AC-parameter cases: one stream of the modified access category is validated
# against two reference streams from the same STAUT.
test_ac_param_degrade_VI = {
    "phase_1": {
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.6,
            # TODO: the two checks below should technically be combined
            # with an "or" rather than requiring both.
            "validation": [
                {"operator": "<", "phase": "phase_1",
                 "stream": "stream_BE_staut_to_ap_1",
                 "bandwidth_percentage": 1.15, "rel_tolerance": 0.05},
                {"operator": "<", "phase": "phase_1",
                 "stream": "stream_BE_staut_to_ap_2",
                 "bandwidth_percentage": 1.15, "rel_tolerance": 0.05},
            ],
        },
        "stream_BE_staut_to_ap_1": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.6,
        },
        "stream_BE_staut_to_ap_2": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.6,
        },
    }
}

test_ac_param_degrade_VO = {
    "phase_1": {
        "stream_VO_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VO", "max_bandwidth_percentage": 0.6,
            # TODO: the two checks below should technically be combined
            # with an "or" rather than requiring both.
            "validation": [
                {"operator": "<", "phase": "phase_1",
                 "stream": "stream_BE_staut_to_ap_1",
                 "bandwidth_percentage": 1.15, "rel_tolerance": 0.05},
                {"operator": "<", "phase": "phase_1",
                 "stream": "stream_BE_staut_to_ap_2",
                 "bandwidth_percentage": 1.15, "rel_tolerance": 0.05},
            ],
        },
        "stream_BE_staut_to_ap_1": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.6,
        },
        "stream_BE_staut_to_ap_2": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.6,
        },
    }
}

test_ac_param_improve_BE = {
    "phase_1": {
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.6,
            # TODO: the two checks below should technically be combined
            # with an "or" rather than requiring both.
            "validation": [
                {"operator": ">", "phase": "phase_1",
                 "stream": "stream_VI_staut_to_ap_1",
                 "bandwidth_percentage": 0.869, "rel_tolerance": 0.05},
                {"operator": ">", "phase": "phase_1",
                 "stream": "stream_VI_staut_to_ap_2",
                 "bandwidth_percentage": 0.869, "rel_tolerance": 0.05},
            ],
        },
        "stream_VI_staut_to_ap_1": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.6,
        },
        "stream_VI_staut_to_ap_2": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.6,
        },
    }
}

test_ac_param_improve_BK = {
    "phase_1": {
        "stream_BK_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 0.6,
            # TODO: the two checks below should technically be combined
            # with an "or" rather than requiring both.
            "validation": [
                {"operator": ">", "phase": "phase_1",
                 "stream": "stream_VI_staut_to_ap_1",
                 "bandwidth_percentage": 0.869, "rel_tolerance": 0.05},
                {"operator": ">", "phase": "phase_1",
                 "stream": "stream_VI_staut_to_ap_2",
                 "bandwidth_percentage": 0.869, "rel_tolerance": 0.05},
            ],
        },
        "stream_VI_staut_to_ap_1": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.6,
        },
        "stream_VI_staut_to_ap_2": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.6,
        },
    }
}
+# WFA Test Plan Cases
+
+# Traffic Differentiation in Single BSS (Single Station)
# Downlink BE + VI in phase 1 establishes a baseline; phase 2 adds an uplink
# BE stream and requires the downlink VI stream to keep >= 85% of its
# phase-1 bandwidth.
# NOTE(review): the stream keys were previously spelled "steam_BE_ap_to_staut";
# renamed to "stream_BE_ap_to_staut" to fix the typo and match the labeling
# convention used by every other stream in this module.
test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE = {
    "phase_1": {
        "stream_BE_ap_to_staut": dict(
            transmitter_str="access_point",
            receiver_str="staut",
            access_category="AC_BE",
            max_bandwidth_percentage=0.45,
        ),
        "stream_VI_ap_to_staut": dict(
            transmitter_str="access_point",
            receiver_str="staut",
            access_category="AC_VI",
            max_bandwidth_percentage=0.45,
        ),
    },
    "phase_2": {
        "stream_BE_ap_to_staut": dict(
            transmitter_str="access_point",
            receiver_str="staut",
            access_category="AC_BE",
            max_bandwidth_percentage=0.45,
        ),
        "stream_VI_ap_to_staut": dict(
            transmitter_str="access_point",
            receiver_str="staut",
            access_category="AC_VI",
            max_bandwidth_percentage=0.45,
            validation=[
                dict(
                    operator=">=",
                    phase="phase_1",
                    stream="stream_VI_ap_to_staut",
                    bandwidth_percentage=0.85,
                    rel_tolerance=0.01,
                )
            ],
        ),
        "stream_BE_staut_to_ap": dict(
            transmitter_str="staut",
            receiver_str="access_point",
            access_category="AC_BE",
            max_bandwidth_percentage=0.65,
        ),
    },
}
+
# Uplink BE + VI in phase 1 establishes a baseline; phase 2 adds a second
# uplink BE stream and validates the VI stream against its phase-1 bandwidth.
test_wfa_traffic_diff_single_station_staut_VI_BE = {
    "phase_1": {
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.45,
        },
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.45,
        },
    },
    "phase_2": {
        "stream_BE_staut_to_ap_1": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.45,
        },
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.45,
            "validation": [
                {"operator": ">=", "phase": "phase_1",
                 "stream": "stream_VI_staut_to_ap",
                 "bandwidth_percentage": 0.89, "rel_tolerance": 0.01}
            ],
        },
        "stream_BE_staut_to_ap_2": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.65,
        },
    },
}
+
# Uplink BE + VI in phase 1 establishes a baseline; phase 2 adds a downlink
# BE stream and validates the uplink VI stream against its phase-1 bandwidth.
test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE = {
    "phase_1": {
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.45,
        },
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.45,
        },
    },
    "phase_2": {
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.45,
        },
        "stream_VI_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_VI", "max_bandwidth_percentage": 0.45,
            "validation": [
                {"operator": ">=", "phase": "phase_1",
                 "stream": "stream_VI_staut_to_ap",
                 "bandwidth_percentage": 0.87, "rel_tolerance": 0.01}
            ],
        },
        "stream_BE_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.65,
        },
    },
}
+
# Uplink BK + BE in phase 1 establishes a baseline; phase 2 adds a downlink
# BK stream and validates the uplink BE stream against its phase-1 bandwidth.
test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK = {
    "phase_1": {
        "stream_BK_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 0.45,
        },
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.45,
        },
    },
    "phase_2": {
        "stream_BK_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BK", "max_bandwidth_percentage": 0.45,
        },
        "stream_BE_staut_to_ap": {
            "transmitter_str": "staut", "receiver_str": "access_point",
            "access_category": "AC_BE", "max_bandwidth_percentage": 0.45,
            "validation": [
                {"operator": ">=", "phase": "phase_1",
                 "stream": "stream_BE_staut_to_ap",
                 "bandwidth_percentage": 0.81, "rel_tolerance": 0.01}
            ],
        },
        "stream_BK_ap_to_staut": {
            "transmitter_str": "access_point", "receiver_str": "staut",
            "access_category": "AC_BK", "max_bandwidth_percentage": 0.65,
        },
    },
}
+
+test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI = {
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VO_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.45,
+        ),
+    },
+    "phase_2": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VO_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_VO_staut_to_ap",
+                    bandwidth_percentage=0.81,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_VI_ap_to_staut": dict(
+            transmitter_str="access_point",
+            receiver_str="staut",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
+}
+
+# Traffic Differentiation in Single BSS (Two Stations)
+test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE = {
+    "phase_1": {
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VI_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
+    },
+    "phase_2": {
+        "stream_BE_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VI_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_VI_secondary_to_ap",
+                    bandwidth_percentage=0.90,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_BE_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
+}
+
+test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE = {
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_BE_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+    },
+    "phase_2": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_VI_staut_to_ap",
+                    bandwidth_percentage=0.88,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_BE_secondary_to_ap_1": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_BE_secondary_to_ap_2": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
+}
+
+test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK = {
+    "phase_1": {
+        "stream_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_BE_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+        ),
+    },
+    "phase_2": {
+        "stream_BK_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_BE_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_BE_secondary_to_ap",
+                    bandwidth_percentage=0.90,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_BK_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BK",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
+}
+
+test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI = {
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VO_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.45,
+        ),
+    },
+    "phase_2": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.45,
+        ),
+        "stream_VO_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_VO",
+            max_bandwidth_percentage=0.45,
+            validation=[
+                dict(
+                    operator=">=",
+                    phase="phase_1",
+                    stream="stream_VO_secondary_to_ap",
+                    bandwidth_percentage=0.90,
+                    rel_tolerance=0.01,
+                )
+            ],
+        ),
+        "stream_VI_secondary_to_ap": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.65,
+        ),
+    },
+}
+
+test_wfa_acm_bit_on_VI = {
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.65,
+            validation=[
+                # TODO(): This should technically be an "or"
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_secondary_to_ap_1",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_secondary_to_ap_2",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+            ],
+        ),
+        "stream_BE_secondary_to_ap_1": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+        "stream_BE_secondary_to_ap_2": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+    }
+}
+
+test_wfa_ac_param_degrade_VI = {
+    "phase_1": {
+        "stream_VI_staut_to_ap": dict(
+            transmitter_str="staut",
+            receiver_str="access_point",
+            access_category="AC_VI",
+            max_bandwidth_percentage=0.65,
+            validation=[
+                # TODO(): This should technically be an "or"
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_secondary_to_ap_1",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+                dict(
+                    operator="<",
+                    phase="phase_1",
+                    stream="stream_BE_secondary_to_ap_2",
+                    bandwidth_percentage=1.15,
+                    rel_tolerance=0.05,
+                ),
+            ],
+        ),
+        "stream_BE_secondary_to_ap_1": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+        "stream_BE_secondary_to_ap_2": dict(
+            transmitter_str="secondary_sta",
+            receiver_str="access_point",
+            access_category="AC_BE",
+            max_bandwidth_percentage=0.65,
+        ),
+    }
+}
diff --git a/src/antlion/test_utils/net/__init__.py b/packages/antlion/test_utils/net/__init__.py
similarity index 100%
rename from src/antlion/test_utils/net/__init__.py
rename to packages/antlion/test_utils/net/__init__.py
diff --git a/packages/antlion/test_utils/net/connectivity_const.py b/packages/antlion/test_utils/net/connectivity_const.py
new file mode 100644
index 0000000..05495f0
--- /dev/null
+++ b/packages/antlion/test_utils/net/connectivity_const.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum
+
+######################################################
+# ConnectivityManager.NetworkCallback events
+######################################################
+EVENT_NETWORK_CALLBACK = "NetworkCallback"
+
+# event types
+NETWORK_CB_PRE_CHECK = "PreCheck"
+NETWORK_CB_AVAILABLE = "Available"
+NETWORK_CB_LOSING = "Losing"
+NETWORK_CB_LOST = "Lost"
+NETWORK_CB_UNAVAILABLE = "Unavailable"
+NETWORK_CB_CAPABILITIES_CHANGED = "CapabilitiesChanged"
+NETWORK_CB_SUSPENDED = "Suspended"
+NETWORK_CB_RESUMED = "Resumed"
+NETWORK_CB_LINK_PROPERTIES_CHANGED = "LinkPropertiesChanged"
+NETWORK_CB_INVALID = "Invalid"
+
+# event data keys
+NETWORK_CB_KEY_ID = "id"
+NETWORK_CB_KEY_EVENT = "networkCallbackEvent"
+NETWORK_CB_KEY_MAX_MS_TO_LIVE = "maxMsToLive"
+NETWORK_CB_KEY_RSSI = "rssi"
+NETWORK_CB_KEY_INTERFACE_NAME = "interfaceName"
+NETWORK_CB_KEY_CREATE_TS = "creation_timestamp"
+NETWORK_CB_KEY_CURRENT_TS = "current_timestamp"
+NETWORK_CB_KEY_NETWORK_SPECIFIER = "network_specifier"
+NETWORK_CB_KEY_TRANSPORT_INFO = "transport_info"
+
+# Constants for VPN connection status
+VPN_STATE_DISCONNECTED = 0
+VPN_STATE_INITIALIZING = 1
+VPN_STATE_CONNECTING = 2
+VPN_STATE_CONNECTED = 3
+VPN_STATE_TIMEOUT = 4
+VPN_STATE_FAILED = 5
+# TODO gmoturu: determine the exact timeout value
+# This is a random value as of now
+VPN_TIMEOUT = 30
+
+# Connectivity Manager constants
+TYPE_MOBILE = 0
+TYPE_WIFI = 1
+
+# Network request related constants.
+NETWORK_CAP_TRANSPORT_WIFI = TYPE_WIFI
+NETWORK_CAP_CAPABILITY_INTERNET = 12
+
+# Network request related keys.
+NETWORK_CAP_TRANSPORT_TYPE_KEY = "TransportType"
+NETWORK_CAP_CAPABILITY_KEY = "Capability"
+
+# Multipath preference constants
+MULTIPATH_PREFERENCE_NONE = 0
+MULTIPATH_PREFERENCE_HANDOVER = 1 << 0
+MULTIPATH_PREFERENCE_RELIABILITY = 1 << 1
+MULTIPATH_PREFERENCE_PERFORMANCE = 1 << 2
+
+# Private DNS constants
+DNS_GOOGLE_HOSTNAME = "dns.google"
+DNS_QUAD9_HOSTNAME = "dns.quad9.net"
+DNS_CLOUDFLARE_HOSTNAME = "1dot1dot1dot1.cloudflare-dns.com"
+DOH_CLOUDFLARE_HOSTNAME = "cloudflare-dns.com"
+PRIVATE_DNS_MODE_OFF = "off"
+PRIVATE_DNS_MODE_OPPORTUNISTIC = "opportunistic"
+PRIVATE_DNS_MODE_STRICT = "hostname"
+
+DNS_SUPPORT_TYPE = {
+    DNS_GOOGLE_HOSTNAME: ["Do53", "DoT", "DoH"],
+    DNS_CLOUDFLARE_HOSTNAME: ["Do53", "DoT"],
+    DOH_CLOUDFLARE_HOSTNAME: ["DoH"],
+}
+
+DNS_GOOGLE_ADDR_V4 = ["8.8.4.4", "8.8.8.8"]
+DNS_GOOGLE_ADDR_V6 = ["2001:4860:4860::8888", "2001:4860:4860::8844"]
+DNS_CLOUDFLARE_ADDR_V4 = ["1.1.1.1", "1.0.0.1"]
+DOH_CLOUDFLARE_ADDR_V4 = ["104.16.248.249", "104.16.249.249"]
+DOH_CLOUDFLARE_ADDR_V6 = ["2606:4700::6810:f8f9", "2606:4700::6810:f9f9"]
+
+# IpSec constants
+SOCK_STREAM = 1
+SOCK_DGRAM = 2
+AF_INET = 2
+AF_INET6 = 10
+DIRECTION_IN = 0
+DIRECTION_OUT = 1
+MODE_TRANSPORT = 0
+MODE_TUNNEL = 1
+CRYPT_NULL = "ecb(cipher_null)"
+CRYPT_AES_CBC = "cbc(aes)"
+AUTH_HMAC_MD5 = "hmac(md5)"
+AUTH_HMAC_SHA1 = "hmac(sha1)"
+AUTH_HMAC_SHA256 = "hmac(sha256)"
+AUTH_HMAC_SHA384 = "hmac(sha384)"
+AUTH_HMAC_SHA512 = "hmac(sha512)"
+AUTH_CRYPT_AES_GCM = "rfc4106(gcm(aes))"
+
+
+# Constants for VpnProfile
+class VpnProfile(object):
+    """This class contains all the possible
+    parameters required for VPN connection
+    """
+
+    NAME = "name"
+    TYPE = "type"
+    SERVER = "server"
+    USER = "username"
+    PWD = "password"
+    DNS = "dnsServers"
+    SEARCH_DOMAINS = "searchDomains"
+    ROUTES = "routes"
+    MPPE = "mppe"
+    L2TP_SECRET = "l2tpSecret"
+    IPSEC_ID = "ipsecIdentifier"
+    IPSEC_SECRET = "ipsecSecret"
+    IPSEC_USER_CERT = "ipsecUserCert"
+    IPSEC_CA_CERT = "ipsecCaCert"
+    IPSEC_SERVER_CERT = "ipsecServerCert"
+
+
+# Enums for VPN profile types
+class VpnProfileType(enum.Enum):
+    """Integer constant for each type of VPN"""
+
+    PPTP = 0
+    L2TP_IPSEC_PSK = 1
+    L2TP_IPSEC_RSA = 2
+    IPSEC_XAUTH_PSK = 3
+    IPSEC_XAUTH_RSA = 4
+    IPSEC_HYBRID_RSA = 5
+    IKEV2_IPSEC_USER_PASS = 6
+    IKEV2_IPSEC_PSK = 7
+    IKEV2_IPSEC_RSA = 8
+
+
+# Constants for config file
+class VpnReqParams(object):
+    """Config file parameters required for
+    VPN connection
+    """
+
+    vpn_server_addresses = "vpn_server_addresses"
+    vpn_verify_addresses = "vpn_verify_addresses"
+    vpn_username = "vpn_username"
+    vpn_password = "vpn_password"
+    psk_secret = "psk_secret"
+    client_pkcs_file_name = "client_pkcs_file_name"
+    cert_path_vpnserver = "cert_path_vpnserver"
+    cert_password = "cert_password"
+    pptp_mppe = "pptp_mppe"
+    ipsec_server_type = "ipsec_server_type"
+    wifi_network = "wifi_network"
+    vpn_identity = "vpn_identity"
+    vpn_server_hostname = "vpn_server_hostname"
diff --git a/packages/antlion/test_utils/net/net_test_utils.py b/packages/antlion/test_utils/net/net_test_utils.py
new file mode 100644
index 0000000..2862031
--- /dev/null
+++ b/packages/antlion/test_utils/net/net_test_utils.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+
+from antlion.controllers import adb
+from antlion.test_utils.net import connectivity_const as cconst
+from antlion.utils import start_standing_subprocess, stop_standing_subprocess
+
+VPN_CONST = cconst.VpnProfile
+VPN_TYPE = cconst.VpnProfileType
+VPN_PARAMS = cconst.VpnReqParams
+TCPDUMP_PATH = "/data/local/tmp/"
+USB_CHARGE_MODE = "svc usb setFunctions"
+USB_TETHERING_MODE = "svc usb setFunctions rndis"
+ENABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 0"
+DISABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 1"
+DEVICE_IP_ADDRESS = "ip address"
+LOCALHOST = "192.168.1.1"
+
+# Time to wait for radio to up and running after reboot
+WAIT_TIME_AFTER_REBOOT = 10
+
+GCE_SSH = "gcloud compute ssh "
+GCE_SCP = "gcloud compute scp "
+
+
+def start_tcpdump(ad, test_name, interface="any"):
+    """Start tcpdump on all interfaces.
+
+    Args:
+        ad: android device object.
+        test_name: tcpdump file name will have this
+    """
+    ad.log.info("Starting tcpdump on all interfaces")
+    ad.adb.shell("killall -9 tcpdump", ignore_status=True)
+    ad.adb.shell(f"mkdir {TCPDUMP_PATH}", ignore_status=True)
+    ad.adb.shell(f"rm -rf {TCPDUMP_PATH}/*", ignore_status=True)
+
+    file_name = f"{TCPDUMP_PATH}/tcpdump_{ad.serial}_{test_name}.pcap"
+    ad.log.info("tcpdump file is %s", file_name)
+    cmd = f"adb -s {ad.serial} shell tcpdump -i {interface} -s0 -w {file_name}"
+    try:
+        return start_standing_subprocess(cmd, 5)
+    except Exception:
+        ad.log.exception(f"Could not start standing process {repr(cmd)}")
+
+    return None
+
+
+def stop_tcpdump(
+    ad, proc, test_name, pull_dump=True, adb_pull_timeout=adb.DEFAULT_ADB_PULL_TIMEOUT
+):
+    """Stops tcpdump on any iface.
+
+       Pulls the tcpdump file in the tcpdump dir if necessary.
+
+    Args:
+        ad: android device object.
+        proc: need to know which pid to stop
+        test_name: test name to save the tcpdump file
+        pull_dump: pull tcpdump file or not
+        adb_pull_timeout: timeout for adb_pull
+
+    Returns:
+      log_path of the tcpdump file
+    """
+    ad.log.info("Stopping and pulling tcpdump if any")
+    if proc is None:
+        return None
+    try:
+        stop_standing_subprocess(proc)
+    except Exception as e:
+        ad.log.warning(e)
+    if pull_dump:
+        log_path = os.path.join(ad.device_log_path, f"TCPDUMP_{ad.serial}")
+        os.makedirs(log_path, exist_ok=True)
+        ad.adb.pull(f"{TCPDUMP_PATH}/. {log_path}", timeout=adb_pull_timeout)
+        ad.adb.shell(f"rm -rf {TCPDUMP_PATH}/*", ignore_status=True)
+        file_name = f"tcpdump_{ad.serial}_{test_name}.pcap"
+        return f"{log_path}/{file_name}"
+    return None
diff --git a/src/antlion/test_utils/wifi/OWNERS b/packages/antlion/test_utils/wifi/OWNERS
similarity index 100%
rename from src/antlion/test_utils/wifi/OWNERS
rename to packages/antlion/test_utils/wifi/OWNERS
diff --git a/src/antlion/test_utils/wifi/__init__.py b/packages/antlion/test_utils/wifi/__init__.py
similarity index 100%
rename from src/antlion/test_utils/wifi/__init__.py
rename to packages/antlion/test_utils/wifi/__init__.py
diff --git a/packages/antlion/test_utils/wifi/base_test.py b/packages/antlion/test_utils/wifi/base_test.py
new file mode 100644
index 0000000..36d43f2
--- /dev/null
+++ b/packages/antlion/test_utils/wifi/base_test.py
@@ -0,0 +1,882 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+    Base Class for Defining Common WiFi Test Functionality
+"""
+
+import copy
+import os
+from typing import Any, TypedDict, TypeVar
+
+from mobly import signals
+from mobly.config_parser import TestRunConfig
+from mobly.records import TestResultRecord
+
+from antlion import context, controllers, utils
+from antlion.base_test import AntlionBaseTest
+from antlion.controllers.access_point import AccessPoint
+from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import (
+    OpenWRTEncryptionMode,
+    SecurityMode,
+)
+from antlion.controllers.attenuator import Attenuator
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.iperf_client import IPerfClientBase
+from antlion.controllers.iperf_server import IPerfServerOverSsh
+from antlion.controllers.openwrt_ap import PMF_ENABLED, BSSIDMap, OpenWrtAP
+from antlion.controllers.openwrt_lib.wireless_config import WirelessConfig
+from antlion.controllers.packet_capture import PacketCapture
+from antlion.controllers.pdu import PduDevice
+from antlion.keys import Config
+from antlion.test_utils.abstract_devices.wlan_device import (
+    AndroidWlanDevice,
+    AssociationMode,
+    FuchsiaWlanDevice,
+    SupportsWLAN,
+)
+from antlion.test_utils.net import net_test_utils as nutils
+from antlion.test_utils.wifi import wifi_test_utils as wutils
+
+WifiEnums = wutils.WifiEnums
+MAX_AP_COUNT = 2
+
+
+class Network(TypedDict):
+    SSID: str
+    security: SecurityMode
+    password: str | None
+    hiddenSSID: bool
+    wepKeys: list[str] | None
+    ieee80211w: str | None
+
+
+class NetworkUpdate(TypedDict, total=False):
+    SSID: str
+    security: SecurityMode
+    password: str | None
+    hiddenSSID: bool
+    wepKeys: list[str] | None
+    ieee80211w: str | None
+
+
+NetworkList = dict[str, Network]
+
+_T = TypeVar("_T")
+
+
+class WifiBaseTest(AntlionBaseTest):
+    def __init__(self, configs: TestRunConfig) -> None:
+        super().__init__(configs)
+        self.enable_packet_log = False
+        self.packet_log_2g = hostapd_constants.AP_DEFAULT_CHANNEL_2G
+        self.packet_log_5g = hostapd_constants.AP_DEFAULT_CHANNEL_5G
+        self.tcpdump_proc: list[Any] = []
+        self.packet_log_pid: dict[str, Any] = {}
+
+    def setup_class(self) -> None:
+        T = TypeVar("T")
+
+        def register_controller(module: Any) -> list[T]:
+            controllers: list[T] | None = self.register_controller(
+                module, required=False
+            )
+            if controllers is None:
+                return []
+            return controllers
+
+        self.access_points: list[AccessPoint] = register_controller(
+            controllers.access_point
+        )
+        self.openwrt_aps: list[OpenWrtAP] = register_controller(controllers.openwrt_ap)
+        self.android_devices: list[AndroidDevice] = register_controller(
+            controllers.android_device
+        )
+        self.attenuators: list[Attenuator] = register_controller(controllers.attenuator)
+        self.fuchsia_devices: list[FuchsiaDevice] = register_controller(
+            controllers.fuchsia_device
+        )
+        self.iperf_clients: list[IPerfClientBase] = register_controller(
+            controllers.iperf_client
+        )
+        self.iperf_servers: list[IPerfServerOverSsh] = register_controller(
+            controllers.iperf_server
+        )
+        self.pdu_devices: list[PduDevice] = register_controller(controllers.pdu)
+        self.packet_capture: list[PacketCapture] = register_controller(
+            controllers.packet_capture
+        )
+
+        for attenuator in self.attenuators:
+            attenuator.set_atten(0)
+
+        self.pixel_models: list[str] | None = self.user_params.get("pixel_models")
+        self.cnss_diag_file: str | list[str] | None = self.user_params.get(
+            "cnss_diag_file"
+        )
+        self.country_code_file: str | list[str] | None = self.user_params.get(
+            "country_code_file"
+        )
+
+        if self.cnss_diag_file:
+            if isinstance(self.cnss_diag_file, list):
+                self.cnss_diag_file = self.cnss_diag_file[0]
+            if not os.path.isfile(self.cnss_diag_file):
+                self.cnss_diag_file = os.path.join(
+                    self.user_params[Config.key_config_path.value], self.cnss_diag_file
+                )
+
+        self.packet_logger: PacketCapture | None = None
+        if self.enable_packet_log and self.packet_capture:
+            self.packet_logger = self.packet_capture[0]
+            self.packet_logger.configure_monitor_mode("2G", self.packet_log_2g)
+            self.packet_logger.configure_monitor_mode("5G", self.packet_log_5g)
+
+        for ad in self.android_devices:
+            wutils.wifi_test_device_init(ad)
+            if self.country_code_file:
+                if isinstance(self.country_code_file, list):
+                    self.country_code_file = self.country_code_file[0]
+                if not os.path.isfile(self.country_code_file):
+                    self.country_code_file = os.path.join(
+                        self.user_params[Config.key_config_path.value],
+                        self.country_code_file,
+                    )
+                self.country_code = utils.load_config(self.country_code_file)["country"]
+            else:
+                self.country_code = WifiEnums.CountryCode.US
+            wutils.set_wifi_country_code(ad, self.country_code)
+
+    def setup_test(self) -> None:
+        if self.android_devices and self.cnss_diag_file and self.pixel_models:
+            wutils.start_cnss_diags(
+                self.android_devices, self.cnss_diag_file, self.pixel_models
+            )
+        self.tcpdump_proc = []
+        for ad in self.android_devices:
+            proc = nutils.start_tcpdump(ad, self.current_test_info.name)
+            self.tcpdump_proc.append((ad, proc))
+        if self.packet_logger:
+            self.packet_log_pid = wutils.start_pcap(
+                self.packet_logger, "dual", self.current_test_info.name
+            )
+
+    def teardown_test(self) -> None:
+        if self.android_devices and self.cnss_diag_file and self.pixel_models:
+            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
+        for proc in self.tcpdump_proc:
+            nutils.stop_tcpdump(
+                proc[0], proc[1], self.current_test_info.name, pull_dump=False
+            )
+        self.tcpdump_proc = []
+        if self.packet_logger and self.packet_log_pid:
+            wutils.stop_pcap(self.packet_logger, self.packet_log_pid, test_status=True)
+            self.packet_log_pid = {}
+
+    def teardown_class(self) -> None:
+        super().teardown_class()
+        if hasattr(self, "fuchsia_devices"):
+            for device in self.fuchsia_devices:
+                device.take_bug_report()
+
+    def on_fail(self, record: TestResultRecord) -> None:
+        """A function that is executed upon a test failure.
+
+        Args:
+        record: A copy of the test record for this test, containing all information of
+            the test execution including exception objects.
+        """
+        if self.android_devices:
+            for ad in self.android_devices:
+                ad.take_bug_report(record.test_name, record.begin_time)
+                ad.cat_adb_log(record.test_name, record.begin_time)
+                wutils.get_ssrdumps(ad)
+            if self.cnss_diag_file and self.pixel_models:
+                wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
+                for ad in self.android_devices:
+                    wutils.get_cnss_diag_log(ad)
+            for proc in self.tcpdump_proc:
+                nutils.stop_tcpdump(proc[0], proc[1], record.test_name)
+            self.tcpdump_proc = []
+        if self.packet_logger and self.packet_log_pid:
+            wutils.stop_pcap(self.packet_logger, self.packet_log_pid, test_status=False)
+            self.packet_log_pid = {}
+
+        # Gets a wlan_device log and calls the generic device fail on DUT.
+        for fd in self.fuchsia_devices:
+            self.on_device_fail(fd, record)
+
+    def on_device_fail(self, device: FuchsiaDevice, _: TestResultRecord) -> None:
+        """Gets a generic device DUT bug report.
+
+        This method takes a bug report if the device has the
+        'take_bug_report_on_fail' config value, and if the flag is true. This
+        method also power cycles if 'hard_reboot_on_fail' is True.
+
+        Args:
+            device: Generic device to gather logs from.
+            record: More information about the test.
+        """
+        if (
+            not hasattr(device, "take_bug_report_on_fail")
+            or device.take_bug_report_on_fail
+        ):
+            device.take_bug_report()
+
+        if hasattr(device, "hard_reboot_on_fail") and device.hard_reboot_on_fail:
+            device.reboot(reboot_type="hard", testbed_pdus=self.pdu_devices)
+
+    def get_dut(self, association_mode: AssociationMode) -> SupportsWLAN:
+        """Get the DUT based on user_params, default to Fuchsia."""
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            return self.get_dut_type(FuchsiaDevice, association_mode)[1]
+        elif device_type == "android_devices":
+            return self.get_dut_type(AndroidDevice, association_mode)[1]
+        else:
+            raise signals.TestAbortClass(
+                f'Invalid "dut" type specified in config: "{device_type}". '
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
+
+    def get_dut_type(
+        self, device_type: type[_T], association_mode: AssociationMode
+    ) -> tuple[_T, SupportsWLAN]:
+        if device_type is FuchsiaDevice:
+            if len(self.fuchsia_devices) == 0:
+                raise signals.TestAbortClass("Requires at least one Fuchsia device")
+            fd = self.fuchsia_devices[0]
+            assert isinstance(fd, device_type)
+            return fd, FuchsiaWlanDevice(fd, association_mode)
+
+        if device_type is AndroidDevice:
+            if len(self.android_devices) == 0:
+                raise signals.TestAbortClass("Requires at least one Android device")
+            ad = self.android_devices[0]
+            assert isinstance(ad, device_type)
+            return ad, AndroidWlanDevice(ad)
+
+        raise signals.TestAbortClass(
+            f"Invalid device_type specified: {device_type.__name__}. "
+            "Expected FuchsiaDevice or AndroidDevice."
+        )
+
+    def download_logs(self) -> None:
+        """Downloads the DHCP and hostapad logs from the access_point.
+
+        Using the current TestClassContext and TestCaseContext this method pulls
+        the DHCP and hostapd logs and outputs them to the correct path.
+        """
+        current_path = context.get_current_context().get_full_output_path()
+        self._download_ap_logs(current_path)
+        self._download_iperf_logs(current_path)
+
+    def _download_ap_logs(self, path: str) -> None:
+        for access_point in self.access_points:
+            dhcp_log = access_point.get_dhcp_logs()
+            if dhcp_log:
+                dhcp_log_path = os.path.join(
+                    path, f"{access_point.identifier}_dhcp_log.txt"
+                )
+                with open(dhcp_log_path, "a") as f:
+                    f.write(dhcp_log)
+
+            hostapd_logs = access_point.get_hostapd_logs()
+            for interface in hostapd_logs:
+                hostapd_log_path = os.path.join(
+                    path,
+                    f"{access_point.identifier}_hostapd_log_{interface}.txt",
+                )
+                with open(hostapd_log_path, "a") as f:
+                    f.write(hostapd_logs[interface])
+
+            radvd_log = access_point.get_radvd_logs()
+            if radvd_log:
+                radvd_log_path = os.path.join(
+                    path, f"{access_point.identifier}_radvd_log.txt"
+                )
+                with open(radvd_log_path, "a") as f:
+                    f.write(radvd_log)
+
+            systemd_journal = access_point.get_systemd_journal()
+            systemd_journal_path = os.path.join(
+                path, f"{access_point.identifier}_systemd_journal.txt"
+            )
+            with open(systemd_journal_path, "a") as f:
+                f.write(systemd_journal)
+
+    def _download_iperf_logs(self, path: str) -> None:
+        for iperf_server in self.iperf_servers:
+            iperf_systemd_journal = iperf_server.get_systemd_journal()
+            if iperf_systemd_journal:
+                iperf_systemd_journal_path = os.path.join(
+                    path, f"{iperf_server.hostname}_systemd_journal.txt"
+                )
+                with open(iperf_systemd_journal_path, "a") as f:
+                    f.write(iperf_systemd_journal)
+
+    def get_psk_network(
+        self,
+        mirror_ap: bool,
+        reference_networks: list[NetworkList],
+        hidden: bool = False,
+        same_ssid: bool = False,
+        security_mode: SecurityMode = SecurityMode.WPA2,
+        ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
+        ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
+        passphrase_length_2g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
+        passphrase_length_5g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
+    ) -> NetworkList:
+        """Generates SSID and passphrase for a WPA2 network using random
+        generator.
+
+        Args:
+            mirror_ap: Determines if both APs use the same hostapd config or
+                different configs.
+            reference_networks: PSK networks.
+            same_ssid: Determines if both bands on AP use the same SSID.
+            ssid_length_2g: Number of characters to use for 2G SSID.
+            ssid_length_5g: Number of characters to use for 5G SSID.
+            passphrase_length_2g: Length of password for 2G network.
+            passphrase_length_5g: Length of password for 5G network.
+
+        Returns: A dict of 2G and 5G network lists for hostapd configuration.
+        """
+        if same_ssid:
+            ref_2g_ssid = f"xg_{utils.rand_ascii_str(ssid_length_2g)}"
+            ref_5g_ssid = ref_2g_ssid
+
+            ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
+            ref_5g_passphrase = ref_2g_passphrase
+
+        else:
+            ref_2g_ssid = f"2g_{utils.rand_ascii_str(ssid_length_2g)}"
+            ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
+
+            ref_5g_ssid = f"5g_{utils.rand_ascii_str(ssid_length_5g)}"
+            ref_5g_passphrase = utils.rand_ascii_str(passphrase_length_5g)
+
+        network_dict_2g = Network(
+            SSID=ref_2g_ssid,
+            security=security_mode,
+            password=ref_2g_passphrase,
+            hiddenSSID=hidden,
+            wepKeys=None,
+            ieee80211w=None,
+        )
+
+        network_dict_5g = Network(
+            SSID=ref_5g_ssid,
+            security=security_mode,
+            password=ref_5g_passphrase,
+            hiddenSSID=hidden,
+            wepKeys=None,
+            ieee80211w=None,
+        )
+
+        for _ in range(MAX_AP_COUNT):
+            reference_networks.append(
+                {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)}
+            )
+            if not mirror_ap:
+                break
+        return {"2g": network_dict_2g, "5g": network_dict_5g}
+
+    def get_open_network(
+        self,
+        mirror_ap: bool,
+        open_network: list[NetworkList],
+        hidden: bool = False,
+        same_ssid: bool = False,
+        ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
+        ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
+        security_mode: SecurityMode = SecurityMode.OPEN,
+    ) -> NetworkList:
+        """Generates SSIDs for a open network using a random generator.
+
+        Args:
+            mirror_ap: Boolean, determines if both APs use the same hostapd
+                       config or different configs.
+            open_network: List of open networks.
+            same_ssid: Boolean, determines if both bands on AP use the same
+                       SSID.
+            ssid_length_2g: Int, number of characters to use for 2G SSID.
+            ssid_length_5g: Int, number of characters to use for 5G SSID.
+            security_mode: 'none' for open and 'OWE' for WPA3 OWE.
+
+        Returns: A dict of 2G and 5G network lists for hostapd configuration.
+
+        """
+        if same_ssid:
+            open_2g_ssid = f"xg_{utils.rand_ascii_str(ssid_length_2g)}"
+            open_5g_ssid = open_2g_ssid
+        else:
+            open_2g_ssid = f"2g_{utils.rand_ascii_str(ssid_length_2g)}"
+            open_5g_ssid = f"5g_{utils.rand_ascii_str(ssid_length_5g)}"
+
+        network_dict_2g = Network(
+            SSID=open_2g_ssid,
+            security=security_mode,
+            password=None,
+            hiddenSSID=hidden,
+            wepKeys=None,
+            ieee80211w=None,
+        )
+
+        network_dict_5g = Network(
+            SSID=open_5g_ssid,
+            security=security_mode,
+            password=None,
+            hiddenSSID=hidden,
+            wepKeys=None,
+            ieee80211w=None,
+        )
+
+        for _ in range(MAX_AP_COUNT):
+            open_network.append(
+                {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)}
+            )
+            if not mirror_ap:
+                break
+        return {"2g": network_dict_2g, "5g": network_dict_5g}
+
+    def get_wep_network(
+        self,
+        mirror_ap: bool,
+        networks: list[NetworkList],
+        hidden: bool = False,
+        same_ssid: bool = False,
+        ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
+        ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
+        passphrase_length_2g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
+        passphrase_length_5g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
+    ) -> NetworkList:
+        """Generates SSID and passphrase for a WEP network using random
+        generator.
+
+        Args:
+            mirror_ap: Determines if both APs use the same hostapd config or
+                different configs.
+            networks: List of WEP networks.
+            same_ssid: Determines if both bands on AP use the same SSID.
+            ssid_length_2g: Number of characters to use for 2G SSID.
+            ssid_length_5g: Number of characters to use for 5G SSID.
+            passphrase_length_2g: Length of password for 2G network.
+            passphrase_length_5g: Length of password for 5G network.
+
+        Returns: A dict of 2G and 5G network lists for hostapd configuration.
+
+        """
+        if same_ssid:
+            ref_2g_ssid = f"xg_{utils.rand_ascii_str(ssid_length_2g)}"
+            ref_5g_ssid = ref_2g_ssid
+
+            ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
+            ref_5g_passphrase = ref_2g_passphrase
+
+        else:
+            ref_2g_ssid = f"2g_{utils.rand_ascii_str(ssid_length_2g)}"
+            ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
+
+            ref_5g_ssid = f"5g_{utils.rand_ascii_str(ssid_length_5g)}"
+            ref_5g_passphrase = utils.rand_hex_str(passphrase_length_5g)
+
+        network_dict_2g = Network(
+            SSID=ref_2g_ssid,
+            security=SecurityMode.WEP,
+            password=None,
+            hiddenSSID=hidden,
+            wepKeys=[ref_2g_passphrase] * 4,
+            ieee80211w=None,
+        )
+
+        network_dict_5g = Network(
+            SSID=ref_5g_ssid,
+            security=SecurityMode.WEP,
+            password=None,
+            hiddenSSID=hidden,
+            wepKeys=[ref_5g_passphrase] * 4,
+            ieee80211w=None,
+        )
+
+        for _ in range(MAX_AP_COUNT):
+            networks.append(
+                {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)}
+            )
+            if not mirror_ap:
+                break
+        return {"2g": network_dict_2g, "5g": network_dict_5g}
+
    def configure_openwrt_ap_and_start(
        self,
        channel_5g: int = hostapd_constants.AP_DEFAULT_CHANNEL_5G,
        channel_2g: int = hostapd_constants.AP_DEFAULT_CHANNEL_2G,
        channel_5g_ap2: int | None = None,
        channel_2g_ap2: int | None = None,
        ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
        passphrase_length_2g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
        ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
        passphrase_length_5g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
        mirror_ap: bool = False,
        hidden: bool = False,
        same_ssid: bool = False,
        open_network: bool = False,
        wpa1_network: bool = False,
        wpa_network: bool = False,
        wep_network: bool = False,
        ent_network: bool = False,
        ent_network_pwd: bool = False,
        owe_network: bool = False,
        sae_network: bool = False,
        saemixed_network: bool = False,
        radius_conf_2g: dict[str, Any] | None = None,
        radius_conf_5g: dict[str, Any] | None = None,
        radius_conf_pwd: dict[str, Any] | None = None,
        ap_count: int = 1,
        ieee80211w: int | None = None,
    ) -> None:
        """Create, configure and start OpenWrt AP(s).

        For each AP, every enabled *_network flag contributes one 2G and one
        5G wireless config with freshly generated credentials. When mirror_ap
        is True, the next AP is configured with the exact same wireless
        configs as the current one (on its own channels) and configuration
        stops there. BSSIDs of the configured networks are recorded in
        self.bssid_map.

        Args:
            channel_5g: 5G channel to configure.
            channel_2g: 2G channel to configure.
            channel_5g_ap2: 5G channel to configure on AP2; defaults to
                channel_5g when not provided.
            channel_2g_ap2: 2G channel to configure on AP2; defaults to
                channel_2g when not provided.
            ssid_length_2g: Number of characters to use for 2G SSIDs.
            passphrase_length_2g: Length of passwords for 2G networks.
            ssid_length_5g: Number of characters to use for 5G SSIDs.
            passphrase_length_5g: Length of passwords for 5G networks.
            mirror_ap: Whether a second AP mirrors the first AP's wireless
                configs; requires ap_count > 1.
            hidden: Whether generated SSIDs are hidden.
            same_ssid: Whether both bands on an AP use the same SSID.
            open_network: Whether an open network should be configured.
            wpa1_network: Whether a WPA (PSK) network should be configured.
            wpa_network: Whether a WPA2 (PSK2) network should be configured.
            wep_network: Whether a WEP network should be configured.
            ent_network: Whether an enterprise (WPA2 + RADIUS) network should
                be configured.
            ent_network_pwd: Whether an enterprise network backed by the
                radius_conf_pwd server should be configured.
            owe_network: Whether an OWE network should be configured.
            sae_network: Whether an SAE (WPA3) network should be configured.
            saemixed_network: Whether an SAE-mixed network should be
                configured.
            radius_conf_2g: Enterprise RADIUS server details for the 2G band.
            radius_conf_5g: Enterprise RADIUS server details for the 5G band.
            radius_conf_pwd: Enterprise RADIUS server details for the
                ent_network_pwd network.
            ap_count: Number of APs to configure.
            ieee80211w: PMF (802.11w) mode to configure; OWE and SAE networks
                force PMF_ENABLED regardless.

        Raises:
            ValueError: If ap_count is 1 while mirror_ap is True or AP2
                channels are provided.
        """
        if mirror_ap and ap_count == 1:
            raise ValueError("ap_count cannot be 1 if mirror_ap is True.")
        if (channel_5g_ap2 or channel_2g_ap2) and ap_count == 1:
            raise ValueError("ap_count cannot be 1 if channels of AP2 are provided.")
        # We are creating a channel list for the 2G and 5G bands. The list is
        # of size 2, based on the assumption that each testbed will have at
        # most 2 APs. Missing AP2 channels fall back to AP1's channels.
        if not channel_5g_ap2:
            channel_5g_ap2 = channel_5g
        if not channel_2g_ap2:
            channel_2g_ap2 = channel_2g
        channels_2g = [channel_2g, channel_2g_ap2]
        channels_5g = [channel_5g, channel_5g_ap2]

        # Normalize optional RADIUS configs so .get() lookups below are safe.
        if radius_conf_2g is None:
            radius_conf_2g = {}
        if radius_conf_5g is None:
            radius_conf_5g = {}
        if radius_conf_pwd is None:
            radius_conf_pwd = {}

        self.bssid_map: list[BSSIDMap] = []
        for i in range(ap_count):
            configs: list[WirelessConfig] = []

            # Counters used to build unique interface names per band.
            num_2g: int = 1
            num_5g: int = 1

            # WPA (legacy PSK) network on both bands.
            if wpa1_network:
                networks = self.get_psk_network(
                    mirror_ap,
                    [],
                    hidden,
                    same_ssid,
                    SecurityMode.WPA,
                    ssid_length_2g,
                    ssid_length_5g,
                    passphrase_length_2g,
                    passphrase_length_5g,
                )

                def add_config(name: str, band: str) -> None:
                    # Appends one PSK wireless config for the given band.
                    configs.append(
                        WirelessConfig(
                            name=name,
                            ssid=networks[band]["SSID"],
                            security=OpenWRTEncryptionMode.PSK,
                            band=band,
                            password=networks[band]["password"],
                            hidden=networks[band]["hiddenSSID"],
                            ieee80211w=ieee80211w,
                        )
                    )

                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
                num_2g += 1
                num_5g += 1
            # WPA2 (PSK2) network on both bands.
            if wpa_network:
                networks = self.get_psk_network(
                    mirror_ap,
                    [],
                    hidden,
                    same_ssid,
                    SecurityMode.WPA2,
                    ssid_length_2g,
                    ssid_length_5g,
                    passphrase_length_2g,
                    passphrase_length_5g,
                )

                def add_config(name: str, band: str) -> None:
                    # Appends one PSK2 wireless config for the given band.
                    configs.append(
                        WirelessConfig(
                            name=name,
                            ssid=networks[band]["SSID"],
                            security=OpenWRTEncryptionMode.PSK2,
                            band=band,
                            password=networks[band]["password"],
                            hidden=networks[band]["hiddenSSID"],
                            ieee80211w=ieee80211w,
                        )
                    )

                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
                num_2g += 1
                num_5g += 1
            # WEP network on both bands (keys, not passwords).
            if wep_network:
                networks = self.get_wep_network(
                    mirror_ap,
                    [],
                    hidden,
                    same_ssid,
                    ssid_length_2g,
                    ssid_length_5g,
                )

                def add_config(name: str, band: str) -> None:
                    # Appends one WEP wireless config for the given band.
                    configs.append(
                        WirelessConfig(
                            name=name,
                            ssid=networks[band]["SSID"],
                            security=OpenWRTEncryptionMode.WEP,
                            band=band,
                            wep_key=networks[band]["wepKeys"],
                            hidden=networks[band]["hiddenSSID"],
                        )
                    )

                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
                num_2g += 1
                num_5g += 1
            # Enterprise (WPA2 + RADIUS) network, per-band server configs.
            if ent_network:
                networks = self.get_open_network(
                    mirror_ap,
                    [],
                    hidden,
                    same_ssid,
                    ssid_length_2g,
                    ssid_length_5g,
                    SecurityMode.WPA2,
                )

                def add_config_with_radius(
                    name: str, band: str, radius_conf: dict[str, Any]
                ) -> None:
                    # Appends one enterprise wireless config using the given
                    # RADIUS server details.
                    configs.append(
                        WirelessConfig(
                            name=name,
                            ssid=networks[band]["SSID"],
                            security=OpenWRTEncryptionMode.WPA2,
                            band=band,
                            radius_server_ip=radius_conf.get("radius_server_ip"),
                            radius_server_port=radius_conf.get("radius_server_port"),
                            radius_server_secret=radius_conf.get(
                                "radius_server_secret"
                            ),
                            hidden=networks[band]["hiddenSSID"],
                        )
                    )

                add_config_with_radius(
                    f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G, radius_conf_2g
                )
                add_config_with_radius(
                    f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G, radius_conf_5g
                )
                num_2g += 1
                num_5g += 1
            # Enterprise network backed by the password-based RADIUS server.
            if ent_network_pwd:
                networks = self.get_open_network(
                    mirror_ap,
                    [],
                    hidden,
                    same_ssid,
                    ssid_length_2g,
                    ssid_length_5g,
                    SecurityMode.WPA2,
                )

                # NOTE(review): redundant — radius_conf_pwd was already
                # normalized to {} above, so the conditional always takes the
                # else branch.
                radius_conf = {} if radius_conf_pwd is None else radius_conf_pwd

                def add_config(name: str, band: str) -> None:
                    # Appends one enterprise wireless config using the
                    # password RADIUS server details.
                    configs.append(
                        WirelessConfig(
                            name=name,
                            ssid=networks[band]["SSID"],
                            security=OpenWRTEncryptionMode.WPA2,
                            band=band,
                            radius_server_ip=radius_conf.get("radius_server_ip"),
                            radius_server_port=radius_conf.get("radius_server_port"),
                            radius_server_secret=radius_conf.get(
                                "radius_server_secret"
                            ),
                            hidden=networks[band]["hiddenSSID"],
                        )
                    )

                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
                num_2g += 1
                num_5g += 1
            # Open (unsecured) network on both bands.
            if open_network:
                networks = self.get_open_network(
                    mirror_ap,
                    [],
                    hidden,
                    same_ssid,
                    ssid_length_2g,
                    ssid_length_5g,
                )

                def add_config(name: str, band: str) -> None:
                    # Appends one open wireless config for the given band.
                    configs.append(
                        WirelessConfig(
                            name=name,
                            ssid=networks[band]["SSID"],
                            security=OpenWRTEncryptionMode.NONE,
                            band=band,
                            hidden=networks[band]["hiddenSSID"],
                        )
                    )

                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
                num_2g += 1
                num_5g += 1
            # OWE (WPA3 opportunistic wireless encryption); PMF is mandatory.
            if owe_network:
                networks = self.get_open_network(
                    mirror_ap,
                    [],
                    hidden,
                    same_ssid,
                    ssid_length_2g,
                    ssid_length_5g,
                )

                def add_config(name: str, band: str) -> None:
                    # Appends one OWE wireless config; PMF forced on.
                    configs.append(
                        WirelessConfig(
                            name=name,
                            ssid=networks[band]["SSID"],
                            security=OpenWRTEncryptionMode.OWE,
                            band=band,
                            hidden=networks[band]["hiddenSSID"],
                            ieee80211w=PMF_ENABLED,
                        )
                    )

                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
                num_2g += 1
                num_5g += 1
            # SAE (WPA3-Personal); PMF is mandatory.
            if sae_network:
                networks = self.get_psk_network(
                    mirror_ap,
                    [],
                    hidden,
                    same_ssid,
                    ssid_length_2g=ssid_length_2g,
                    ssid_length_5g=ssid_length_5g,
                    passphrase_length_2g=passphrase_length_2g,
                    passphrase_length_5g=passphrase_length_5g,
                )

                def add_config(name: str, band: str) -> None:
                    # Appends one SAE wireless config; PMF forced on.
                    configs.append(
                        WirelessConfig(
                            name=name,
                            ssid=networks[band]["SSID"],
                            security=OpenWRTEncryptionMode.SAE,
                            band=band,
                            password=networks[band]["password"],
                            hidden=networks[band]["hiddenSSID"],
                            ieee80211w=PMF_ENABLED,
                        )
                    )

                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
                num_2g += 1
                num_5g += 1
            # SAE-mixed (WPA2/WPA3 transition mode); PMF follows ieee80211w.
            if saemixed_network:
                networks = self.get_psk_network(
                    mirror_ap,
                    [],
                    hidden,
                    same_ssid,
                    ssid_length_2g=ssid_length_2g,
                    ssid_length_5g=ssid_length_5g,
                    passphrase_length_2g=passphrase_length_2g,
                    passphrase_length_5g=passphrase_length_5g,
                )

                def add_config(name: str, band: str) -> None:
                    # Appends one SAE-mixed wireless config for the given band.
                    configs.append(
                        WirelessConfig(
                            name=name,
                            ssid=networks[band]["SSID"],
                            security=OpenWRTEncryptionMode.SAE_MIXED,
                            band=band,
                            password=networks[band]["password"],
                            hidden=networks[band]["hiddenSSID"],
                            ieee80211w=ieee80211w,
                        )
                    )

                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
                num_2g += 1
                num_5g += 1

            # Push the accumulated configs to this AP and bring it up.
            openwrt_ap = self.openwrt_aps[i]
            openwrt_ap.configure_ap(configs, channels_2g[i], channels_5g[i])
            openwrt_ap.start_ap()
            self.bssid_map.append(openwrt_ap.get_bssids_for_wifi_networks())

            if mirror_ap:
                # The mirror AP reuses the exact same configs on its own
                # channels; configuration of further APs stops here.
                openwrt_ap_mirror = self.openwrt_aps[i + 1]
                openwrt_ap_mirror.configure_ap(
                    configs, channels_2g[i + 1], channels_5g[i + 1]
                )
                openwrt_ap_mirror.start_ap()
                self.bssid_map.append(openwrt_ap_mirror.get_bssids_for_wifi_networks())
                break
diff --git a/packages/antlion/test_utils/wifi/wifi_constants.py b/packages/antlion/test_utils/wifi/wifi_constants.py
new file mode 100644
index 0000000..a348f81
--- /dev/null
+++ b/packages/antlion/test_utils/wifi/wifi_constants.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Constants for Wifi related events.
WIFI_CONNECTED = "WifiNetworkConnected"
WIFI_DISCONNECTED = "WifiNetworkDisconnected"
SUPPLICANT_CON_CHANGED = "SupplicantConnectionChanged"
WIFI_STATE_CHANGED = "WifiStateChanged"
WIFI_FORGET_NW_SUCCESS = "WifiManagerForgetNetworkOnSuccess"
WIFI_NETWORK_REQUEST_MATCH_CB_ON_MATCH = "WifiManagerNetworkRequestMatchCallbackOnMatch"
WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_SUCCESS = (
    "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectSuccess"
)
WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_FAILURE = (
    "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectFailure"
)
WIFI_NETWORK_CB_ON_AVAILABLE = "WifiManagerNetworkCallbackOnAvailable"
WIFI_NETWORK_CB_ON_UNAVAILABLE = "WifiManagerNetworkCallbackOnUnavailable"
WIFI_NETWORK_CB_ON_LOST = "WifiManagerNetworkCallbackOnLost"
WIFI_NETWORK_SUGGESTION_POST_CONNECTION = "WifiNetworkSuggestionPostConnection"
WIFI_SUBSYSTEM_RESTARTING = "WifiSubsystemRestarting"
WIFI_SUBSYSTEM_RESTARTED = "WifiSubsystemRestarted"

# These constants will be used by the ACTS wifi tests.
CONNECT_BY_CONFIG_SUCCESS = "WifiManagerConnectByConfigOnSuccess"
CONNECT_BY_NETID_SUCCESS = "WifiManagerConnectByNetIdOnSuccess"

# SoftAp related constants.
SOFTAP_CALLBACK_EVENT = "WifiManagerSoftApCallback-"
# Callback event for softap state change:
# WifiManagerSoftApCallback-[callbackId]-OnStateChanged
SOFTAP_STATE_CHANGED = "-OnStateChanged"
SOFTAP_STATE_CHANGE_CALLBACK_KEY = "State"
WIFI_AP_DISABLING_STATE = 10
WIFI_AP_DISABLED_STATE = 11
WIFI_AP_ENABLING_STATE = 12
WIFI_AP_ENABLED_STATE = 13
WIFI_AP_FAILED_STATE = 14

SOFTAP_RANDOMIZATION_NONE = 0
SOFTAP_RANDOMIZATION_PERSISTENT = 1

# Callback event for client number change:
# WifiManagerSoftApCallback-[callbackId]-OnNumClientsChanged
SOFTAP_NUMBER_CLIENTS_CHANGED_WITH_INFO = "-OnConnectedClientsChangedWithInfo"
SOFTAP_NUMBER_CLIENTS_CHANGED = "-OnNumClientsChanged"
SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY = "NumClients"
SOFTAP_CLIENTS_MACS_CALLBACK_KEY = "MacAddresses"
# Callback event for softap info change.
SOFTAP_INFO_CHANGED = "-OnInfoChanged"
SOFTAP_INFOLIST_CHANGED = "-OnInfoListChanged"
SOFTAP_INFO_FREQUENCY_CALLBACK_KEY = "frequency"
SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY = "bandwidth"
SOFTAP_INFO_WIFISTANDARD_CALLBACK_KEY = "wifiStandard"
SOFTAP_INFO_AUTO_SHUTDOWN_CALLBACK_KEY = "autoShutdownTimeoutMillis"
SOFTAP_INFO_BSSID_CALLBACK_KEY = "bssid"
# Callback event for softap client blocking.
SOFTAP_BLOCKING_CLIENT_CONNECTING = "-OnBlockedClientConnecting"
SOFTAP_BLOCKING_CLIENT_REASON_KEY = "BlockedReason"
SOFTAP_BLOCKING_CLIENT_WIFICLIENT_KEY = "WifiClient"
SAP_CLIENT_BLOCK_REASON_CODE_BLOCKED_BY_USER = 0
SAP_CLIENT_BLOCK_REASON_CODE_NO_MORE_STAS = 1

# Callback event for softap capability.
SOFTAP_CAPABILITY_CHANGED = "-OnCapabilityChanged"
SOFTAP_CAPABILITY_MAX_SUPPORTED_CLIENTS = "maxSupportedClients"
SOFTAP_CAPABILITY_24GHZ_SUPPORTED_CHANNEL_LIST = "supported2GHzChannellist"
SOFTAP_CAPABILITY_5GHZ_SUPPORTED_CHANNEL_LIST = "supported5GHzChannellist"
SOFTAP_CAPABILITY_6GHZ_SUPPORTED_CHANNEL_LIST = "supported6GHzChannellist"
SOFTAP_CAPABILITY_60GHZ_SUPPORTED_CHANNEL_LIST = "supported60GHzChannellist"
SOFTAP_CAPABILITY_FEATURE_ACS = "acsOffloadSupported"
SOFTAP_CAPABILITY_FEATURE_CLIENT_CONTROL = "clientForceDisconnectSupported"
SOFTAP_CAPABILITY_FEATURE_WPA3_SAE = "wpa3SaeSupported"
SOFTAP_CAPABILITY_FEATURE_IEEE80211AX = "ieee80211axSupported"
SOFTAP_CAPABILITY_FEATURE_24GHZ = "24gSupported"
SOFTAP_CAPABILITY_FEATURE_5GHZ = "5gSupported"
SOFTAP_CAPABILITY_FEATURE_6GHZ = "6gSupported"
SOFTAP_CAPABILITY_FEATURE_60GHZ = "60gSupported"

DEFAULT_SOFTAP_TIMEOUT_S = 600  # 10 minutes

# AP related constants.
AP_MAIN = "main_AP"
AP_AUX = "aux_AP"
SSID = "SSID"

# cnss_diag property related constants.
DEVICES_USING_LEGACY_PROP = ["sailfish", "marlin", "walleye", "taimen", "muskie"]
CNSS_DIAG_PROP = "persist.vendor.sys.cnss.diag_txt"
LEGACY_CNSS_DIAG_PROP = "persist.sys.cnss.diag_txt"

# Delay before registering the match callback.
NETWORK_REQUEST_CB_REGISTER_DELAY_SEC = 2

# Constants for the JSONObject representation of CoexUnsafeChannel.
COEX_BAND = "band"
COEX_BAND_24_GHZ = "24_GHZ"
COEX_BAND_5_GHZ = "5_GHZ"
COEX_CHANNEL = "channel"
COEX_POWER_CAP_DBM = "powerCapDbm"

# Constants for bundle keys for CoexCallback#onCoexUnsafeChannelsChanged.
KEY_COEX_UNSAFE_CHANNELS = "KEY_COEX_UNSAFE_CHANNELS"
KEY_COEX_RESTRICTIONS = "KEY_COEX_RESTRICTIONS"

# WiFi standards.
WIFI_STANDARD_11AX = 6
diff --git a/packages/antlion/test_utils/wifi/wifi_test_utils.py b/packages/antlion/test_utils/wifi/wifi_test_utils.py
new file mode 100755
index 0000000..9928001
--- /dev/null
+++ b/packages/antlion/test_utils/wifi/wifi_test_utils.py
@@ -0,0 +1,1074 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import shutil
+import time
+from enum import IntEnum
+from queue import Empty
+
+from mobly import asserts, signals
+
+from antlion import context, utils
+from antlion.controllers.ap_lib.hostapd_constants import BAND_2G, BAND_5G
+from antlion.test_utils.wifi import wifi_constants
+
# Default timeout (seconds) used for reboot, toggle WiFi and Airplane mode,
# for the system to settle down after the operation.
DEFAULT_TIMEOUT = 10
# Number of seconds to wait for events that are supposed to happen quickly.
# Like onSuccess for start background scan and confirmation on wifi state
# change.
SHORT_TIMEOUT = 30
# Seconds allowed for a roaming transition to complete.
ROAMING_TIMEOUT = 30
# Default seconds allowed for a Wi-Fi connection attempt.
WIFI_CONNECTION_TIMEOUT_DEFAULT = 30
# Default retry counts used by the scan/connect helpers in this module.
DEFAULT_SCAN_TRIES = 3
DEFAULT_CONNECT_TRIES = 3
# Speed of light in m/s.
SPEED_OF_LIGHT = 299792458

# URL used by validate_connection() for its HTTP reachability check.
DEFAULT_PING_ADDR = "https://www.google.com/robots.txt"

# On-device directory and file name for the cnss_diag configuration.
CNSS_DIAG_CONFIG_PATH = "/data/vendor/wifi/cnss_diag/"
CNSS_DIAG_CONFIG_FILE = "cnss_diag.conf"

# Attenuator presets used by roaming tests: one AP kept visible while the
# other is attenuated away. Values are presumably attenuation in dB per
# attenuator port — TODO confirm against the attenuator controller.
ROAMING_ATTN = {
    "AP1_on_AP2_off": [0, 0, 95, 95],
    "AP1_off_AP2_on": [95, 95, 0, 0],
    "default": [0, 0, 0, 0],
}
+
+
class WifiEnums:
    """Constants and lookup tables shared by the Wi-Fi test utilities.

    Groups the dictionary keys used when exchanging network/SoftAp
    configuration with the device-side SL4A facade, SoftAp band constants,
    EAP enums, WifiScanner band macros, and frequency/channel lookup tables.
    """

    SSID_KEY = "SSID"  # Used for Wifi & SoftAp
    SSID_PATTERN_KEY = "ssidPattern"
    NETID_KEY = "network_id"
    BSSID_KEY = "BSSID"  # Used for Wifi & SoftAp
    BSSID_PATTERN_KEY = "bssidPattern"
    PWD_KEY = "password"  # Used for Wifi & SoftAp
    # NOTE(review): lowercase name kept as-is for backward compatibility with
    # existing callers; renaming would break the external interface.
    frequency_key = "frequency"
    HIDDEN_KEY = "hiddenSSID"  # Used for Wifi & SoftAp
    IS_APP_INTERACTION_REQUIRED = "isAppInteractionRequired"
    IS_USER_INTERACTION_REQUIRED = "isUserInteractionRequired"
    IS_SUGGESTION_METERED = "isMetered"
    PRIORITY = "priority"
    SECURITY = "security"  # Used for Wifi & SoftAp

    # Used for SoftAp
    AP_BAND_KEY = "apBand"
    AP_CHANNEL_KEY = "apChannel"
    AP_BANDS_KEY = "apBands"
    AP_CHANNEL_FREQUENCYS_KEY = "apChannelFrequencies"
    AP_MAC_RANDOMIZATION_SETTING_KEY = "MacRandomizationSetting"
    AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY = (
        "BridgedModeOpportunisticShutdownEnabled"
    )
    AP_IEEE80211AX_ENABLED_KEY = "Ieee80211axEnabled"
    AP_MAXCLIENTS_KEY = "MaxNumberOfClients"
    AP_SHUTDOWNTIMEOUT_KEY = "ShutdownTimeoutMillis"
    AP_SHUTDOWNTIMEOUTENABLE_KEY = "AutoShutdownEnabled"
    AP_CLIENTCONTROL_KEY = "ClientControlByUserEnabled"
    AP_ALLOWEDLIST_KEY = "AllowedClientList"
    AP_BLOCKEDLIST_KEY = "BlockedClientList"

    # SoftAp band selectors (bitmask-like values combining 2G/5G/6G).
    WIFI_CONFIG_SOFTAP_BAND_2G = 1
    WIFI_CONFIG_SOFTAP_BAND_5G = 2
    WIFI_CONFIG_SOFTAP_BAND_2G_5G = 3
    WIFI_CONFIG_SOFTAP_BAND_6G = 4
    WIFI_CONFIG_SOFTAP_BAND_2G_6G = 5
    WIFI_CONFIG_SOFTAP_BAND_5G_6G = 6
    WIFI_CONFIG_SOFTAP_BAND_ANY = 7

    # DO NOT USE IT for new test case! Replaced by WIFI_CONFIG_SOFTAP_BAND_
    WIFI_CONFIG_APBAND_2G = WIFI_CONFIG_SOFTAP_BAND_2G
    WIFI_CONFIG_APBAND_5G = WIFI_CONFIG_SOFTAP_BAND_5G
    WIFI_CONFIG_APBAND_AUTO = WIFI_CONFIG_SOFTAP_BAND_2G_5G

    # Legacy band values used by older platform releases.
    WIFI_CONFIG_APBAND_2G_OLD = 0
    WIFI_CONFIG_APBAND_5G_OLD = 1
    WIFI_CONFIG_APBAND_AUTO_OLD = -1

    # WPS configuration methods.
    WIFI_WPS_INFO_PBC = 0
    WIFI_WPS_INFO_DISPLAY = 1
    WIFI_WPS_INFO_KEYPAD = 2
    WIFI_WPS_INFO_LABEL = 3
    WIFI_WPS_INFO_INVALID = 4

    class CountryCode:
        """Two-letter ISO 3166-1 country codes used for Wi-Fi regulatory setup."""

        AUSTRALIA = "AU"
        CHINA = "CN"
        GERMANY = "DE"
        JAPAN = "JP"
        UK = "GB"
        US = "US"
        UNKNOWN = "UNKNOWN"

    # Start of Macros for EAP
    # EAP types
    class Eap(IntEnum):
        NONE = -1
        PEAP = 0
        TLS = 1
        TTLS = 2
        PWD = 3
        SIM = 4
        AKA = 5
        AKA_PRIME = 6
        UNAUTH_TLS = 7

    # EAP Phase2 types
    class EapPhase2(IntEnum):
        NONE = 0
        PAP = 1
        MSCHAP = 2
        MSCHAPV2 = 3
        GTC = 4

    class Enterprise:
        # Enterprise Config Macros: dictionary keys for WPA-Enterprise
        # network configuration.
        EMPTY_VALUE = "NULL"
        EAP = "eap"
        PHASE2 = "phase2"
        IDENTITY = "identity"
        ANON_IDENTITY = "anonymous_identity"
        PASSWORD = "password"
        SUBJECT_MATCH = "subject_match"
        ALTSUBJECT_MATCH = "altsubject_match"
        DOM_SUFFIX_MATCH = "domain_suffix_match"
        CLIENT_CERT = "client_cert"
        CA_CERT = "ca_cert"
        ENGINE = "engine"
        ENGINE_ID = "engine_id"
        PRIVATE_KEY_ID = "key_id"
        REALM = "realm"
        PLMN = "plmn"
        FQDN = "FQDN"
        FRIENDLY_NAME = "providerFriendlyName"
        ROAMING_IDS = "roamingConsortiumIds"
        OCSP = "ocsp"

    # End of Macros for EAP

    # Macros as specified in the WifiScanner code.
    WIFI_BAND_UNSPECIFIED = 0  # not specified
    WIFI_BAND_24_GHZ = 1  # 2.4 GHz band
    WIFI_BAND_5_GHZ = 2  # 5 GHz band without DFS channels
    WIFI_BAND_5_GHZ_DFS_ONLY = 4  # 5 GHz band with DFS channels
    WIFI_BAND_5_GHZ_WITH_DFS = 6  # 5 GHz band with DFS channels
    WIFI_BAND_BOTH = 3  # both bands without DFS channels
    WIFI_BAND_BOTH_WITH_DFS = 7  # both bands with DFS channels

    # WifiScanner scan types.
    SCAN_TYPE_LOW_LATENCY = 0
    SCAN_TYPE_LOW_POWER = 1
    SCAN_TYPE_HIGH_ACCURACY = 2

    # US Wifi frequencies (MHz).
    ALL_2G_FREQUENCIES = [
        2412,
        2417,
        2422,
        2427,
        2432,
        2437,
        2442,
        2447,
        2452,
        2457,
        2462,
    ]
    DFS_5G_FREQUENCIES = [
        5260,
        5280,
        5300,
        5320,
        5500,
        5520,
        5540,
        5560,
        5580,
        5600,
        5620,
        5640,
        5660,
        5680,
        5700,
        5720,
    ]
    NONE_DFS_5G_FREQUENCIES = [5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825]
    ALL_5G_FREQUENCIES = DFS_5G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES

    # Maps a WifiScanner band macro to the list of US frequencies it covers.
    band_to_frequencies = {
        WIFI_BAND_24_GHZ: ALL_2G_FREQUENCIES,
        WIFI_BAND_5_GHZ: NONE_DFS_5G_FREQUENCIES,
        WIFI_BAND_5_GHZ_DFS_ONLY: DFS_5G_FREQUENCIES,
        WIFI_BAND_5_GHZ_WITH_DFS: ALL_5G_FREQUENCIES,
        WIFI_BAND_BOTH: ALL_2G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES,
        WIFI_BAND_BOTH_WITH_DFS: ALL_5G_FREQUENCIES + ALL_2G_FREQUENCIES,
    }

    # TODO: add all of the band mapping.
    softap_band_frequencies = {
        WIFI_CONFIG_SOFTAP_BAND_2G: ALL_2G_FREQUENCIES,
        WIFI_CONFIG_SOFTAP_BAND_5G: ALL_5G_FREQUENCIES,
    }

    # All Wifi frequencies to channels lookup.
    freq_to_channel = {
        2412: 1,
        2417: 2,
        2422: 3,
        2427: 4,
        2432: 5,
        2437: 6,
        2442: 7,
        2447: 8,
        2452: 9,
        2457: 10,
        2462: 11,
        2467: 12,
        2472: 13,
        2484: 14,
        4915: 183,
        4920: 184,
        4925: 185,
        4935: 187,
        4940: 188,
        4945: 189,
        4960: 192,
        4980: 196,
        5035: 7,
        5040: 8,
        5045: 9,
        5055: 11,
        5060: 12,
        5080: 16,
        5170: 34,
        5180: 36,
        5190: 38,
        5200: 40,
        5210: 42,
        5220: 44,
        5230: 46,
        5240: 48,
        5260: 52,
        5280: 56,
        5300: 60,
        5320: 64,
        5500: 100,
        5520: 104,
        5540: 108,
        5560: 112,
        5580: 116,
        5600: 120,
        5620: 124,
        5640: 128,
        5660: 132,
        5680: 136,
        5700: 140,
        5745: 149,
        5765: 153,
        5785: 157,
        5795: 159,
        5805: 161,
        5825: 165,
    }

    # All Wifi channels to frequencies lookup.
    channel_2G_to_freq = {
        1: 2412,
        2: 2417,
        3: 2422,
        4: 2427,
        5: 2432,
        6: 2437,
        7: 2442,
        8: 2447,
        9: 2452,
        10: 2457,
        11: 2462,
        12: 2467,
        13: 2472,
        14: 2484,
    }

    channel_5G_to_freq = {
        183: 4915,
        184: 4920,
        185: 4925,
        187: 4935,
        188: 4940,
        189: 4945,
        192: 4960,
        196: 4980,
        7: 5035,
        8: 5040,
        9: 5045,
        11: 5055,
        12: 5060,
        16: 5080,
        34: 5170,
        36: 5180,
        38: 5190,
        40: 5200,
        42: 5210,
        44: 5220,
        46: 5230,
        48: 5240,
        50: 5250,
        52: 5260,
        56: 5280,
        60: 5300,
        64: 5320,
        100: 5500,
        104: 5520,
        108: 5540,
        112: 5560,
        116: 5580,
        120: 5600,
        124: 5620,
        128: 5640,
        132: 5660,
        136: 5680,
        140: 5700,
        149: 5745,
        151: 5755,
        153: 5765,
        155: 5775,
        157: 5785,
        159: 5795,
        161: 5805,
        165: 5825,
    }

    # 6 GHz channels: channel numbers 1, 5, 9, ... map to 5955 + 20*x MHz.
    channel_6G_to_freq = {4 * x + 1: 5955 + 20 * x for x in range(59)}

    channel_to_freq = {
        "2G": channel_2G_to_freq,
        "5G": channel_5G_to_freq,
        "6G": channel_6G_to_freq,
    }
+
+
def _assert_on_fail_handler(func, assert_on_fail, *args, **kwargs):
    """Wrapper function that handles the behavior of assert_on_fail.

    When assert_on_fail is True, let all test signals through, which can
    terminate test cases directly. When assert_on_fail is False, the wrapper
    raises no test signals and reports operation status by returning True or
    False.

    Args:
        func: The function to wrap. This function reports operation status by
              raising test signals.
        assert_on_fail: A boolean that specifies if the output of the wrapper
                        is test signal based or return value based.
        args: Positional args for func.
        kwargs: Name args for func.

    Returns:
        If assert_on_fail is False, returns True/False to signal operation
        status, otherwise returns nothing (test signals propagate instead).
    """
    try:
        func(*args, **kwargs)
        if not assert_on_fail:
            return True
    except signals.TestSignal:
        if assert_on_fail:
            raise
        return False
+
+
def match_networks(target_params, networks):
    """Finds the WiFi networks that match a given set of parameters in a list
    of WiFi networks.

    To be considered a match, the network must contain every key-value pair
    of target_params.

    Args:
        target_params: A dict with 1 or more key-value pairs representing a Wi-Fi network.
                       E.g { 'SSID': 'wh_ap1_5g', 'BSSID': '30:b5:c2:33:e4:47' }
        networks: A list of dict objects representing WiFi networks.

    Returns:
        The networks that match the target parameters.
    """
    asserts.assert_true(
        target_params, "Expected networks object 'target_params' is empty"
    )
    # A network matches when every target key is present with an equal value.
    # (Replaces the original's 0/1 integer flag and manual nested loop.)
    return [
        network
        for network in networks
        if all(
            key in network and network[key] == value
            for key, value in target_params.items()
        )
    ]
+
+
def wifi_toggle_state(ad, new_state=None, assert_on_fail=True):
    """Toggles the state of wifi.

    Thin wrapper around _wifi_toggle_state that converts test-signal failures
    into a boolean return value when assert_on_fail is False.

    Args:
        ad: An AndroidDevice object.
        new_state: Wifi state to set to. If None, opposite of the current state.
        assert_on_fail: If True, error checks in this function will raise test
                        failure signals.

    Returns:
        If assert_on_fail is False, function returns True if the toggle was
        successful, False otherwise. If assert_on_fail is True, no return value.
    """
    return _assert_on_fail_handler(
        _wifi_toggle_state, assert_on_fail, ad, new_state=new_state
    )
+
+
def _wifi_toggle_state(ad, new_state=None):
    """Toggles the state of wifi.

    TestFailure signals are raised when something goes wrong.

    Args:
        ad: An AndroidDevice object.
        new_state: The state to set Wi-Fi to. If None, opposite of the current
                   state will be set.
    """
    if new_state is None:
        new_state = not ad.droid.wifiCheckState()
    elif new_state == ad.droid.wifiCheckState():
        # Check if the new_state is already achieved, so we don't wait for the
        # state change event by mistake.
        return
    ad.droid.wifiStartTrackingStateChange()
    ad.log.info("Setting Wi-Fi state to %s.", new_state)
    ad.ed.clear_all_events()
    # Setting wifi state.
    ad.droid.wifiToggleState(new_state)
    # NOTE(review): fixed settle delay before polling for the state-change
    # event — presumably gives the framework time to begin the transition;
    # confirm whether it can be removed.
    time.sleep(2)
    fail_msg = f"Failed to set Wi-Fi state to {new_state} on {ad.serial}."
    try:
        # Wait for the state-changed broadcast matching the requested state;
        # if it never arrives, fall back to querying the state directly.
        ad.ed.wait_for_event(
            wifi_constants.WIFI_STATE_CHANGED,
            lambda x: x["data"]["enabled"] == new_state,
            SHORT_TIMEOUT,
        )
    except Empty:
        asserts.assert_equal(new_state, ad.droid.wifiCheckState(), fail_msg)
    finally:
        ad.droid.wifiStopTrackingStateChange()
+
+
def reset_wifi(ad):
    """Clears all saved Wi-Fi networks on a device.

    This will turn Wi-Fi on.

    Args:
        ad: An AndroidDevice object.
    """
    networks = ad.droid.wifiGetConfiguredNetworks()
    if not networks:
        return
    # Track already-forgotten ids in a set: O(1) membership instead of the
    # original list scan, and avoids the `else: continue` contortion.
    removed = set()
    for n in networks:
        network_id = n["networkId"]
        if network_id in removed:
            continue
        ad.droid.wifiForgetNetwork(network_id)
        removed.add(network_id)
        try:
            # Best-effort confirmation; the final assert below is the real check.
            ad.ed.pop_event(wifi_constants.WIFI_FORGET_NW_SUCCESS, SHORT_TIMEOUT)
        except Empty:
            logging.warning("Could not confirm the removal of network %s.", n)
    # Check again to see if there's any network left.
    asserts.assert_true(
        not ad.droid.wifiGetConfiguredNetworks(),
        f"Failed to remove these configured Wi-Fi networks: {networks}",
    )
+
+
def wifi_test_device_init(ad, country_code=WifiEnums.CountryCode.US):
    """Initializes an android device for wifi testing.

    0. Make sure SL4A connection is established on the android device.
    1. Disable location service's WiFi scan.
    2. Turn WiFi on.
    3. Clear all saved networks.
    4. Enable WiFi verbose logging and set wpa_supplicant log level.
    5. Sync device time with computer time.
    6. Turn off cellular data.
    7. Set country code (default US).
    8. Turn off ambient display.

    Args:
        ad: An AndroidDevice object.
        country_code: 2-letter ISO country code to apply after setup.
    """
    utils.require_sl4a((ad,))
    ad.droid.wifiScannerToggleAlwaysAvailable(False)
    msg = "Failed to turn off location service's scan."
    asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)
    wifi_toggle_state(ad, True)
    reset_wifi(ad)
    ad.droid.wifiEnableVerboseLogging(1)
    msg = "Failed to enable WiFi verbose logging."
    asserts.assert_equal(ad.droid.wifiGetVerboseLoggingLevel(), 1, msg)
    # We don't verify the following settings since they are not critical.
    # Set wpa_supplicant log level to EXCESSIVE.
    output = ad.adb.shell(
        "wpa_cli -i wlan0 -p -g@android:wpa_wlan0 IFNAME=" "wlan0 log_level EXCESSIVE",
        ignore_status=True,
    )
    ad.log.info("wpa_supplicant log change status: %s", output)
    utils.sync_device_time(ad)
    ad.droid.telephonyToggleDataConnection(False)
    set_wifi_country_code(ad, country_code)
    utils.set_ambient_display(ad, False)
+
+
def set_wifi_country_code(ad, country_code):
    """Sets the wifi country code on the device.

    Tries the `cmd wifi force-country-code` shell command first; on any
    failure, falls back to setting the US country code through SL4A.

    Args:
        ad: An AndroidDevice object.
        country_code: 2 letter ISO country code

    Raises:
        An RpcException if unable to set the fallback country code via SL4A.
    """
    try:
        ad.adb.shell(f"cmd wifi force-country-code enabled {country_code}")
    except Exception as e:
        # Logger.warn is a deprecated alias of Logger.warning; use warning().
        ad.log.warning(
            f"Failed to set country code to {country_code}; defaulting to US. Error: {e}"
        )
        ad.droid.wifiSetCountryCode(WifiEnums.CountryCode.US)
+
+
def start_wifi_connection_scan_and_return_status(ad):
    """Trigger a connectivity scan and report whether it produced results.

    Blocks until either scan results or a scan failure is reported, for at
    most 60 seconds.

    Args:
        ad: An AndroidDevice object.
    Returns:
        True: if scan succeeded & results are available
        False: if scan failed
    """
    ad.ed.clear_all_events()
    ad.droid.wifiStartScan()
    try:
        events = ad.ed.pop_events("WifiManagerScan(ResultsAvailable|Failure)", 60)
    except Empty:
        asserts.fail("Wi-Fi scan results/failure did not become available within 60s.")
    # Multiple events may match; succeed if at least one scan completed.
    scan_succeeded = False
    for event in events:
        event_name = event["name"]
        if event_name == "WifiManagerScanResultsAvailable":
            scan_succeeded = True
            break
        if event_name == "WifiManagerScanFailure":
            ad.log.debug("Scan failure received")
    return scan_succeeded
+
+
def start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries=3):
    """Scan repeatedly and report whether |network_ssid| appears in results.

    The method performs a max of |max_tries| connectivity scans to find the
    network.

    Args:
        ad: An AndroidDevice object.
        network_ssid: SSID of the network we are looking for.
        max_tries: Number of scans to try.
    Returns:
        True: if network_ssid is found in scan results.
        False: if network_ssid is not found in scan results.
    """
    start_time = time.time()
    # Loop counter is unused; name it `_` instead of the original `num_tries`.
    for _ in range(max_tries):
        if start_wifi_connection_scan_and_return_status(ad):
            scan_results = ad.droid.wifiGetScanResults()
            match_results = match_networks(
                {WifiEnums.SSID_KEY: network_ssid}, scan_results
            )
            if match_results:
                ad.log.debug(f"Found network in {time.time() - start_time} seconds.")
                return True
    ad.log.debug(f"Did not find network in {time.time() - start_time} seconds.")
    return False
+
+
def start_wifi_connection_scan_and_ensure_network_found(ad, network_ssid, max_tries=3):
    """Scan repeatedly and assert that |network_ssid| shows up in results.

    Performs at most |max_tries| connectivity scans looking for the network.
    This method asserts on failure!

    Args:
        ad: An AndroidDevice object.
        network_ssid: SSID of the network we are looking for.
        max_tries: Number of scans to try.
    """
    ad.log.info("Starting scans to ensure %s is present", network_ssid)
    found = start_wifi_connection_scan_and_check_for_network(
        ad, network_ssid, max_tries
    )
    asserts.assert_true(
        found,
        f"Failed to find {network_ssid} in scan results after {str(max_tries)} tries",
    )
+
+
def start_wifi_connection_scan_and_ensure_network_not_found(
    ad, network_ssid, max_tries=3
):
    """Scan repeatedly and assert that |network_ssid| never shows up.

    Performs at most |max_tries| connectivity scans looking for the network.
    This method asserts on failure!

    Args:
        ad: An AndroidDevice object.
        network_ssid: SSID of the network we are looking for.
        max_tries: Number of scans to try.
    """
    ad.log.info("Starting scans to ensure %s is not present", network_ssid)
    found = start_wifi_connection_scan_and_check_for_network(
        ad, network_ssid, max_tries
    )
    asserts.assert_false(
        found,
        f"Found {network_ssid} in scan results after {str(max_tries)} tries",
    )
+
+
def _wait_for_connect_event(ad, ssid=None, id=None, tries=1):
    """Wait for a connect event on queue and pop when available.

    Args:
        ad: An Android device object.
        ssid: SSID of the network to connect to.
        id: Network Id of the network to connect to. (Parameter name shadows
            the builtin but is kept for interface compatibility.)
        tries: An integer that is the number of times to try before failing.

    Returns:
        A dict with details of the connection data, which looks like this:
        {
         'time': 1485460337798,
         'name': 'WifiNetworkConnected',
         'data': {
                  'rssi': -27,
                  'is_24ghz': True,
                  'mac_address': '02:00:00:00:00:00',
                  'network_id': 1,
                  'BSSID': '30:b5:c2:33:d3:fc',
                  'ip_address': 117483712,
                  'link_speed': 54,
                  'supplicant_state': 'completed',
                  'hidden_ssid': False,
                  'SSID': 'wh_ap1_2g',
                  'is_5ghz': False}
        }
    """
    conn_result = None

    # If ssid and network id is None, just wait for any connect event.
    if id is None and ssid is None:
        for _ in range(tries):
            try:
                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
                break
            except Empty:
                pass
    else:
        # If ssid or network id is specified, wait for specific connect event.
        # Compare against None explicitly: the original used truthiness, which
        # incorrectly skipped matching on the valid network id 0.
        for _ in range(tries):
            try:
                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
                if id is not None and conn_result["data"][WifiEnums.NETID_KEY] == id:
                    break
                if ssid is not None and conn_result["data"][WifiEnums.SSID_KEY] == ssid:
                    break
            except Empty:
                pass

    return conn_result
+
+
def connect_to_wifi_network(
    ad,
    network,
    assert_on_fail=True,
    check_connectivity=True,
    hidden=False,
    num_of_scan_tries=DEFAULT_SCAN_TRIES,
    num_of_connect_tries=DEFAULT_CONNECT_TRIES,
):
    """Connection logic for open and psk wifi networks.

    Scans for the network first (ensuring a hidden network is *not* visible),
    then delegates to wifi_connect.

    Args:
        ad: AndroidDevice to use for connection
        network: network info of the network to connect to
        assert_on_fail: If true, errors from wifi_connect will raise
                        test failure signals.
        check_connectivity: If true, validate internet access after connecting.
        hidden: Is the Wifi network hidden.
        num_of_scan_tries: The number of times to try scan
                           interface before declaring failure.
        num_of_connect_tries: The number of times to try
                              connect wifi before declaring failure.
    """
    if hidden:
        start_wifi_connection_scan_and_ensure_network_not_found(
            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries
        )
    else:
        start_wifi_connection_scan_and_ensure_network_found(
            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries
        )
    wifi_connect(
        ad,
        network,
        num_of_tries=num_of_connect_tries,
        assert_on_fail=assert_on_fail,
        check_connectivity=check_connectivity,
    )
+
+
def wifi_connect(
    ad, network, num_of_tries=1, assert_on_fail=True, check_connectivity=True
):
    """Connect an Android device to a wifi network.

    Initiate connection to a wifi network, wait for the "connected" event, then
    confirm the connected ssid is the one requested.

    This will directly fail a test if anything goes wrong.

    Args:
        ad: android_device object to initiate connection on.
        network: A dictionary representing the network to connect to. The
                 dictionary must have the key "SSID".
        num_of_tries: An integer that is the number of times to try before
                      declaring failure. Default is 1.
        assert_on_fail: If True, error checks in this function will raise test
                        failure signals.
        check_connectivity: If True, validate internet access after connecting.

    Returns:
        Returns a value only if assert_on_fail is false.
        Returns True if the connection was successful, False otherwise.
    """
    return _assert_on_fail_handler(
        _wifi_connect,
        assert_on_fail,
        ad,
        network,
        num_of_tries=num_of_tries,
        check_connectivity=check_connectivity,
    )
+
+
def _wifi_connect(ad, network, num_of_tries=1, check_connectivity=True):
    """Connect an Android device to a wifi network.

    Initiate connection to a wifi network, wait for the "connected" event, then
    confirm the connected ssid is the one requested.

    This will directly fail a test if anything goes wrong.

    Args:
        ad: android_device object to initiate connection on.
        network: A dictionary representing the network to connect to. The
                 dictionary must have the key "SSID".
        num_of_tries: An integer that is the number of times to try before
                      declaring failure. Default is 1.
        check_connectivity: If True, validate internet access after connecting.
    """
    asserts.assert_true(
        WifiEnums.SSID_KEY in network,
        f"Key '{WifiEnums.SSID_KEY}' must be present in network definition.",
    )
    ad.droid.wifiStartTrackingStateChange()
    expected_ssid = network[WifiEnums.SSID_KEY]
    ad.droid.wifiConnectByConfig(network)
    ad.log.info("Starting connection process to %s", expected_ssid)
    try:
        # First confirm the connect request itself was accepted, then wait for
        # the actual "connected" event for the expected SSID.
        ad.ed.pop_event(wifi_constants.CONNECT_BY_CONFIG_SUCCESS, 30)
        connect_result = _wait_for_connect_event(
            ad, ssid=expected_ssid, tries=num_of_tries
        )
        asserts.assert_true(
            connect_result,
            f"Failed to connect to Wi-Fi network {network} on {ad.serial}",
        )
        ad.log.debug("Wi-Fi connection result: %s.", connect_result)
        actual_ssid = connect_result["data"][WifiEnums.SSID_KEY]
        asserts.assert_equal(
            actual_ssid,
            expected_ssid,
            f"Connected to the wrong network on {ad.serial}.",
        )
        ad.log.info("Connected to Wi-Fi network %s.", actual_ssid)

        if check_connectivity:
            internet = validate_connection(ad, DEFAULT_PING_ADDR)
            if not internet:
                raise signals.TestFailure(
                    f"Failed to connect to internet on {expected_ssid}"
                )
    except Empty:
        asserts.fail(f"Failed to start connection process to {network} on {ad.serial}")
    except Exception as error:
        ad.log.error("Failed to connect to %s with error %s", expected_ssid, error)
        raise signals.TestFailure(f"Failed to connect to {network} network")

    finally:
        # Always stop tracking, even when a test signal propagates.
        ad.droid.wifiStopTrackingStateChange()
+
+
def validate_connection(
    ad, ping_addr=DEFAULT_PING_ADDR, wait_time=15, ping_gateway=True
):
    """Validate internet connection by pinging the address provided.

    Args:
        ad: android_device object.
        ping_addr: address on internet for pinging.
        wait_time: seconds to wait for a network connection (and, on Android
            11+, for DHCP to supply a default gateway) before pinging.
        ping_gateway: if True, fall back to pinging the IPv4 default gateway
            when the HTTP ping fails (Android 11+ only).

    Returns:
        ping output if successful, False otherwise.
    """
    android_version = int(ad.adb.shell("getprop ro.vendor.build.version.release"))
    # Wait for connectivity; on Android 11+ additionally require a default
    # gateway so DHCP has completed. (Equivalent to the original
    # `(version > 10 and gateway) or version < 11` condition.)
    for _ in range(wait_time):
        if ad.droid.connectivityNetworkIsConnected():
            if android_version < 11 or ad.droid.connectivityGetIPv4DefaultGateway():
                break
        time.sleep(1)
    ping = False
    try:
        ping = ad.droid.httpPing(ping_addr)
        ad.log.info("Http ping result: %s.", ping)
    except Exception:
        # Best-effort: an HTTP ping failure is handled by the gateway-ping
        # fallback below. Narrowed from a bare `except:` so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        pass
    if android_version > 10 and not ping and ping_gateway:
        ad.log.info("Http ping failed. Pinging default gateway")
        gw = ad.droid.connectivityGetIPv4DefaultGateway()
        result = ad.adb.shell(f"ping -c 6 {gw}")
        ad.log.info(f"Default gateway ping result: {result}")
        ping = "100% packet loss" not in result
    return ping
+
+
# TODO(angli): This can only verify if an actual value is exactly the same.
# Would be nice to be able to verify an actual value is one of serveral.
def verify_wifi_connection_info(ad, expected_con):
    """Verifies that the information of the currently connected wifi network is
    as expected.

    Raises a TestFailure signal on the first mismatching or missing field.

    Args:
        ad: android_device object.
        expected_con: A dict representing expected key-value pairs for wifi
            connection. e.g. {"SSID": "test_wifi"}
    """
    current_con = ad.droid.wifiGetConnectionInfo()
    case_insensitive = ["BSSID", "supplicant_state"]
    ad.log.debug("Current connection: %s", current_con)
    for key, expected_value in expected_con.items():
        # Do not verify authentication related fields.
        if key == "password":
            continue
        if key not in current_con:
            raise signals.TestFailure(
                f"Field {key} does not exist in wifi connection info {current_con}."
            )
        actual_value = current_con[key]
        if key in case_insensitive:
            actual_value = actual_value.lower()
            expected_value = expected_value.lower()
        if actual_value != expected_value:
            raise signals.TestFailure(
                f"Expected {key} to be {expected_value}, actual {key} is {actual_value}."
            )
+
+
def get_current_softap_capability(ad, callbackId, need_to_wait):
    """Pops all softap capability-changed events and returns the latest one.

    Args:
        ad: android device object.
        callbackId: Id of the callback associated with registering.
        need_to_wait: Wait for the info callback event before popping all.

    Returns:
        The last updated capability of softap, or None when need_to_wait is
        False and no capability event has been received yet.
    """
    eventStr = (
        wifi_constants.SOFTAP_CALLBACK_EVENT
        + str(callbackId)
        + wifi_constants.SOFTAP_CAPABILITY_CHANGED
    )
    ad.log.debug("softap capability dump from eventStr %s", eventStr)
    # Initialize so this function cannot raise NameError when no event is
    # waited for and the queue is empty (the original left this unbound).
    capability = None
    if need_to_wait:
        event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
        capability = event["data"]

    # Drain any queued updates; the newest event wins.
    for event in ad.ed.pop_all(eventStr):
        capability = event["data"]

    return capability
+
+
def get_ssrdumps(ad):
    """Pulls dumps from the device's ssrdump dir, then clears that dir.

    Args:
        ad: android device object.
    """
    dumps = ad.get_file_names("/data/vendor/ssrdump/")
    if dumps:
        ad.log.info("Pulling ssrdumps %s", dumps)
        dest = os.path.join(ad.device_log_path, f"SSRDUMPS_{ad.serial}")
        os.makedirs(dest, exist_ok=True)
        ad.pull_files(dumps, dest)
    ad.adb.shell("find /data/vendor/ssrdump/ -type f -delete", ignore_status=True)
+
+
def start_pcap(pcap, wifi_band, test_name):
    """Start packet capture in monitor mode.

    Args:
        pcap: packet capture object
        wifi_band: '2g' or '5g' or 'dual'
        test_name: test name to be used for pcap file name

    Returns:
        Dictionary with wifi band as key and the tuple
        (pcap Process object, log directory) as the value
    """
    log_dir = os.path.join(
        context.get_current_context().get_full_output_path(), "PacketCapture"
    )
    os.makedirs(log_dir, exist_ok=True)
    # 'dual' captures both bands at once; anything else is a single band.
    bands = [BAND_2G, BAND_5G] if wifi_band == "dual" else [wifi_band]
    return {
        band: (
            pcap.start_packet_capture(band, log_dir, test_name),
            os.path.join(log_dir, test_name),
        )
        for band in bands
    }
+
+
def stop_pcap(pcap, procs, test_status=None):
    """Stop packet capture in monitor mode.

    Since the pcap logs in monitor mode can be very large, we will
    delete them if they are not required. 'test_status' if True, will delete
    the pcap files. If False, we will keep them.

    Args:
        pcap: packet capture object
        procs: dictionary returned by start_pcap
        test_status: status of the test case
    """
    capture_dirs = set()
    for proc, fname in procs.values():
        pcap.stop_packet_capture(proc)
        capture_dirs.add(os.path.dirname(fname))

    # Delete every capture directory (the original referenced the loop
    # variable after the loop, raising NameError when `procs` was empty and
    # deleting only the last entry's directory otherwise).
    if test_status:
        for capture_dir in capture_dirs:
            shutil.rmtree(capture_dir)
+
+
def start_cnss_diags(ads, cnss_diag_file, pixel_models):
    """Starts cnss_diag collection on every device in |ads|."""
    for device in ads:
        start_cnss_diag(device, cnss_diag_file, pixel_models)
+
+
def start_cnss_diag(ad, cnss_diag_file, pixel_models):
    """Start cnss_diag to record extra wifi logs

    Args:
        ad: android device object.
        cnss_diag_file: cnss diag config file to push to device.
        pixel_models: pixel devices.
    """
    if ad.model not in pixel_models:
        ad.log.info("Device not supported to collect pixel logger")
        return
    # Older devices expose the toggle under a legacy property name.
    if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
        prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
    else:
        prop = wifi_constants.CNSS_DIAG_PROP
    if ad.adb.getprop(prop) != "true":
        # `ls ... | wc -l` yields 0 when the config file is absent; only then
        # push the config file to the device.
        if not int(
            ad.adb.shell(
                f"ls -l {CNSS_DIAG_CONFIG_PATH}{CNSS_DIAG_CONFIG_FILE} | wc -l"
            )
        ):
            # NOTE(review): push() is given "src dst" as a single string —
            # presumably the adb wrapper splits it; confirm.
            ad.adb.push(f"{cnss_diag_file} {CNSS_DIAG_CONFIG_PATH}")
        # Clear any stale logs before enabling collection.
        ad.adb.shell(
            "find /data/vendor/wifi/cnss_diag/wlan_logs/ -type f -delete",
            ignore_status=True,
        )
        ad.adb.shell(f"setprop {prop} true", ignore_status=True)
+
+
def stop_cnss_diags(ads, pixel_models):
    """Stops cnss_diag collection on every device in |ads|."""
    for device in ads:
        stop_cnss_diag(device, pixel_models)
+
+
def stop_cnss_diag(ad, pixel_models):
    """Stops cnss_diag

    Args:
        ad: android device object.
        pixel_models: pixel devices.
    """
    if ad.model not in pixel_models:
        ad.log.info("Device not supported to collect pixel logger")
        return
    # Older devices expose the toggle under a legacy property name.
    uses_legacy_prop = ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP
    prop = (
        wifi_constants.LEGACY_CNSS_DIAG_PROP
        if uses_legacy_prop
        else wifi_constants.CNSS_DIAG_PROP
    )
    ad.adb.shell(f"setprop {prop} false", ignore_status=True)
+
+
def get_cnss_diag_log(ad):
    """Pulls the cnss_diag logs from the device's wlan_logs dir.

    Args:
        ad: android device object.
    """
    logs = ad.get_file_names("/data/vendor/wifi/cnss_diag/wlan_logs/")
    if not logs:
        return
    ad.log.info("Pulling cnss_diag logs %s", logs)
    dest = os.path.join(ad.device_log_path, f"CNSS_DIAG_{ad.serial}")
    os.makedirs(dest, exist_ok=True)
    ad.pull_files(logs, dest)
+
+
def turn_location_off_and_scan_toggle_off(ad):
    """Turns off wifi location scans."""
    utils.set_location_service(ad, False)
    ad.droid.wifiScannerToggleAlwaysAvailable(False)
    asserts.assert_true(
        not ad.droid.wifiScannerIsAlwaysAvailable(),
        "Failed to turn off location service's scan.",
    )
diff --git a/packages/antlion/types.py b/packages/antlion/types.py
new file mode 100644
index 0000000..9f231c5
--- /dev/null
+++ b/packages/antlion/types.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+#
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import TypeAlias
+
+Json: TypeAlias = dict[str, "Json"] | list["Json"] | str | int | float | bool | None
+ControllerConfig: TypeAlias = dict[str, Json]
diff --git a/src/antlion/unit_tests/__init__.py b/packages/antlion/unit_tests/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/__init__.py
rename to packages/antlion/unit_tests/__init__.py
diff --git a/packages/antlion/unit_tests/acts_adb_test.py b/packages/antlion/unit_tests/acts_adb_test.py
new file mode 100755
index 0000000..c7a14bc
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_adb_test.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+import mock
+
+from antlion.controllers import adb
+from antlion.controllers.adb_lib.error import AdbCommandError, AdbError
+
+
+class MockJob(object):
+    def __init__(self, exit_status=0, stderr="", stdout=""):
+        self.exit_status = exit_status
+        self.stderr = stderr
+        self.stdout = stdout
+
+
+class MockAdbProxy(adb.AdbProxy):
+    def __init__(self):
+        pass
+
+
+class ADBTest(unittest.TestCase):
+    """A class for testing antlion/controllers/adb.py"""
+
+    def test__exec_cmd_failure_old_adb(self):
+        mock_job = MockJob(exit_status=1, stderr="error: device not found")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
+            with self.assertRaises(AdbError):
+                MockAdbProxy()._exec_cmd(cmd)
+
+    def test__exec_cmd_failure_new_adb(self):
+        mock_job = MockJob(exit_status=1, stderr="error: device 'DEADBEEF' not found")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
+            with self.assertRaises(AdbError):
+                MockAdbProxy()._exec_cmd(cmd)
+
+    def test__exec_cmd_pass_basic(self):
+        mock_job = MockJob(exit_status=0, stderr="DEADBEEF", stdout="FEEDACAB")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
+            result = MockAdbProxy()._exec_cmd(cmd)
+        self.assertEqual(result, "FEEDACAB")
+
+    def test__exec_cmd_ignore_status(self):
+        mock_job = MockJob(exit_status=0, stderr="DEADBEEF", stdout="")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
+            result = MockAdbProxy()._exec_cmd(cmd, ignore_status=True)
+        self.assertEqual(result, "DEADBEEF")
+
+    def test__exec_cmd_pass_grep(self):
+        mock_job = MockJob(exit_status=1, stderr="", stdout="foo")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"grep foo"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
+            result = MockAdbProxy()._exec_cmd(cmd)
+        self.assertEqual(result, "foo")
+
+    def test__exec_cmd_failure_ret_nonzero(self):
+        mock_job = MockJob(exit_status=1, stderr="error not related to adb")
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
+            with self.assertRaises(AdbCommandError):
+                MockAdbProxy()._exec_cmd(cmd)
+
+    def test__exec_cmd_raises_on_bind_error(self):
+        """Tests _exec_cmd raises an AdbError on port forwarding failure."""
+        mock_job = MockJob(
+            exit_status=1,
+            stderr="error: cannot bind listener: Address already in use",
+            stdout="",
+        )
+        cmd = ["adb", "-s", '"SOME_SERIAL"', "shell", '"SOME_SHELL_CMD"']
+        with mock.patch("antlion.libs.proc.job.run", return_value=mock_job):
+            with self.assertRaises(AdbError):
+                MockAdbProxy()._exec_cmd(cmd)
+
+    def test__get_version_number_gets_version_number(self):
+        """Tests the positive case for AdbProxy.get_version_number()."""
+        proxy = MockAdbProxy()
+        expected_version_number = 39
+        proxy.version = lambda: (
+            f"Android Debug Bridge version 1.0.{expected_version_number}\nblah"
+        )
+        self.assertEqual(expected_version_number, proxy.get_version_number())
+
+    def test__get_version_number_raises_upon_parse_failure(self):
+        """Tests the failure case for AdbProxy.get_version_number()."""
+        proxy = MockAdbProxy()
+        proxy.version = lambda: "Bad format"
+        with self.assertRaises(AdbError):
+            proxy.get_version_number()
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/acts_android_device_test.py b/packages/antlion/unit_tests/acts_android_device_test.py
new file mode 100755
index 0000000..9e33f03
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_android_device_test.py
@@ -0,0 +1,754 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import shutil
+import tempfile
+import unittest
+
+import mock
+
+from antlion import logger
+from antlion.controllers import android_device
+from antlion.controllers.android_lib import errors
+
+# Mock log path for a test run.
+MOCK_LOG_PATH = "/tmp/logs/MockTest/xx-xx-xx_xx-xx-xx/"
+
+# Mock start and end time of the adb cat.
+MOCK_ADB_EPOCH_BEGIN_TIME = 191000123
+MOCK_ADB_LOGCAT_BEGIN_TIME = logger.normalize_log_line_timestamp(
+    logger.epoch_to_log_line_timestamp(MOCK_ADB_EPOCH_BEGIN_TIME)
+)
+MOCK_ADB_LOGCAT_END_TIME = "1970-01-02 21:22:02.000"
+
+MOCK_SERIAL = 1
+MOCK_RELEASE_BUILD_ID = "ABC1.123456.007"
+MOCK_DEV_BUILD_ID = "ABC-MR1"
+MOCK_NYC_BUILD_ID = "N4F27P"
+
+
+def get_mock_ads(num):
+    """Generates a list of mock AndroidDevice objects.
+
+    The serial number of each device will be integer 0 through num - 1.
+
+    Args:
+        num: An integer that is the number of mock AndroidDevice objects to
+            create.
+    """
+    ads = []
+    for i in range(num):
+        ad = mock.MagicMock(name="AndroidDevice", serial=i, h_port=None)
+        ad.ensure_screen_on = mock.MagicMock(return_value=True)
+        ads.append(ad)
+    return ads
+
+
+def mock_get_all_instances():
+    return get_mock_ads(5)
+
+
+def mock_list_adb_devices():
+    return [ad.serial for ad in get_mock_ads(5)]
+
+
+class MockAdbProxy(object):
+    """Mock class that swaps out calls to adb with mock calls."""
+
+    def __init__(
+        self,
+        serial,
+        fail_br=False,
+        fail_br_before_N=False,
+        build_id=MOCK_RELEASE_BUILD_ID,
+        return_value=None,
+    ):
+        self.serial = serial
+        self.fail_br = fail_br
+        self.fail_br_before_N = fail_br_before_N
+        self.return_value = return_value
+        self.return_multiple = False
+        self.build_id = build_id
+
+    def shell(self, params, ignore_status=False, timeout=60):
+        if params == "id -u":
+            return "root"
+        elif params == "bugreportz":
+            if self.fail_br:
+                return "OMG I died!\n"
+            return "OK:/path/bugreport.zip\n"
+        elif params == "bugreportz -v":
+            if self.fail_br_before_N:
+                return "/system/bin/sh: bugreportz: not found"
+            return "1.1"
+        else:
+            if self.return_multiple:
+                return self.return_value.pop(0)
+            else:
+                return self.return_value
+
+    def getprop(self, params):
+        if params == "ro.build.id":
+            return self.build_id
+        elif params == "ro.build.version.incremental":
+            return "123456789"
+        elif params == "ro.build.type":
+            return "userdebug"
+        elif params == "ro.build.product" or params == "ro.product.name":
+            return "FakeModel"
+        elif params == "sys.boot_completed":
+            return "1"
+
+    def devices(self):
+        return f"{self.serial}\tdevice"
+
+    def bugreport(self, params, timeout=android_device.BUG_REPORT_TIMEOUT):
+        expected = os.path.join(
+            logging.log_path,
+            f"AndroidDevice{self.serial}",
+            "AndroidDevice%s_%s.txt"
+            % (
+                self.serial,
+                logger.normalize_log_line_timestamp(MOCK_ADB_LOGCAT_BEGIN_TIME),
+            ),
+        )
+        assert expected in params, f"Expected '{expected}', got '{params}'."
+
+    def __getattr__(self, name):
+        """All calls to the none-existent functions in adb proxy would
+        simply return the adb command string.
+        """
+
+        def adb_call(*args, **kwargs):
+            arg_str = " ".join(str(elem) for elem in args)
+            return arg_str
+
+        return adb_call
+
+
+class MockFastbootProxy:
+    """Mock class that swaps out calls to adb with mock calls."""
+
+    def __init__(self, serial):
+        self.serial = serial
+
+    def devices(self):
+        return "xxxx\tdevice\nyyyy\tdevice"
+
+    def __getattr__(self, name):
+        def fastboot_call(*args):
+            arg_str = " ".join(str(elem) for elem in args)
+            return arg_str
+
+        return fastboot_call
+
+
+class ActsAndroidDeviceTest(unittest.TestCase):
+    """This test class has unit tests for the implementation of everything
+    under antlion.controllers.android_device.
+    """
+
+    def setUp(self):
+        # Set log_path to logging since acts logger setup is not called.
+        if not hasattr(logging, "log_path"):
+            setattr(logging, "log_path", "/tmp/logs")
+        # Creates a temp dir to be used by tests in this test class.
+        self.tmp_dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        """Removes the temp dir."""
+        shutil.rmtree(self.tmp_dir)
+
+    # Tests for android_device module functions.
+    # These tests use mock AndroidDevice instances.
+
+    @mock.patch.object(android_device, "get_all_instances", new=mock_get_all_instances)
+    @mock.patch.object(android_device, "list_adb_devices", new=mock_list_adb_devices)
+    def test_create_with_pickup_all(self):
+        pick_all_token = android_device.ANDROID_DEVICE_PICK_ALL_TOKEN
+        actual_ads = android_device.create(pick_all_token)
+        for actual, expected in zip(actual_ads, get_mock_ads(5)):
+            self.assertEqual(actual.serial, expected.serial)
+
+    def test_create_with_empty_config(self):
+        expected_msg = android_device.ANDROID_DEVICE_EMPTY_CONFIG_MSG
+        with self.assertRaisesRegex(errors.AndroidDeviceConfigError, expected_msg):
+            android_device.create([])
+
+    def test_create_with_not_list_config(self):
+        expected_msg = android_device.ANDROID_DEVICE_NOT_LIST_CONFIG_MSG
+        with self.assertRaisesRegex(errors.AndroidDeviceConfigError, expected_msg):
+            android_device.create("HAHA")
+
+    def test_get_device_success_with_serial(self):
+        ads = get_mock_ads(5)
+        expected_serial = 0
+        ad = android_device.get_device(ads, serial=expected_serial)
+        self.assertEqual(ad.serial, expected_serial)
+
+    def test_get_device_success_with_serial_and_extra_field(self):
+        ads = get_mock_ads(5)
+        expected_serial = 1
+        expected_h_port = 5555
+        ads[1].h_port = expected_h_port
+        ad = android_device.get_device(
+            ads, serial=expected_serial, h_port=expected_h_port
+        )
+        self.assertEqual(ad.serial, expected_serial)
+        self.assertEqual(ad.h_port, expected_h_port)
+
+    def test_get_device_no_match(self):
+        ads = get_mock_ads(5)
+        expected_msg = (
+            "Could not find a target device that matches condition: {'serial': 5}."
+        )
+        with self.assertRaisesRegex(ValueError, expected_msg):
+            android_device.get_device(ads, serial=len(ads))
+
+    def test_get_device_too_many_matches(self):
+        ads = get_mock_ads(5)
+        target_serial = ads[1].serial = ads[0].serial
+        expected_msg = r"More than one device matched: \[0, 0\]"
+        with self.assertRaisesRegex(ValueError, expected_msg):
+            android_device.get_device(ads, serial=target_serial)
+
+    def test_start_services_on_ads(self):
+        """Makes sure when an AndroidDevice fails to start some services, all
+        AndroidDevice objects get cleaned up.
+        """
+        msg = "Some error happened."
+        ads = get_mock_ads(3)
+        ads[0].start_services = mock.MagicMock()
+        ads[0].clean_up = mock.MagicMock()
+        ads[1].start_services = mock.MagicMock()
+        ads[1].clean_up = mock.MagicMock()
+        ads[2].start_services = mock.MagicMock(
+            side_effect=errors.AndroidDeviceError(msg)
+        )
+        ads[2].clean_up = mock.MagicMock()
+        with self.assertRaisesRegex(errors.AndroidDeviceError, msg):
+            android_device._start_services_on_ads(ads)
+        ads[0].clean_up.assert_called_once_with()
+        ads[1].clean_up.assert_called_once_with()
+        ads[2].clean_up.assert_called_once_with()
+
+    # Tests for android_device.AndroidDevice class.
+    # These tests mock out any interaction with the OS and real android device
+    # in AndroidDeivce.
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_AndroidDevice_instantiation(self, MockFastboot, MockAdbProxy):
+        """Verifies the AndroidDevice object's basic attributes are correctly
+        set after instantiation.
+        """
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        self.assertEqual(ad.serial, 1)
+        self.assertEqual(ad.model, "fakemodel")
+        self.assertIsNone(ad.adb_logcat_process)
+        expected_lp = os.path.join(logging.log_path, f"AndroidDevice{MOCK_SERIAL}")
+        self.assertEqual(ad.log_path, expected_lp)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_AndroidDevice_build_info_release(self, MockFastboot, MockAdbProxy):
+        """Verifies the AndroidDevice object's basic attributes are correctly
+        set after instantiation.
+        """
+        ad = android_device.AndroidDevice(serial=1)
+        build_info = ad.build_info
+        self.assertEqual(build_info["build_id"], "ABC1.123456.007")
+        self.assertEqual(build_info["build_type"], "userdebug")
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy",
+        return_value=MockAdbProxy(MOCK_SERIAL, build_id=MOCK_DEV_BUILD_ID),
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_AndroidDevice_build_info_dev(self, MockFastboot, MockAdbProxy):
+        """Verifies the AndroidDevice object's basic attributes are correctly
+        set after instantiation.
+        """
+        ad = android_device.AndroidDevice(serial=1)
+        build_info = ad.build_info
+        self.assertEqual(build_info["build_id"], "123456789")
+        self.assertEqual(build_info["build_type"], "userdebug")
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy",
+        return_value=MockAdbProxy(MOCK_SERIAL, build_id=MOCK_NYC_BUILD_ID),
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_AndroidDevice_build_info_nyc(self, MockFastboot, MockAdbProxy):
+        """Verifies the AndroidDevice object's build id is set correctly for
+        NYC releases.
+        """
+        ad = android_device.AndroidDevice(serial=1)
+        build_info = ad.build_info
+        self.assertEqual(build_info["build_id"], MOCK_NYC_BUILD_ID)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch("os.makedirs")
+    @mock.patch("antlion.utils.exe_cmd")
+    @mock.patch(
+        "antlion.controllers.android_device.AndroidDevice.device_log_path",
+        new_callable=mock.PropertyMock,
+    )
+    def test_AndroidDevice_take_bug_report(
+        self, mock_log_path, exe_mock, mock_makedirs, FastbootProxy, MockAdbProxy
+    ):
+        """Verifies AndroidDevice.take_bug_report calls the correct adb command
+        and writes the bugreport file to the correct path.
+        """
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        mock_log_path.return_value = os.path.join(
+            logging.log_path, f"AndroidDevice{ad.serial}"
+        )
+        ad.take_bug_report("test_something", 234325.32)
+        mock_makedirs.assert_called_with(mock_log_path(), exist_ok=True)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy",
+        return_value=MockAdbProxy(MOCK_SERIAL, fail_br=True),
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch("os.makedirs")
+    @mock.patch("antlion.utils.exe_cmd")
+    @mock.patch(
+        "antlion.controllers.android_device.AndroidDevice.device_log_path",
+        new_callable=mock.PropertyMock,
+    )
+    def test_AndroidDevice_take_bug_report_fail(self, mock_log_path, *_):
+        """Verifies AndroidDevice.take_bug_report writes out the correct message
+        when taking bugreport fails.
+        """
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        mock_log_path.return_value = os.path.join(
+            logging.log_path, f"AndroidDevice{ad.serial}"
+        )
+        expected_msg = "Failed to take bugreport on 1: OMG I died!"
+        with self.assertRaisesRegex(errors.AndroidDeviceError, expected_msg):
+            ad.take_bug_report("test_something", 4346343.23)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy",
+        return_value=MockAdbProxy(MOCK_SERIAL, fail_br_before_N=True),
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch("os.makedirs")
+    @mock.patch("antlion.utils.exe_cmd")
+    @mock.patch(
+        "antlion.controllers.android_device.AndroidDevice.device_log_path",
+        new_callable=mock.PropertyMock,
+    )
+    def test_AndroidDevice_take_bug_report_fallback(
+        self, mock_log_path, exe_mock, mock_makedirs, FastbootProxy, MockAdbProxy
+    ):
+        """Verifies AndroidDevice.take_bug_report falls back to traditional
+        bugreport on builds that do not have bugreportz.
+        """
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        mock_log_path.return_value = os.path.join(
+            logging.log_path, f"AndroidDevice{ad.serial}"
+        )
+        ad.take_bug_report("test_something", MOCK_ADB_EPOCH_BEGIN_TIME)
+        mock_makedirs.assert_called_with(mock_log_path(), exist_ok=True)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch("antlion.libs.proc.process.Process")
+    def test_AndroidDevice_start_adb_logcat(
+        self, proc_mock, FastbootProxy, MockAdbProxy
+    ):
+        """Verifies the AndroidDevice method start_adb_logcat. Checks that the
+        underlying logcat process is started properly and correct warning msgs
+        are generated.
+        """
+        with mock.patch(
+            (
+                "antlion.controllers.android_lib.logcat."
+                "create_logcat_keepalive_process"
+            ),
+            return_value=proc_mock,
+        ) as create_proc_mock:
+            ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+            ad.start_adb_logcat()
+            # Verify start did the correct operations.
+            self.assertTrue(ad.adb_logcat_process)
+            log_dir = f"AndroidDevice{ad.serial}"
+            create_proc_mock.assert_called_with(ad.serial, log_dir, "-b all")
+            proc_mock.start.assert_called_with()
+            # Expect warning msg if start is called back to back.
+            expected_msg = "Android device .* already has a running adb logcat"
+            proc_mock.is_running.return_value = True
+            with self.assertLogs(level="WARNING") as log:
+                ad.start_adb_logcat()
+                self.assertRegex(log.output[0], expected_msg)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch(
+        "antlion.controllers.android_lib.logcat.create_logcat_keepalive_process"
+    )
+    def test_AndroidDevice_start_adb_logcat_with_user_param(
+        self, create_proc_mock, FastbootProxy, MockAdbProxy
+    ):
+        """Verifies that start_adb_logcat generates the correct adb logcat
+        command if adb_logcat_param is specified.
+        """
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        ad.adb_logcat_param = "-b radio"
+        ad.start_adb_logcat()
+        # Verify that create_logcat_keepalive_process is called with the
+        # correct command.
+        log_dir = f"AndroidDevice{ad.serial}"
+        create_proc_mock.assert_called_with(ad.serial, log_dir, "-b radio")
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    @mock.patch("antlion.libs.proc.process.Process")
+    def test_AndroidDevice_stop_adb_logcat(
+        self, proc_mock, FastbootProxy, MockAdbProxy
+    ):
+        """Verifies the AndroidDevice method stop_adb_logcat. Checks that the
+        underlying logcat process is stopped properly and correct warning msgs
+        are generated.
+        """
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        ad.adb_logcat_process = proc_mock
+        # Expect warning msg if stop is called before start.
+        expected_msg = "Android device .* does not have an ongoing adb logcat"
+        proc_mock.is_running.return_value = False
+        with self.assertLogs(level="WARNING") as log:
+            ad.stop_adb_logcat()
+            self.assertRegex(log.output[0], expected_msg)
+
+        # Verify the underlying process is stopped.
+        proc_mock.is_running.return_value = True
+        ad.stop_adb_logcat()
+        proc_mock.stop.assert_called_with()
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_get_apk_process_id_process_cannot_find(self, fastboot_proxy, adb_proxy):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        ad.adb.return_value = "does_not_contain_value"
+        self.assertEqual(None, ad.get_package_pid("some_package"))
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_get_apk_process_id_process_exists_second_try(
+        self, fastboot_proxy, adb_proxy
+    ):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        ad.adb.return_multiple = True
+        ad.adb.return_value = ["", "system 1 2 3 4  S com.some_package"]
+        self.assertEqual(1, ad.get_package_pid("some_package"))
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_get_apk_process_id_bad_return(self, fastboot_proxy, adb_proxy):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        ad.adb.return_value = "bad_return_index_error"
+        self.assertEqual(None, ad.get_package_pid("some_package"))
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_get_apk_process_id_bad_return_value(self, fastboot_proxy, adb_proxy):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        ad.adb.return_value = "bad return value error"
+        self.assertEqual(None, ad.get_package_pid("some_package"))
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_ensure_verity_enabled_only_system_enabled(self, fastboot_proxy, adb_proxy):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        root_user_id = "0"
+
+        ad.adb.get_user_id = mock.MagicMock()
+        ad.adb.get_user_id.return_value = root_user_id
+
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["", "2"]  # system.verified
+        )  # vendor.verified
+        ad.adb.ensure_user = mock.MagicMock()
+        ad.reboot = mock.MagicMock()
+        ad.ensure_verity_enabled()
+        ad.reboot.assert_called_once()
+
+        ad.adb.ensure_user.assert_called_with(root_user_id)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_ensure_verity_enabled_only_vendor_enabled(self, fastboot_proxy, adb_proxy):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        root_user_id = "0"
+
+        ad.adb.get_user_id = mock.MagicMock()
+        ad.adb.get_user_id.return_value = root_user_id
+
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["2", ""]  # system.verified
+        )  # vendor.verified
+        ad.adb.ensure_user = mock.MagicMock()
+        ad.reboot = mock.MagicMock()
+
+        ad.ensure_verity_enabled()
+
+        ad.reboot.assert_called_once()
+        ad.adb.ensure_user.assert_called_with(root_user_id)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_ensure_verity_enabled_both_enabled_at_start(
+        self, fastboot_proxy, adb_proxy
+    ):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        root_user_id = "0"
+
+        ad.adb.get_user_id = mock.MagicMock()
+        ad.adb.get_user_id.return_value = root_user_id
+
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["2", "2"]  # system.verified
+        )  # vendor.verified
+        ad.adb.ensure_user = mock.MagicMock()
+        ad.reboot = mock.MagicMock()
+        ad.ensure_verity_enabled()
+
+        assert not ad.reboot.called
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_ensure_verity_disabled_system_already_disabled(
+        self, fastboot_proxy, adb_proxy
+    ):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        root_user_id = "0"
+
+        ad.adb.get_user_id = mock.MagicMock()
+        ad.adb.get_user_id.return_value = root_user_id
+
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["2", ""]  # system.verified
+        )  # vendor.verified
+        ad.adb.ensure_user = mock.MagicMock()
+        ad.reboot = mock.MagicMock()
+        ad.ensure_verity_disabled()
+
+        ad.reboot.assert_called_once()
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_ensure_verity_disabled_vendor_already_disabled(
+        self, fastboot_proxy, adb_proxy
+    ):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        root_user_id = "0"
+
+        ad.adb.get_user_id = mock.MagicMock()
+        ad.adb.get_user_id.return_value = root_user_id
+
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["", "2"]  # system.verified
+        )  # vendor.verified
+        ad.adb.ensure_user = mock.MagicMock()
+        ad.reboot = mock.MagicMock()
+
+        ad.ensure_verity_disabled()
+
+        ad.reboot.assert_called_once()
+        ad.adb.ensure_user.assert_called_with(root_user_id)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_ensure_verity_disabled_disabled_at_start(self, fastboot_proxy, adb_proxy):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        root_user_id = "0"
+
+        ad.adb.get_user_id = mock.MagicMock()
+        ad.adb.get_user_id.return_value = root_user_id
+
+        ad.adb.getprop = mock.MagicMock(
+            side_effect=["", ""]  # system.verified
+        )  # vendor.verified
+        ad.adb.ensure_user = mock.MagicMock()
+        ad.reboot = mock.MagicMock()
+
+        ad.ensure_verity_disabled()
+
+        assert not ad.reboot.called
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_push_system_file(self, fastboot_proxy, adb_proxy):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        ad.ensure_verity_disabled = mock.MagicMock()
+        ad.adb.remount = mock.MagicMock()
+        ad.adb.push = mock.MagicMock()
+
+        ret = ad.push_system_file("asdf", "jkl")
+        self.assertTrue(ret)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_push_system_file_returns_false_on_error(self, fastboot_proxy, adb_proxy):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        ad.ensure_verity_disabled = mock.MagicMock()
+        ad.adb.remount = mock.MagicMock()
+        ad.adb.push = mock.MagicMock(return_value="error")
+
+        ret = ad.push_system_file("asdf", "jkl")
+        self.assertFalse(ret)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_get_my_current_focus_window_return_empty_string(self, *_):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        ad.adb.return_value = ""
+
+        ret = ad.get_my_current_focus_window()
+
+        self.assertEqual("", ret)
+
+    @mock.patch(
+        "antlion.controllers.adb.AdbProxy", return_value=MockAdbProxy(MOCK_SERIAL)
+    )
+    @mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy",
+        return_value=MockFastbootProxy(MOCK_SERIAL),
+    )
+    def test_get_my_current_focus_window_return_current_window(self, *_):
+        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
+        ad.adb.return_value = "mCurrentFocus=Window{a247ded u0 NotificationShade}"
+
+        ret = ad.get_my_current_focus_window()
+
+        self.assertEqual("NotificationShade", ret)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/acts_asserts_test.py b/packages/antlion/unit_tests/acts_asserts_test.py
new file mode 100755
index 0000000..8a87d8f
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_asserts_test.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from mobly import asserts, signals
+
+MSG_EXPECTED_EXCEPTION = "This is an expected exception."
+
+
+class ActsAssertsTest(unittest.TestCase):
+    """Verifies that asserts.xxx functions raise the correct test signals."""
+
+    def test_assert_false(self):
+        asserts.assert_false(False, MSG_EXPECTED_EXCEPTION)
+        with self.assertRaisesRegex(signals.TestFailure, MSG_EXPECTED_EXCEPTION):
+            asserts.assert_false(True, MSG_EXPECTED_EXCEPTION)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/acts_confidence_test_config.json b/packages/antlion/unit_tests/acts_confidence_test_config.json
similarity index 100%
rename from src/antlion/unit_tests/acts_confidence_test_config.json
rename to packages/antlion/unit_tests/acts_confidence_test_config.json
diff --git a/packages/antlion/unit_tests/acts_context_test.py b/packages/antlion/unit_tests/acts_context_test.py
new file mode 100755
index 0000000..0634826
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_context_test.py
@@ -0,0 +1,225 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+from unittest import TestCase
+
+from mock import Mock, patch
+
+from antlion import context
+from antlion.context import (
+    RootContext,
+    TestCaseContext,
+    TestClassContext,
+    TestContext,
+    _update_test_case_context,
+    _update_test_class_context,
+    get_current_context,
+)
+from antlion.event.event import (
+    TestCaseBeginEvent,
+    TestCaseEndEvent,
+    TestClassBeginEvent,
+    TestClassEndEvent,
+)
+
+LOGGING = "antlion.context.logging"
+
+
+def reset_context():
+    context._contexts = [RootContext()]
+
+
+TEST_CASE = "test_case_name"
+
+
+class TestClass:
+    pass
+
+
+class ModuleTest(TestCase):
+    """Unit tests for the context module."""
+
+    def test_update_test_class_context_for_test_class_begin(self):
+        event = Mock(spec=TestClassBeginEvent)
+        event.test_class = Mock()
+
+        _update_test_class_context(event)
+        self.assertIsInstance(get_current_context(), TestClassContext)
+        reset_context()
+
+    def test_update_test_class_context_for_test_class_end(self):
+        event = Mock(spec=TestClassBeginEvent)
+        event.test_class = Mock()
+        event2 = Mock(spec=TestClassEndEvent)
+        event2.test_class = Mock()
+
+        _update_test_class_context(event)
+        _update_test_class_context(event2)
+
+        self.assertIsInstance(get_current_context(), RootContext)
+        reset_context()
+
+    def test_update_test_case_context_for_test_case_begin(self):
+        event = Mock(spec=TestClassBeginEvent)
+        event.test_class = Mock()
+        event2 = Mock(spec=TestCaseBeginEvent)
+        event2.test_class = Mock()
+        event2.test_case = Mock()
+
+        _update_test_class_context(event)
+        _update_test_case_context(event2)
+
+        self.assertIsInstance(get_current_context(), TestCaseContext)
+        reset_context()
+
+    def test_update_test_case_context_for_test_case_end(self):
+        event = Mock(spec=TestClassBeginEvent)
+        event.test_class = Mock()
+        event2 = Mock(spec=TestCaseBeginEvent)
+        event2.test_class = Mock()
+        event2.test_case = Mock()
+        event3 = Mock(spec=TestCaseEndEvent)
+        event3.test_class = Mock()
+        event3.test_case = Mock()
+
+        _update_test_class_context(event)
+        _update_test_case_context(event2)
+        _update_test_case_context(event3)
+
+        self.assertIsInstance(get_current_context(), TestClassContext)
+        reset_context()
+
+
+class TestContextTest(TestCase):
+    """Unit tests for the TestContext class."""
+
+    @patch(LOGGING)
+    def test_get_base_output_path_uses_default(self, logging):
+        context = TestContext()
+
+        self.assertEqual(context.get_base_output_path(), logging.log_path)
+
+    @patch(LOGGING)
+    def test_add_base_path_overrides_default(self, _):
+        context = TestContext()
+        mock_path = Mock()
+
+        context.add_base_output_path("basepath", mock_path)
+
+        self.assertEqual(context.get_base_output_path("basepath"), mock_path)
+
+    def test_get_subcontext_returns_empty_string_by_default(self):
+        context = TestContext()
+
+        self.assertEqual(context.get_subcontext(), "")
+
+    def test_add_subcontext_sets_correct_path(self):
+        context = TestContext()
+        mock_path = Mock()
+
+        context.add_subcontext("subcontext", mock_path)
+
+        self.assertEqual(context.get_subcontext("subcontext"), mock_path)
+
+    @patch(LOGGING)
+    @patch("os.makedirs")
+    def test_get_full_output_path_returns_correct_path(self, *_):
+        context = TestClassContext(TestClass())
+        context.add_base_output_path("foo", "base/path")
+        context.add_subcontext("foo", "subcontext")
+
+        full_path = "base/path/TestClass/subcontext"
+        self.assertEqual(context.get_full_output_path("foo"), full_path)
+
+    def test_identifier_not_implemented(self):
+        context = TestContext()
+
+        self.assertRaises(NotImplementedError, lambda: context.identifier)
+
+
+class TestClassContextTest(TestCase):
+    """Unit tests for the TestClassContext class."""
+
+    def test_init_attributes(self):
+        test_class = Mock()
+        context = TestClassContext(test_class)
+
+        self.assertEqual(context.test_class, test_class)
+
+    def test_get_class_name(self):
+        class TestClass:
+            pass
+
+        test_class = TestClass()
+        context = TestClassContext(test_class)
+
+        self.assertEqual(context.test_class_name, TestClass.__name__)
+
+    def test_context_dir_is_class_name(self):
+        class TestClass:
+            pass
+
+        test_class = TestClass()
+        context = TestClassContext(test_class)
+
+        self.assertEqual(context._get_default_context_dir(), TestClass.__name__)
+
+    def test_identifier_is_class_name(self):
+        class TestClass:
+            pass
+
+        test_class = TestClass()
+        context = TestClassContext(test_class)
+
+        self.assertEqual(context.identifier, TestClass.__name__)
+
+
+class TestCaseContextTest(TestCase):
+    """Unit tests for the TestCaseContext class."""
+
+    def test_init_attributes(self):
+        test_class = Mock()
+        test_case = TEST_CASE
+        context = TestCaseContext(test_class, test_case)
+
+        self.assertEqual(context.test_class, test_class)
+        self.assertEqual(context.test_case, test_case)
+        self.assertEqual(context.test_case_name, test_case)
+
+    def test_get_class_name(self):
+        test_class = TestClass()
+        context = TestCaseContext(test_class, TEST_CASE)
+
+        self.assertEqual(context.test_class_name, TestClass.__name__)
+
+    def test_context_dir_is_class_and_test_case_name(self):
+        test_class = TestClass()
+        context = TestCaseContext(test_class, TEST_CASE)
+
+        context_dir = f"{TestClass.__name__}/{TEST_CASE}"
+        self.assertEqual(context._get_default_context_dir(), context_dir)
+
+    def test_identifier_is_class_and_test_case_name(self):
+        test_class = TestClass()
+        context = TestCaseContext(test_class, TEST_CASE)
+
+        identifier = f"{TestClass.__name__}.{TEST_CASE}"
+        self.assertEqual(context.identifier, identifier)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/acts_error_test.py b/packages/antlion/unit_tests/acts_error_test.py
new file mode 100755
index 0000000..2431bd3
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_error_test.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import unittest
+
+from antlion import error
+
+
+class ActsErrorTest(unittest.TestCase):
+    def test_assert_key_pulled_from_acts_error_code(self):
+        e = error.ActsError()
+        self.assertEqual(e.error_code, 100)
+
+    def test_assert_description_pulled_from_docstring(self):
+        e = error.ActsError()
+        self.assertEqual(e.error_doc, "Base Acts Error")
+
+    def test_error_without_args(self):
+        e = error.ActsError()
+        self.assertEqual(e.details, "")
+
+    def test_error_with_args(self):
+        args = ("hello",)
+        e = error.ActsError(*args)
+        self.assertEqual(e.details, "hello")
+
+    def test_error_with_kwargs(self):
+        e = error.ActsError(key="value")
+        self.assertIn(("key", "value"), e.extras.items())
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/acts_host_utils_test.py b/packages/antlion/unit_tests/acts_host_utils_test.py
new file mode 100755
index 0000000..cc77f4f
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_host_utils_test.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import socket
+import unittest
+
+from antlion.controllers.utils_lib import host_utils
+
+
+class ActsHostUtilsTest(unittest.TestCase):
+    """This test class has unit tests for the implementation of everything
+    under antlion.controllers.adb.
+    """
+
+    def test_detects_udp_port_in_use(self):
+        test_s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        test_s.bind(("localhost", 0))
+        port = test_s.getsockname()[1]
+        try:
+            self.assertFalse(host_utils.is_port_available(port))
+        finally:
+            test_s.close()
+
+    def test_detects_tcp_port_in_use(self):
+        test_s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        test_s.bind(("localhost", 0))
+        port = test_s.getsockname()[1]
+        try:
+            self.assertFalse(host_utils.is_port_available(port))
+        finally:
+            test_s.close()
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/acts_import_unit_test.py b/packages/antlion/unit_tests/acts_import_unit_test.py
new file mode 100755
index 0000000..581f5f8
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_import_unit_test.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import importlib.machinery
+import os
+import re
+import unittest
+import uuid
+
+
+def import_module(name, path):
+    return importlib.machinery.SourceFileLoader(name, path).load_module()
+
+
+def import_acts():
+    return importlib.import_module("antlion")
+
+
+PY_FILE_REGEX = re.compile(r".+\.py$")
+
+DENYLIST = [
+    "antlion/controllers/packet_sender.py",
+]
+
+DENYLIST_DIRECTORIES = []
+
+
+class ActsImportUnitTest(unittest.TestCase):
+    """Test that all acts framework imports work."""
+
+    def test_import_acts_successful(self):
+        """Test that importing ACTS works."""
+        acts = import_acts()
+        self.assertIsNotNone(acts)
+
+    # TODO(b/190659975): Re-enable once permission issue is resolved.
+    @unittest.skip("Permission error: b/190659975")
+    def test_import_framework_successful(self):
+        """Dynamically test all imports from the framework."""
+        acts = import_acts()
+        if hasattr(acts, "__path__") and len(acts.__path__) > 0:
+            acts_path = acts.__path__[0]
+        else:
+            acts_path = os.path.dirname(acts.__file__)
+
+        for root, _, files in os.walk(acts_path):
+            for f in files:
+                full_path = os.path.join(root, f)
+                if any(full_path.endswith(e) for e in DENYLIST) or any(
+                    e in full_path for e in DENYLIST_DIRECTORIES
+                ):
+                    continue
+
+                path = os.path.relpath(os.path.join(root, f), os.getcwd())
+
+                if PY_FILE_REGEX.match(full_path):
+                    with self.subTest(msg=f"import {path}"):
+                        fake_module_name = str(uuid.uuid4())
+                        module = import_module(fake_module_name, path)
+                        self.assertIsNotNone(module)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/acts_job_test.py b/packages/antlion/unit_tests/acts_job_test.py
new file mode 100755
index 0000000..9953154
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_job_test.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import unittest
+
+import mock
+
+from antlion.libs.proc import job
+
+if os.name == "posix" and sys.version_info[0] < 3:
+    import subprocess32 as subprocess
+else:
+    import subprocess
+
+
+class FakePopen(object):
+    """A fake version of the object returned from subprocess.Popen()."""
+
+    def __init__(self, stdout=None, stderr=None, returncode=0, will_timeout=False):
+        self.returncode = returncode
+        self._stdout = bytes(stdout, "utf-8") if stdout is not None else bytes()
+        self._stderr = bytes(stderr, "utf-8") if stderr is not None else bytes()
+        self._will_timeout = will_timeout
+
+    def communicate(self, timeout=None):
+        if self._will_timeout:
+            raise subprocess.TimeoutExpired(-1, "Timed out according to test logic")
+        return self._stdout, self._stderr
+
+    def kill(self):
+        pass
+
+    def wait(self):
+        pass
+
+
+class JobTestCases(unittest.TestCase):
+    @mock.patch(
+        "antlion.libs.proc.job.subprocess.Popen",
+        return_value=FakePopen(stdout="TEST\n"),
+    )
+    def test_run_success(self, popen):
+        """Test running a simple shell command."""
+        result = job.run("echo TEST")
+        self.assertTrue(result.stdout.startswith("TEST"))
+
+    @mock.patch(
+        "antlion.libs.proc.job.subprocess.Popen",
+        return_value=FakePopen(stderr="TEST\n"),
+    )
+    def test_run_stderr(self, popen):
+        """Test that we can read process stderr."""
+        result = job.run("echo TEST 1>&2")
+        self.assertEqual(len(result.stdout), 0)
+        self.assertTrue(result.stderr.startswith("TEST"))
+        self.assertFalse(result.stdout)
+
+    @mock.patch(
+        "antlion.libs.proc.job.subprocess.Popen", return_value=FakePopen(returncode=1)
+    )
+    def test_run_error(self, popen):
+        """Test that we raise on non-zero exit statuses."""
+        self.assertRaises(job.Error, job.run, "exit 1")
+
+    @mock.patch(
+        "antlion.libs.proc.job.subprocess.Popen", return_value=FakePopen(returncode=1)
+    )
+    def test_run_with_ignored_error(self, popen):
+        """Test that we can ignore exit status on request."""
+        result = job.run("exit 1", ignore_status=True)
+        self.assertEqual(result.exit_status, 1)
+
+    @mock.patch(
+        "antlion.libs.proc.job.subprocess.Popen",
+        return_value=FakePopen(will_timeout=True),
+    )
+    def test_run_timeout(self, popen):
+        """Test that we correctly implement command timeouts."""
+        self.assertRaises(job.Error, job.run, "sleep 5", timeout_sec=0.1)
+
+    @mock.patch(
+        "antlion.libs.proc.job.subprocess.Popen",
+        return_value=FakePopen(stdout="TEST\n"),
+    )
+    def test_run_no_shell(self, popen):
+        """Test that we handle running without a wrapping shell."""
+        result = job.run(["echo", "TEST"])
+        self.assertTrue(result.stdout.startswith("TEST"))
+
+    @mock.patch(
+        "antlion.libs.proc.job.subprocess.Popen",
+        return_value=FakePopen(stdout="TEST\n"),
+    )
+    def test_job_env(self, popen):
+        """Test that we can set environment variables correctly."""
+        test_env = {"MYTESTVAR": "20"}
+        result = job.run("printenv", env=test_env.copy())
+        popen.assert_called_once()
+        _, kwargs = popen.call_args
+        self.assertTrue("env" in kwargs)
+        self.assertEqual(kwargs["env"], test_env)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/acts_logger_test.py b/packages/antlion/unit_tests/acts_logger_test.py
new file mode 100755
index 0000000..c6511c3
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_logger_test.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import time
+import unittest
+
+from antlion import logger
+
+
+class ActsLoggerTest(unittest.TestCase):
+    """Verifies code in antlion.logger module."""
+
+    def test_epoch_to_log_line_timestamp(self):
+        os.environ["TZ"] = "US/Pacific"
+        time.tzset()
+        actual_stamp = logger.epoch_to_log_line_timestamp(1469134262116)
+        self.assertEqual("2016-07-21 13:51:02.116", actual_stamp)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/acts_sanity_test_config.json b/packages/antlion/unit_tests/acts_sanity_test_config.json
similarity index 100%
rename from src/antlion/unit_tests/acts_sanity_test_config.json
rename to packages/antlion/unit_tests/acts_sanity_test_config.json
diff --git a/src/antlion/unit_tests/acts_sniffer_test_config.json b/packages/antlion/unit_tests/acts_sniffer_test_config.json
similarity index 100%
rename from src/antlion/unit_tests/acts_sniffer_test_config.json
rename to packages/antlion/unit_tests/acts_sniffer_test_config.json
diff --git a/packages/antlion/unit_tests/acts_utils_test.py b/packages/antlion/unit_tests/acts_utils_test.py
new file mode 100755
index 0000000..33b4679
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_utils_test.py
@@ -0,0 +1,345 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import subprocess
+import unittest
+
+import mock
+
+from antlion import utils
+from antlion.capabilities.ssh import SSHConfig, SSHResult
+from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.fuchsia_lib.sl4f import SL4F
+from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHProvider
+from antlion.controllers.utils_lib.ssh.connection import SshConnection
+from antlion.libs.proc import job
+
+PROVISIONED_STATE_GOOD = 1
+
+MOCK_ENO1_IP_ADDRESSES = """100.127.110.79
+2401:fa00:480:7a00:8d4f:85ff:cc5c:787e
+2401:fa00:480:7a00:459:b993:fcbf:1419
+fe80::c66d:3c75:2cec:1d72"""
+
+MOCK_WLAN1_IP_ADDRESSES = ""
+
+FUCHSIA_INTERFACES = {
+    "id": "1",
+    "result": [
+        {
+            "id": 1,
+            "name": "lo",
+            "ipv4_addresses": [
+                [127, 0, 0, 1],
+            ],
+            "ipv6_addresses": [
+                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
+            ],
+            "online": True,
+            "mac": [0, 0, 0, 0, 0, 0],
+        },
+        {
+            "id": 2,
+            "name": "eno1",
+            "ipv4_addresses": [
+                [100, 127, 110, 79],
+            ],
+            "ipv6_addresses": [
+                [254, 128, 0, 0, 0, 0, 0, 0, 198, 109, 60, 117, 44, 236, 29, 114],
+                [36, 1, 250, 0, 4, 128, 122, 0, 141, 79, 133, 255, 204, 92, 120, 126],
+                [36, 1, 250, 0, 4, 128, 122, 0, 4, 89, 185, 147, 252, 191, 20, 25],
+            ],
+            "online": True,
+            "mac": [0, 224, 76, 5, 76, 229],
+        },
+        {
+            "id": 3,
+            "name": "wlanxc0",
+            "ipv4_addresses": [],
+            "ipv6_addresses": [
+                [254, 128, 0, 0, 0, 0, 0, 0, 96, 255, 93, 96, 52, 253, 253, 243],
+                [254, 128, 0, 0, 0, 0, 0, 0, 70, 7, 11, 255, 254, 118, 126, 192],
+            ],
+            "online": False,
+            "mac": [68, 7, 11, 118, 126, 192],
+        },
+    ],
+    "error": None,
+}
+
+CORRECT_FULL_IP_LIST = {
+    "ipv4_private": [],
+    "ipv4_public": ["100.127.110.79"],
+    "ipv6_link_local": ["fe80::c66d:3c75:2cec:1d72"],
+    "ipv6_private_local": [],
+    "ipv6_public": [
+        "2401:fa00:480:7a00:8d4f:85ff:cc5c:787e",
+        "2401:fa00:480:7a00:459:b993:fcbf:1419",
+    ],
+}
+
+CORRECT_EMPTY_IP_LIST = {
+    "ipv4_private": [],
+    "ipv4_public": [],
+    "ipv6_link_local": [],
+    "ipv6_private_local": [],
+    "ipv6_public": [],
+}
+
+
+class IpAddressUtilTest(unittest.TestCase):
+    def test_positive_ipv4_normal_address(self):
+        ip_address = "192.168.1.123"
+        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
+
+    def test_positive_ipv4_any_address(self):
+        ip_address = "0.0.0.0"
+        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
+
+    def test_positive_ipv4_broadcast(self):
+        ip_address = "255.255.255.0"
+        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
+
+    def test_negative_ipv4_with_ipv6_address(self):
+        ip_address = "fe80::f693:9fff:fef4:1ac"
+        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
+
+    def test_negative_ipv4_with_invalid_string(self):
+        ip_address = "fdsafdsafdsafdsf"
+        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
+
+    def test_negative_ipv4_with_invalid_number(self):
+        ip_address = "192.168.500.123"
+        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
+
+    def test_positive_ipv6(self):
+        ip_address = "fe80::f693:9fff:fef4:1ac"
+        self.assertTrue(utils.is_valid_ipv6_address(ip_address))
+
+    def test_positive_ipv6_link_local(self):
+        ip_address = "fe80::"
+        self.assertTrue(utils.is_valid_ipv6_address(ip_address))
+
+    def test_negative_ipv6_with_ipv4_address(self):
+        ip_address = "192.168.1.123"
+        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
+
+    def test_negative_ipv6_invalid_characters(self):
+        ip_address = "fe80:jkyr:f693:9fff:fef4:1ac"
+        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
+
+    def test_negative_ipv6_invalid_string(self):
+        ip_address = "fdsafdsafdsafdsf"
+        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
+
+    @mock.patch("antlion.libs.proc.job.run")
+    def test_local_get_interface_ip_addresses_full(self, job_mock):
+        job_mock.side_effect = [
+            job.Result(stdout=bytes(MOCK_ENO1_IP_ADDRESSES, "utf-8"), encoding="utf-8"),
+        ]
+        self.assertEqual(
+            utils.get_interface_ip_addresses(job, "eno1"), CORRECT_FULL_IP_LIST
+        )
+
+    @mock.patch("antlion.libs.proc.job.run")
+    def test_local_get_interface_ip_addresses_empty(self, job_mock):
+        job_mock.side_effect = [
+            job.Result(
+                stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, "utf-8"), encoding="utf-8"
+            ),
+        ]
+        self.assertEqual(
+            utils.get_interface_ip_addresses(job, "wlan1"), CORRECT_EMPTY_IP_LIST
+        )
+
+    @mock.patch("antlion.controllers.utils_lib.ssh.connection.SshConnection.run")
+    def test_ssh_get_interface_ip_addresses_full(self, ssh_mock):
+        ssh_mock.side_effect = [
+            job.Result(stdout=bytes(MOCK_ENO1_IP_ADDRESSES, "utf-8"), encoding="utf-8"),
+        ]
+        self.assertEqual(
+            utils.get_interface_ip_addresses(SshConnection("mock_settings"), "eno1"),
+            CORRECT_FULL_IP_LIST,
+        )
+
+    @mock.patch("antlion.controllers.utils_lib.ssh.connection.SshConnection.run")
+    def test_ssh_get_interface_ip_addresses_empty(self, ssh_mock):
+        ssh_mock.side_effect = [
+            job.Result(
+                stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, "utf-8"), encoding="utf-8"
+            ),
+        ]
+        self.assertEqual(
+            utils.get_interface_ip_addresses(SshConnection("mock_settings"), "wlan1"),
+            CORRECT_EMPTY_IP_LIST,
+        )
+
+    @mock.patch("antlion.controllers.adb.AdbProxy")
+    @mock.patch.object(AndroidDevice, "is_bootloader", return_value=True)
+    def test_android_get_interface_ip_addresses_full(self, is_bootloader, adb_mock):
+        adb_mock().shell.side_effect = [
+            MOCK_ENO1_IP_ADDRESSES,
+        ]
+        self.assertEqual(
+            utils.get_interface_ip_addresses(AndroidDevice(), "eno1"),
+            CORRECT_FULL_IP_LIST,
+        )
+
+    @mock.patch("antlion.controllers.adb.AdbProxy")
+    @mock.patch.object(AndroidDevice, "is_bootloader", return_value=True)
+    def test_android_get_interface_ip_addresses_empty(self, is_bootloader, adb_mock):
+        adb_mock().shell.side_effect = [
+            MOCK_WLAN1_IP_ADDRESSES,
+        ]
+        self.assertEqual(
+            utils.get_interface_ip_addresses(AndroidDevice(), "wlan1"),
+            CORRECT_EMPTY_IP_LIST,
+        )
+
+    @mock.patch(
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch(
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.ffx",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch("antlion.controllers.fuchsia_lib.sl4f.wait_for_port")
+    @mock.patch("antlion.controllers.fuchsia_lib.ssh.FuchsiaSSHProvider.run")
+    @mock.patch("antlion.capabilities.ssh.SSHProvider.wait_until_reachable")
+    @mock.patch(
+        "antlion.controllers.fuchsia_device." "FuchsiaDevice._generate_ssh_config"
+    )
+    @mock.patch(
+        "antlion.controllers."
+        "fuchsia_lib.netstack.netstack_lib."
+        "FuchsiaNetstackLib.netstackListInterfaces"
+    )
+    def test_fuchsia_get_interface_ip_addresses_full(
+        self,
+        list_interfaces_mock,
+        generate_ssh_config_mock,
+        ssh_wait_until_reachable_mock,
+        ssh_run_mock,
+        wait_for_port_mock,
+        ffx_mock,
+        sl4f_mock,
+    ):
+        # Configure the log path which is required by ACTS logger.
+        logging.log_path = "/tmp/unit_test_garbage"
+
+        ssh = FuchsiaSSHProvider(SSHConfig("192.168.1.1", 22, "/dev/null"))
+        ssh_run_mock.return_value = SSHResult(
+            subprocess.CompletedProcess([], 0, stdout=b"", stderr=b"")
+        )
+
+        # Don't try to wait for the SL4F server to start; it's not being used.
+        wait_for_port_mock.return_value = None
+
+        sl4f_mock.return_value = SL4F(ssh, "http://192.168.1.1:80")
+        ssh_wait_until_reachable_mock.return_value = None
+
+        list_interfaces_mock.return_value = FUCHSIA_INTERFACES
+        self.assertEqual(
+            utils.get_interface_ip_addresses(
+                FuchsiaDevice({"ip": "192.168.1.1"}), "eno1"
+            ),
+            CORRECT_FULL_IP_LIST,
+        )
+
+    @mock.patch(
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch(
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.ffx",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch("antlion.controllers.fuchsia_lib.sl4f.wait_for_port")
+    @mock.patch("antlion.controllers.fuchsia_lib.ssh.FuchsiaSSHProvider.run")
+    @mock.patch("antlion.capabilities.ssh.SSHProvider.wait_until_reachable")
+    @mock.patch(
+        "antlion.controllers.fuchsia_device." "FuchsiaDevice._generate_ssh_config"
+    )
+    @mock.patch(
+        "antlion.controllers."
+        "fuchsia_lib.netstack.netstack_lib."
+        "FuchsiaNetstackLib.netstackListInterfaces"
+    )
+    def test_fuchsia_get_interface_ip_addresses_empty(
+        self,
+        list_interfaces_mock,
+        generate_ssh_config_mock,
+        ssh_wait_until_reachable_mock,
+        ssh_run_mock,
+        wait_for_port_mock,
+        ffx_mock,
+        sl4f_mock,
+    ):
+        # Configure the log path which is required by ACTS logger.
+        logging.log_path = "/tmp/unit_test_garbage"
+
+        ssh = FuchsiaSSHProvider(SSHConfig("192.168.1.1", 22, "/dev/null"))
+        ssh_run_mock.return_value = SSHResult(
+            subprocess.CompletedProcess([], 0, stdout=b"", stderr=b"")
+        )
+
+        # Don't try to wait for the SL4F server to start; it's not being used.
+        wait_for_port_mock.return_value = None
+        ssh_wait_until_reachable_mock.return_value = None
+        sl4f_mock.return_value = SL4F(ssh, "http://192.168.1.1:80")
+
+        list_interfaces_mock.return_value = FUCHSIA_INTERFACES
+        self.assertEqual(
+            utils.get_interface_ip_addresses(
+                FuchsiaDevice({"ip": "192.168.1.1"}), "wlan1"
+            ),
+            CORRECT_EMPTY_IP_LIST,
+        )
+
+
+class GetDeviceTest(unittest.TestCase):
+    class TestDevice:
+        def __init__(self, id, device_type=None) -> None:
+            self.id = id
+            if device_type:
+                self.device_type = device_type
+
+    def test_get_device_none(self):
+        devices = []
+        self.assertRaises(ValueError, utils.get_device, devices, "DUT")
+
+    def test_get_device_default_one(self):
+        devices = [self.TestDevice(0)]
+        self.assertEqual(utils.get_device(devices, "DUT").id, 0)
+
+    def test_get_device_default_many(self):
+        devices = [self.TestDevice(0), self.TestDevice(1)]
+        self.assertEqual(utils.get_device(devices, "DUT").id, 0)
+
+    def test_get_device_specified_one(self):
+        devices = [self.TestDevice(0), self.TestDevice(1, "DUT")]
+        self.assertEqual(utils.get_device(devices, "DUT").id, 1)
+
+    def test_get_device_specified_many(self):
+        devices = [self.TestDevice(0, "DUT"), self.TestDevice(1, "DUT")]
+        self.assertRaises(ValueError, utils.get_device, devices, "DUT")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/__init__.py b/packages/antlion/unit_tests/controllers/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/__init__.py
rename to packages/antlion/unit_tests/controllers/__init__.py
diff --git a/src/antlion/unit_tests/controllers/android_lib/__init__.py b/packages/antlion/unit_tests/controllers/android_lib/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/android_lib/__init__.py
rename to packages/antlion/unit_tests/controllers/android_lib/__init__.py
diff --git a/packages/antlion/unit_tests/controllers/android_lib/logcat_test.py b/packages/antlion/unit_tests/controllers/android_lib/logcat_test.py
new file mode 100644
index 0000000..82259f2
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/android_lib/logcat_test.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import unittest
+
+from unittest import mock
+
+from antlion.controllers.android_lib import logcat
+from antlion.controllers.android_lib.logcat import TimestampTracker
+
+BASE_TIMESTAMP = "2000-01-01 12:34:56.789   123 75348 "
+
+
+class LogcatTest(unittest.TestCase):
+    """Tests antlion.controllers.android_lib.logcat"""
+
+    @staticmethod
+    def patch(patched):
+        return mock.patch(f"antlion.controllers.android_lib.logcat.{patched}")
+
+    def setUp(self):
+        self._get_log_level = logcat._get_log_level
+
+    def tearDown(self):
+        logcat._get_log_level = self._get_log_level
+
+    # TimestampTracker
+
+    def test_read_output_sets_last_timestamp_if_found(self):
+        tracker = TimestampTracker()
+        tracker.read_output(f"{BASE_TIMESTAMP}D message")
+
+        self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
+
+    def test_read_output_keeps_last_timestamp_if_no_new_stamp_is_found(self):
+        tracker = TimestampTracker()
+        tracker.read_output(f"{BASE_TIMESTAMP}D message")
+        tracker.read_output("--------- beginning of main")
+
+        self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
+
+    def test_read_output_updates_timestamp_to_first_in_results(self):
+        tracker = TimestampTracker()
+        tracker.read_output(f"{BASE_TIMESTAMP}D 9999-99-99 12:34:56.789")
+
+        self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
+
+    # _get_log_level
+
+    def test_get_log_level_verbose(self):
+        """Tests that Logcat's verbose logs make it to the debug level."""
+        level = logcat._get_log_level(f"{BASE_TIMESTAMP}V")
+
+        self.assertEqual(level, logging.DEBUG)
+
+    def test_get_log_level_debug(self):
+        """Tests that Logcat's debug logs make it to the debug level."""
+        level = logcat._get_log_level(f"{BASE_TIMESTAMP}D")
+
+        self.assertEqual(level, logging.DEBUG)
+
+    def test_get_log_level_info(self):
+        """Tests that Logcat's info logs make it to the info level."""
+        level = logcat._get_log_level(f"{BASE_TIMESTAMP}I")
+
+        self.assertEqual(level, logging.INFO)
+
+    def test_get_log_level_warning(self):
+        """Tests that Logcat's warning logs make it to the warning level."""
+        level = logcat._get_log_level(f"{BASE_TIMESTAMP}W")
+
+        self.assertEqual(level, logging.WARNING)
+
+    def test_get_log_level_error(self):
+        """Tests that Logcat's error logs make it to the error level."""
+        level = logcat._get_log_level(f"{BASE_TIMESTAMP}E")
+
+        self.assertEqual(level, logging.ERROR)
+
+    def test_get_log_level_markers(self):
+        """Tests that Logcat's marker logs make it to the error level."""
+        level = logcat._get_log_level("--------- beginning of main")
+
+        self.assertEqual(level, logging.ERROR)
+
+    # _log_line_func
+
+    def test_log_line_func_returns_func_that_logs_to_given_logger(self):
+        logcat._get_log_level = lambda message: logging.INFO
+        tracker = mock.Mock()
+        log = mock.Mock()
+        message = "MESSAGE"
+
+        logcat._log_line_func(log, tracker)(message)
+
+        self.assertEqual(log.log.called, True)
+        log.log.assert_called_once_with(logging.INFO, message)
+
+    def test_log_line_func_returns_func_that_updates_the_timestamp(self):
+        logcat._get_log_level = lambda message: logging.INFO
+        tracker = mock.Mock()
+        log = mock.Mock()
+        message = "MESSAGE"
+
+        logcat._log_line_func(log, tracker)(message)
+
+        self.assertEqual(tracker.read_output.called, True)
+        tracker.read_output.assert_called_once_with(message)
+
+    # _on_retry
+
+    def test_on_retry_returns_func_that_formats_with_last_timestamp(self):
+        tracker = TimestampTracker()
+        tracker.read_output(BASE_TIMESTAMP)
+        new_command = logcat._on_retry("S3R14L", "extra_params", tracker)(None)
+
+        self.assertIn(f'-T "{tracker.last_timestamp}"', new_command)
+
+    def test_on_retry_func_returns_string_that_contains_the_given_serial(self):
+        tracker = TimestampTracker()
+        tracker.read_output(BASE_TIMESTAMP)
+        new_command = logcat._on_retry("S3R14L", "extra_params", tracker)(None)
+
+        self.assertTrue("-s S3R14L" in new_command)
+
+    def test_on_retry_func_returns_string_that_contains_any_extra_params(self):
+        tracker = TimestampTracker()
+        tracker.read_output(BASE_TIMESTAMP)
+        new_command = logcat._on_retry("S3R14L", "extra_params", tracker)(None)
+
+        self.assertTrue("extra_params" in new_command)
+
+    # create_logcat_keepalive_process
+
+    def test_create_logcat_keepalive_process_creates_a_new_logger(self):
+        with self.patch("log_stream") as log_stream, self.patch("Process"):
+            logcat.create_logcat_keepalive_process("S3R14L", "dir")
+        self.assertEqual(log_stream.create_logger.call_args[0][0], "adblog_S3R14L")
+        self.assertEqual(log_stream.create_logger.call_args[1]["subcontext"], "dir")
+
+    def test_create_logcat_keepalive_process_creates_a_new_process(self):
+        with self.patch("log_stream"), self.patch("Process") as process:
+            logcat.create_logcat_keepalive_process("S3R14L", "dir")
+
+        self.assertIn("S3R14L", process.call_args[0][0])
+
+    def test_create_logcat_keepalive_process_sets_output_callback(self):
+        with self.patch("log_stream"), self.patch("Process"):
+            process = logcat.create_logcat_keepalive_process("S3R14L", "dir")
+
+        self.assertEqual(process.set_on_output_callback.called, True)
+
+    def test_create_logcat_keepalive_process_sets_on_terminate_callback(self):
+        with self.patch("log_stream"), self.patch("Process"):
+            process = logcat.create_logcat_keepalive_process("S3R14L", "dir")
+
+        self.assertEqual(process.set_on_terminate_callback.called, True)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/android_lib/services_test.py b/packages/antlion/unit_tests/controllers/android_lib/services_test.py
new file mode 100644
index 0000000..d0cd787
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/android_lib/services_test.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+from unittest import mock
+
+from antlion.controllers.android_lib import services
+from antlion.controllers.android_lib.events import (
+    AndroidStartServicesEvent,
+    AndroidStopServicesEvent,
+)
+from antlion.event import event_bus
+
+
+class ServicesTest(unittest.TestCase):
+    """Tests antlion.controllers.android_lib.services"""
+
+    # AndroidService
+
+    def test_register_adds_both_start_and_stop_methods(self):
+        """Test that both the _start and _stop methods are registered to
+        their respective events upon calling register().
+        """
+        event_bus._event_bus = event_bus._EventBus()
+        service = services.AndroidService(mock.Mock())
+        service.register()
+        subscriptions = event_bus._event_bus._subscriptions
+        self.assertTrue(
+            any(
+                subscription._func == service._start
+                for subscription in subscriptions[AndroidStartServicesEvent]
+            )
+        )
+        self.assertTrue(
+            any(
+                subscription._func == service._stop
+                for subscription in subscriptions[AndroidStopServicesEvent]
+            )
+        )
+
+    @unittest.mock.patch.object(services.AndroidService, "_start")
+    def test_event_deliver_only_to_matching_serial(self, start_fn):
+        """Test that the service only responds to events that matches its
+        device serial.
+        """
+        event_bus._event_bus = event_bus._EventBus()
+        service = services.AndroidService(mock.Mock())
+        service.ad.serial = "right_serial"
+        service.register()
+
+        wrong_ad = mock.Mock()
+        wrong_ad.serial = "wrong_serial"
+        wrong_event = AndroidStartServicesEvent(wrong_ad)
+        event_bus.post(wrong_event)
+        start_fn.assert_not_called()
+
+        right_ad = mock.Mock()
+        right_ad.serial = "right_serial"
+        right_event = AndroidStartServicesEvent(right_ad)
+        event_bus.post(right_event)
+        start_fn.assert_called_with(right_event)
+
+    def test_unregister_removes_both_start_and_stop_methods(self):
+        """Test that both the _start and _stop methods are unregistered from
+        their respective events upon calling unregister().
+        """
+        event_bus._event_bus = event_bus._EventBus()
+        service = services.AndroidService(mock.Mock())
+        service.register()
+        service.unregister()
+        subscriptions = event_bus._event_bus._subscriptions
+        self.assertFalse(
+            any(
+                subscription._func == service._start
+                for subscription in subscriptions[AndroidStartServicesEvent]
+            )
+        )
+        self.assertFalse(
+            any(
+                subscription._func == service._stop
+                for subscription in subscriptions[AndroidStopServicesEvent]
+            )
+        )
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/ap_lib/__init__.py b/packages/antlion/unit_tests/controllers/ap_lib/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/ap_lib/__init__.py
rename to packages/antlion/unit_tests/controllers/ap_lib/__init__.py
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py b/packages/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
new file mode 100644
index 0000000..c4841b3
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ipaddress
+import unittest
+
+from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet
+
+
+class DhcpConfigTest(unittest.TestCase):
+    def setUp(self):
+        super().setUp()
+        # These config files may have long diffs, modify this setting to
+        # ensure they're printed.
+        self.maxDiff = None
+
+    def test_basic_dhcp_config(self):
+        dhcp_conf = DhcpConfig()
+
+        expected_config = "default-lease-time 600;\n" "max-lease-time 7200;"
+
+        self.assertEqual(expected_config, dhcp_conf.render_config_file())
+
+    def test_dhcp_config_with_lease_times(self):
+        default_lease_time = 350
+        max_lease_time = 5000
+        dhcp_conf = DhcpConfig(
+            default_lease_time=default_lease_time, max_lease_time=max_lease_time
+        )
+
+        expected_config = (
+            f"default-lease-time {default_lease_time};\n"
+            f"max-lease-time {max_lease_time};"
+        )
+
+        self.assertEqual(expected_config, dhcp_conf.render_config_file())
+
+    def test_dhcp_config_with_subnets(self):
+        default_lease_time = 150
+        max_lease_time = 3000
+        subnets = [
+            # addresses from 10.10.1.0 - 10.10.1.255
+            Subnet(ipaddress.ip_network("10.10.1.0/24")),
+            # 4 addresses from 10.10.3.0 - 10.10.3.3
+            Subnet(ipaddress.ip_network("10.10.3.0/30")),
+            # 6 addresses from 10.10.5.20 - 10.10.5.25
+            Subnet(
+                ipaddress.ip_network("10.10.5.0/24"),
+                start=ipaddress.ip_address("10.10.5.20"),
+                end=ipaddress.ip_address("10.10.5.25"),
+                router=ipaddress.ip_address("10.10.5.255"),
+                lease_time=60,
+            ),
+        ]
+        dhcp_conf = DhcpConfig(
+            subnets=subnets,
+            default_lease_time=default_lease_time,
+            max_lease_time=max_lease_time,
+        )
+
+        # Unless an explicit start/end address is provided, the second
+        # address in the range is used for "start", and the second to
+        # last address is used for "end".
+        expected_config = (
+            f"default-lease-time {default_lease_time};\n"
+            f"max-lease-time {max_lease_time};\n"
+            "subnet 10.10.1.0 netmask 255.255.255.0 {\n"
+            "\tpool {\n"
+            "\t\toption subnet-mask 255.255.255.0;\n"
+            "\t\toption routers 10.10.1.1;\n"
+            "\t\trange 10.10.1.2 10.10.1.254;\n"
+            "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
+            "\t}\n"
+            "}\n"
+            "subnet 10.10.3.0 netmask 255.255.255.252 {\n"
+            "\tpool {\n"
+            "\t\toption subnet-mask 255.255.255.252;\n"
+            "\t\toption routers 10.10.3.1;\n"
+            "\t\trange 10.10.3.2 10.10.3.2;\n"
+            "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
+            "\t}\n"
+            "}\n"
+            "subnet 10.10.5.0 netmask 255.255.255.0 {\n"
+            "\tpool {\n"
+            "\t\toption subnet-mask 255.255.255.0;\n"
+            "\t\toption routers 10.10.5.255;\n"
+            "\t\trange 10.10.5.20 10.10.5.25;\n"
+            "\t\tdefault-lease-time 60;\n"
+            "\t\tmax-lease-time 60;\n"
+            "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
+            "\t}\n"
+            "}"
+        )
+
+        self.assertEqual(expected_config, dhcp_conf.render_config_file())
+
+    def test_additional_subnet_parameters_and_options(self):
+        default_lease_time = 150
+        max_lease_time = 3000
+        subnets = [
+            Subnet(
+                ipaddress.ip_network("10.10.1.0/24"),
+                additional_parameters={"allow": "unknown-clients", "foo": "bar"},
+                additional_options={"my-option": "some-value"},
+            ),
+        ]
+        dhcp_conf = DhcpConfig(
+            subnets=subnets,
+            default_lease_time=default_lease_time,
+            max_lease_time=max_lease_time,
+        )
+
+        # Unless an explicit start/end address is provided, the second
+        # address in the range is used for "start", and the second to
+        # last address is used for "end".
+        expected_config = (
+            f"default-lease-time {default_lease_time};\n"
+            f"max-lease-time {max_lease_time};\n"
+            "subnet 10.10.1.0 netmask 255.255.255.0 {\n"
+            "\tpool {\n"
+            "\t\toption subnet-mask 255.255.255.0;\n"
+            "\t\toption routers 10.10.1.1;\n"
+            "\t\trange 10.10.1.2 10.10.1.254;\n"
+            "\t\tallow unknown-clients;\n"
+            "\t\tfoo bar;\n"
+            "\t\toption my-option some-value;\n"
+            "\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n"
+            "\t}\n"
+            "}"
+        )
+
+        self.assertEqual(expected_config, dhcp_conf.render_config_file())
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/hostapd_test.py b/packages/antlion/unit_tests/controllers/ap_lib/hostapd_test.py
new file mode 100644
index 0000000..239c3fe
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/ap_lib/hostapd_test.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+from unittest.mock import Mock
+
+from antlion.controllers.ap_lib import hostapd
+from antlion.libs.proc.job import Result
+
+# MAC address that will be used in these tests.
+STA_MAC = "aa:bb:cc:dd:ee:ff"
+
+# Abbreviated output of hostapd_cli STA commands, showing various AUTH/ASSOC/AUTHORIZED states.
+STA_OUTPUT_WITHOUT_STA_AUTHENTICATED = b"""aa:bb:cc:dd:ee:ff
+flags=[WMM][HT][VHT]"""
+
+STA_OUTPUT_WITH_STA_AUTHENTICATED = b"""aa:bb:cc:dd:ee:ff
+flags=[AUTH][WMM][HT][VHT]"""
+
+STA_OUTPUT_WITH_STA_ASSOCIATED = b"""aa:bb:cc:dd:ee:ff
+flags=[AUTH][ASSOC][WMM][HT][VHT]
+aid=42"""
+
+STA_OUTPUT_WITH_STA_AUTHORIZED = b"""aa:bb:cc:dd:ee:ff
+flags=[AUTH][ASSOC][AUTHORIZED][WMM][HT][VHT]
+aid=42"""
+
+
+class HostapdTest(unittest.TestCase):
+    def test_sta_authenticated_true_for_authenticated_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(), stdout=STA_OUTPUT_WITH_STA_AUTHENTICATED, exit_status=0
+            )
+        )
+        self.assertTrue(hostapd_mock.sta_authenticated(STA_MAC))
+
+    def test_sta_authenticated_false_for_unauthenticated_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(),
+                stdout=STA_OUTPUT_WITHOUT_STA_AUTHENTICATED,
+                exit_status=0,
+            )
+        )
+        self.assertFalse(hostapd_mock.sta_authenticated(STA_MAC))
+
+    def test_sta_associated_true_for_associated_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(), stdout=STA_OUTPUT_WITH_STA_ASSOCIATED, exit_status=0
+            )
+        )
+        self.assertTrue(hostapd_mock.sta_associated(STA_MAC))
+
+    def test_sta_associated_false_for_unassociated_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        # Uses the authenticated-only CLI output.
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(), stdout=STA_OUTPUT_WITH_STA_AUTHENTICATED, exit_status=0
+            )
+        )
+        self.assertFalse(hostapd_mock.sta_associated(STA_MAC))
+
+    def test_sta_authorized_true_for_authorized_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(), stdout=STA_OUTPUT_WITH_STA_AUTHORIZED, exit_status=0
+            )
+        )
+        self.assertTrue(hostapd_mock.sta_authorized(STA_MAC))
+
+    def test_sta_authorized_false_for_unauthorized_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        # Uses the associated-only CLI output.
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(), stdout=STA_OUTPUT_WITH_STA_ASSOCIATED, exit_status=0
+            )
+        )
+        self.assertFalse(hostapd_mock.sta_authorized(STA_MAC))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py b/packages/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
new file mode 100644
index 0000000..17ee536
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from antlion.controllers.ap_lib.radio_measurement import (
+    BssidInformation,
+    BssidInformationCapabilities,
+    NeighborReportElement,
+    PhyType,
+)
+
+EXPECTED_BSSID = "01:23:45:ab:cd:ef"
+EXPECTED_BSSID_INFO_CAP = BssidInformationCapabilities(
+    spectrum_management=True, qos=True, apsd=True, radio_measurement=True
+)
+EXPECTED_OP_CLASS = 81
+EXPECTED_CHAN = 11
+EXPECTED_PHY = PhyType.HT
+EXPECTED_BSSID_INFO = BssidInformation(
+    capabilities=EXPECTED_BSSID_INFO_CAP, high_throughput=True
+)
+
+
+class RadioMeasurementTest(unittest.TestCase):
+    def test_bssid_information_capabilities(self):
+        self.assertTrue(EXPECTED_BSSID_INFO_CAP.spectrum_management)
+        self.assertTrue(EXPECTED_BSSID_INFO_CAP.qos)
+        self.assertTrue(EXPECTED_BSSID_INFO_CAP.apsd)
+        self.assertTrue(EXPECTED_BSSID_INFO_CAP.radio_measurement)
+        # Must also test the numeric representation.
+        self.assertEqual(int(EXPECTED_BSSID_INFO_CAP), 0b111100)
+
+    def test_bssid_information(self):
+        self.assertEqual(EXPECTED_BSSID_INFO.capabilities, EXPECTED_BSSID_INFO_CAP)
+        self.assertEqual(EXPECTED_BSSID_INFO.high_throughput, True)
+        # Must also test the numeric representation.
+        self.assertEqual(int(EXPECTED_BSSID_INFO), 0b10001111000100000000000000000000)
+
+    def test_neighbor_report_element(self):
+        element = NeighborReportElement(
+            bssid=EXPECTED_BSSID,
+            bssid_information=EXPECTED_BSSID_INFO,
+            operating_class=EXPECTED_OP_CLASS,
+            channel_number=EXPECTED_CHAN,
+            phy_type=EXPECTED_PHY,
+        )
+        self.assertEqual(element.bssid, EXPECTED_BSSID)
+        self.assertEqual(element.bssid_information, EXPECTED_BSSID_INFO)
+        self.assertEqual(element.operating_class, EXPECTED_OP_CLASS)
+        self.assertEqual(element.channel_number, EXPECTED_CHAN)
+        self.assertEqual(element.phy_type, EXPECTED_PHY)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/radvd_test.py b/packages/antlion/unit_tests/controllers/ap_lib/radvd_test.py
new file mode 100644
index 0000000..49324d9
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/ap_lib/radvd_test.py
@@ -0,0 +1,221 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import unittest
+from unittest.mock import patch
+
+from antlion.controllers.ap_lib import radvd_constants
+from antlion.controllers.ap_lib.radvd import Error, Radvd
+from antlion.controllers.ap_lib.radvd_config import RadvdConfig
+
+SEARCH_FILE = "antlion.controllers.utils_lib.commands.shell." "ShellCommand.search_file"
+DELETE_FILE = "antlion.controllers.utils_lib.commands.shell.ShellCommand." "delete_file"
+
+CORRECT_COMPLEX_RADVD_CONFIG = """interface wlan0 {
+    IgnoreIfMissing on;
+    AdvSendAdvert off;
+    UnicastOnly on;
+    MaxRtrAdvInterval 60;
+    MinRtrAdvInterval 5;
+    MinDelayBetweenRAs 5;
+    AdvManagedFlag off;
+    AdvOtherConfigFlag on;
+    AdvLinkMTU 1400;
+    AdvReachableTime 3600000;
+    AdvRetransTimer 10;
+    AdvCurHopLimit 50;
+    AdvDefaultLifetime 8000;
+    AdvDefaultPreference off;
+    AdvSourceLLAddress on;
+    AdvHomeAgentFlag off;
+    AdvHomeAgentInfo on;
+    HomeAgentLifetime 100;
+    HomeAgentPreference 100;
+    AdvMobRtrSupportFlag off;
+    AdvIntervalOpt on;
+    prefix fd00::/64
+    {
+        AdvOnLink off;
+        AdvAutonomous on;
+        AdvRouterAddr off;
+        AdvValidLifetime 86400;
+        AdvPreferredLifetime 14400;
+        Base6to4Interface NA;
+    };
+    clients
+    {
+        fe80::c66d:3c75:2cec:1d72;
+        fe80::c66d:3c75:2cec:1d73;
+    };
+    route fd00::/64 {
+        AdvRouteLifetime 1024;
+        AdvRoutePreference high;
+    };
+    RDNSS 2401:fa00:480:7a00:4d56:5373:4549:1e29 2401:fa00:480:7a00:4d56:5373:4549:1e30 {
+        AdvRDNSSPreference 8;
+        AdvRDNSSOpen on;
+        AdvRDNSSLifetime 1025;
+    };
+};""".replace(
+    "    ", "\t"
+)
+
+CORRECT_SIMPLE_RADVD_CONFIG = """interface wlan0 {
+    AdvSendAdvert on;
+    prefix fd00::/64
+    {
+        AdvOnLink on;
+        AdvAutonomous on;
+    };
+};""".replace(
+    "    ", "\t"
+)
+
+
+def delete_file_mock(file_to_delete):
+    if os.path.exists(file_to_delete):
+        os.remove(file_to_delete)
+
+
+def write_configs_mock(config_file_with_path, output_config):
+    with open(config_file_with_path, "w+") as config_fileId:
+        config_fileId.write(output_config)
+
+
+class RadvdTest(unittest.TestCase):
+    @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.kill")
+    def test_radvd_ikill(self, kill):
+        kill.return_value = True
+        radvd_mock = Radvd("mock_runner", "wlan0")
+        self.assertIsNone(radvd_mock.stop())
+
+    @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.is_alive")
+    def test_radvd_is_alive_True(self, is_alive_mock):
+        is_alive_mock.return_value = True
+        radvd_mock = Radvd("mock_runner", "wlan0")
+        self.assertTrue(radvd_mock.is_alive())
+
+    @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.is_alive")
+    def test_radvd_is_alive_False(self, is_alive_mock):
+        is_alive_mock.return_value = False
+        radvd_mock = Radvd("mock_runner", "wlan0")
+        self.assertFalse(radvd_mock.is_alive())
+
+    @patch("antlion.controllers.ap_lib.radvd.Radvd._scan_for_errors")
+    @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
+    def test_wait_for_process_process_alive(self, is_alive_mock, _scan_for_errors_mock):
+        is_alive_mock.return_value = True
+        _scan_for_errors_mock.return_value = True
+        radvd_mock = Radvd("mock_runner", "wlan0")
+        self.assertIsNone(radvd_mock._wait_for_process(timeout=2))
+
+    @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
+    @patch(SEARCH_FILE)
+    def test_scan_for_errors_is_dead(self, search_file_mock, is_alive_mock):
+        is_alive_mock.return_value = False
+        search_file_mock.return_value = False
+        radvd_mock = Radvd("mock_runner", "wlan0")
+        with self.assertRaises(Error) as context:
+            radvd_mock._scan_for_errors(True)
+        self.assertTrue("Radvd failed to start" in str(context.exception))
+
+    @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
+    @patch(SEARCH_FILE)
+    def test_scan_for_errors_exited_prematurely(self, search_file_mock, is_alive_mock):
+        is_alive_mock.return_value = True
+        search_file_mock.return_value = True
+        radvd_mock = Radvd("mock_runner", "wlan0")
+        with self.assertRaises(Error) as context:
+            radvd_mock._scan_for_errors(True)
+        self.assertTrue("Radvd exited prematurely." in str(context.exception))
+
+    @patch("antlion.controllers.ap_lib.radvd.Radvd.is_alive")
+    @patch(SEARCH_FILE)
+    def test_scan_for_errors_success(self, search_file_mock, is_alive_mock):
+        is_alive_mock.return_value = True
+        search_file_mock.return_value = False
+        radvd_mock = Radvd("mock_runner", "wlan0")
+        self.assertIsNone(radvd_mock._scan_for_errors(True))
+
+    @patch(DELETE_FILE)
+    @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.write_file")
+    def test_write_configs_simple(self, write_file, delete_file):
+        delete_file.side_effect = delete_file_mock
+        write_file.side_effect = write_configs_mock
+        basic_radvd_config = RadvdConfig()
+        radvd_mock = Radvd("mock_runner", "wlan0")
+        radvd_mock._write_configs(basic_radvd_config)
+        radvd_config = radvd_mock._config_file
+        with open(radvd_config, "r") as radvd_config_fileId:
+            config_data = radvd_config_fileId.read()
+            self.assertTrue(CORRECT_SIMPLE_RADVD_CONFIG == config_data)
+
+    @patch(DELETE_FILE)
+    @patch("antlion.controllers.utils_lib.commands.shell.ShellCommand.write_file")
+    def test_write_configs_complex(self, write_file, delete_file):
+        delete_file.side_effect = delete_file_mock
+        write_file.side_effect = write_configs_mock
+        complex_radvd_config = RadvdConfig(
+            clients=["fe80::c66d:3c75:2cec:1d72", "fe80::c66d:3c75:2cec:1d73"],
+            route=radvd_constants.DEFAULT_PREFIX,
+            rdnss=[
+                "2401:fa00:480:7a00:4d56:5373:4549:1e29",
+                "2401:fa00:480:7a00:4d56:5373:4549:1e30",
+            ],
+            ignore_if_missing=radvd_constants.IGNORE_IF_MISSING_ON,
+            adv_send_advert=radvd_constants.ADV_SEND_ADVERT_OFF,
+            unicast_only=radvd_constants.UNICAST_ONLY_ON,
+            max_rtr_adv_interval=60,
+            min_rtr_adv_interval=5,
+            min_delay_between_ras=5,
+            adv_managed_flag=radvd_constants.ADV_MANAGED_FLAG_OFF,
+            adv_other_config_flag=radvd_constants.ADV_OTHER_CONFIG_FLAG_ON,
+            adv_link_mtu=1400,
+            adv_reachable_time=3600000,
+            adv_retrans_timer=10,
+            adv_cur_hop_limit=50,
+            adv_default_lifetime=8000,
+            adv_default_preference=radvd_constants.ADV_DEFAULT_PREFERENCE_OFF,
+            adv_source_ll_address=radvd_constants.ADV_SOURCE_LL_ADDRESS_ON,
+            adv_home_agent_flag=radvd_constants.ADV_HOME_AGENT_FLAG_OFF,
+            adv_home_agent_info=radvd_constants.ADV_HOME_AGENT_INFO_ON,
+            home_agent_lifetime=100,
+            home_agent_preference=100,
+            adv_mob_rtr_support_flag=radvd_constants.ADV_MOB_RTR_SUPPORT_FLAG_OFF,
+            adv_interval_opt=radvd_constants.ADV_INTERVAL_OPT_ON,
+            adv_on_link=radvd_constants.ADV_ON_LINK_OFF,
+            adv_autonomous=radvd_constants.ADV_AUTONOMOUS_ON,
+            adv_router_addr=radvd_constants.ADV_ROUTER_ADDR_OFF,
+            adv_valid_lifetime=86400,
+            adv_preferred_lifetime=14400,
+            base_6to4_interface="NA",
+            adv_route_lifetime=1024,
+            adv_route_preference=radvd_constants.ADV_ROUTE_PREFERENCE_HIGH,
+            adv_rdnss_preference=8,
+            adv_rdnss_open=radvd_constants.ADV_RDNSS_OPEN_ON,
+            adv_rdnss_lifetime=1025,
+        )
+        radvd_mock = Radvd("mock_runner", "wlan0")
+        radvd_mock._write_configs(complex_radvd_config)
+        radvd_config = radvd_mock._config_file
+        with open(radvd_config, "r") as radvd_config_fileId:
+            config_data = radvd_config_fileId.read()
+            self.assertTrue(CORRECT_COMPLEX_RADVD_CONFIG == config_data)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py b/packages/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
new file mode 100644
index 0000000..0994a35
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from antlion.controllers.ap_lib.radio_measurement import (
+    BssidInformation,
+    NeighborReportElement,
+    PhyType,
+)
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionCandidateList,
+    BssTransitionManagementRequest,
+)
+
+EXPECTED_NEIGHBOR_1 = NeighborReportElement(
+    bssid="01:23:45:ab:cd:ef",
+    bssid_information=BssidInformation(),
+    operating_class=81,
+    channel_number=1,
+    phy_type=PhyType.HT,
+)
+EXPECTED_NEIGHBOR_2 = NeighborReportElement(
+    bssid="cd:ef:ab:45:67:89",
+    bssid_information=BssidInformation(),
+    operating_class=121,
+    channel_number=149,
+    phy_type=PhyType.VHT,
+)
+EXPECTED_NEIGHBORS = [EXPECTED_NEIGHBOR_1, EXPECTED_NEIGHBOR_2]
+EXPECTED_CANDIDATE_LIST = BssTransitionCandidateList(EXPECTED_NEIGHBORS)
+
+
+class WirelessNetworkManagementTest(unittest.TestCase):
+    def test_bss_transition_management_request(self):
+        request = BssTransitionManagementRequest(
+            disassociation_imminent=True,
+            abridged=True,
+            candidate_list=EXPECTED_NEIGHBORS,
+        )
+        self.assertTrue(request.disassociation_imminent)
+        self.assertTrue(request.abridged)
+        self.assertIn(EXPECTED_NEIGHBOR_1, request.candidate_list)
+        self.assertIn(EXPECTED_NEIGHBOR_2, request.candidate_list)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/iperf_client_test.py b/packages/antlion/unit_tests/controllers/iperf_client_test.py
new file mode 100644
index 0000000..f387cf9
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/iperf_client_test.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import os
+import unittest
+
+import mock
+
+from antlion.capabilities.ssh import SSHConfig, SSHProvider
+from antlion.controllers import iperf_client
+from antlion.controllers.iperf_client import (
+    IPerfClient,
+    IPerfClientBase,
+    IPerfClientOverAdb,
+    IPerfClientOverSsh,
+)
+
+# The position in the call tuple that represents the args array.
+ARGS = 0
+
+# The position in the call tuple that represents the kwargs dict.
+KWARGS = 1
+
+
+class IPerfClientModuleTest(unittest.TestCase):
+    """Tests the antlion.controllers.iperf_client module functions."""
+
+    def test_create_can_create_client_over_adb(self):
+        self.assertIsInstance(
+            iperf_client.create([{"AndroidDevice": "foo"}])[0],
+            IPerfClientOverAdb,
+            "Unable to create IPerfClientOverAdb from create().",
+        )
+
+    @mock.patch("subprocess.run")
+    @mock.patch("socket.create_connection")
+    def test_create_can_create_client_over_ssh(
+        self, mock_socket_create_connection, mock_subprocess_run
+    ):
+        self.assertIsInstance(
+            iperf_client.create(
+                [
+                    {
+                        "ssh_config": {
+                            "user": "root",
+                            "host": "192.168.42.11",
+                            "identity_file": "/dev/null",
+                        }
+                    }
+                ]
+            )[0],
+            IPerfClientOverSsh,
+            "Unable to create IPerfClientOverSsh from create().",
+        )
+
+    def test_create_can_create_local_client(self):
+        self.assertIsInstance(
+            iperf_client.create([{}])[0],
+            IPerfClient,
+            "Unable to create IPerfClient from create().",
+        )
+
+
+class IPerfClientBaseTest(unittest.TestCase):
+    """Tests antlion.controllers.iperf_client.IPerfClientBase."""
+
+    @mock.patch("os.makedirs")
+    def test_get_full_file_path_creates_parent_directory(self, mock_makedirs):
+        # Will never actually be created/used.
+        logging.log_path = "/tmp/unit_test_garbage"
+
+        full_file_path = IPerfClientBase._get_full_file_path(0)
+
+        self.assertTrue(mock_makedirs.called, "Did not attempt to create a directory.")
+        self.assertEqual(
+            os.path.dirname(full_file_path),
+            mock_makedirs.call_args[ARGS][0],
+            "The parent directory of the full file path was not created.",
+        )
+
+
+class IPerfClientTest(unittest.TestCase):
+    """Tests antlion.controllers.iperf_client.IPerfClient."""
+
+    @mock.patch("builtins.open")
+    @mock.patch("subprocess.call")
+    def test_start_writes_to_full_file_path(self, mock_call, mock_open):
+        client = IPerfClient()
+        file_path = "/path/to/foo"
+        client._get_full_file_path = lambda _: file_path
+
+        client.start("127.0.0.1", "IPERF_ARGS", "TAG")
+
+        mock_open.assert_called_with(file_path, "w")
+        self.assertEqual(
+            mock_call.call_args[KWARGS]["stdout"],
+            mock_open().__enter__.return_value,
+            "IPerfClient did not write the logs to the expected file.",
+        )
+
+
+class IPerfClientOverSshTest(unittest.TestCase):
+    """Test antlion.controllers.iperf_client.IPerfClientOverSsh."""
+
+    @mock.patch("socket.create_connection")
+    @mock.patch("subprocess.run")
+    @mock.patch("builtins.open")
+    def test_start_writes_output_to_full_file_path(
+        self, mock_open, mock_subprocess_run, mock_socket_create_connection
+    ):
+        ssh_provider = SSHProvider(
+            SSHConfig(
+                user="root",
+                host_name="192.168.42.11",
+                identity_file="/dev/null",
+            )
+        )
+        client = IPerfClientOverSsh(ssh_provider)
+        file_path = "/path/to/foo"
+        client._get_full_file_path = lambda _: file_path
+        client.start("127.0.0.1", "IPERF_ARGS", "TAG")
+        mock_open.assert_called_with(file_path, "w")
+        mock_open().__enter__().write.assert_called()
+
+
+class IPerfClientOverAdbTest(unittest.TestCase):
+    """Test antlion.controllers.iperf_client.IPerfClientOverAdb."""
+
+    @mock.patch("builtins.open")
+    def test_start_writes_output_to_full_file_path(self, mock_open):
+        client = IPerfClientOverAdb(None)
+        file_path = "/path/to/foo"
+        client._get_full_file_path = lambda _: file_path
+
+        with mock.patch(
+            "antlion.controllers.iperf_client." "IPerfClientOverAdb._android_device"
+        ) as adb_device:
+            adb_device.adb.shell.return_value = "output"
+            client.start("127.0.0.1", "IPERF_ARGS", "TAG")
+
+        mock_open.assert_called_with(file_path, "w")
+        mock_open().__enter__().write.assert_called_with("output")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/iperf_server_test.py b/packages/antlion/unit_tests/controllers/iperf_server_test.py
new file mode 100644
index 0000000..75b8a5e
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/iperf_server_test.py
@@ -0,0 +1,385 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import os
+import unittest
+
+import mock
+
+from antlion.controllers import iperf_server
+from antlion.controllers.iperf_server import (
+    IPerfServer,
+    IPerfServerOverAdb,
+    IPerfServerOverSsh,
+)
+from antlion.controllers.utils_lib.ssh import settings
+
+# The position in the call tuple that represents the args array.
+ARGS = 0
+
+# The position in the call tuple that represents the kwargs dict.
+KWARGS = 1
+
+MOCK_LOGFILE_PATH = "/path/to/foo"
+
+
+class IPerfServerModuleTest(unittest.TestCase):
+    """Tests the antlion.controllers.iperf_server module."""
+
+    def test_create_creates_local_iperf_server_with_int(self):
+        self.assertIsInstance(
+            iperf_server.create([12345])[0],
+            IPerfServer,
+            "create() failed to create IPerfServer for integer input.",
+        )
+
+    def test_create_creates_local_iperf_server_with_str(self):
+        self.assertIsInstance(
+            iperf_server.create(["12345"])[0],
+            IPerfServer,
+            "create() failed to create IPerfServer for string input.",
+        )
+
+    def test_create_cannot_create_local_iperf_server_with_bad_str(self):
+        with self.assertRaises(ValueError):
+            iperf_server.create(["12345BAD_STRING"])
+
+    @mock.patch("antlion.controllers.iperf_server.utils")
+    def test_create_creates_server_over_ssh_with_ssh_config_and_port(self, _):
+        self.assertIsInstance(
+            iperf_server.create(
+                [
+                    {
+                        "ssh_config": {
+                            "user": "",
+                            "host": "",
+                            "identity_file": "/dev/null",
+                        },
+                        "port": "",
+                    }
+                ]
+            )[0],
+            IPerfServerOverSsh,
+            "create() failed to create IPerfServerOverSsh for a valid config.",
+        )
+
+    def test_create_creates_server_over_adb_with_proper_config(self):
+        self.assertIsInstance(
+            iperf_server.create([{"AndroidDevice": "53R147", "port": 0}])[0],
+            IPerfServerOverAdb,
+            "create() failed to create IPerfServerOverAdb for a valid config.",
+        )
+
+    def test_create_raises_value_error_on_bad_config_dict(self):
+        with self.assertRaises(ValueError):
+            iperf_server.create([{"AndroidDevice": "53R147", "ssh_config": {}}])
+
+    def test_get_port_from_ss_output_returns_correct_port_ipv4(self):
+        ss_output = (
+            "tcp LISTEN  0 5 127.0.0.1:<PORT>  *:*" ' users:(("cmd",pid=<PID>,fd=3))'
+        )
+        self.assertEqual(
+            iperf_server._get_port_from_ss_output(ss_output, "<PID>"), "<PORT>"
+        )
+
+    def test_get_port_from_ss_output_returns_correct_port_ipv6(self):
+        ss_output = (
+            "tcp LISTEN  0 5 ff:ff:ff:ff:ff:ff:<PORT>  *:*"
+            ' users:(("cmd",pid=<PID>,fd=3))'
+        )
+        self.assertEqual(
+            iperf_server._get_port_from_ss_output(ss_output, "<PID>"), "<PORT>"
+        )
+
+
+class IPerfServerBaseTest(unittest.TestCase):
+    """Tests antlion.controllers.iperf_server.IPerfServerBase."""
+
+    @mock.patch("os.makedirs")
+    def test_get_full_file_path_creates_parent_directory(self, mock_makedirs):
+        # Will never actually be created/used.
+        logging.log_path = "/tmp/unit_test_garbage"
+
+        server = IPerfServer("port")
+
+        full_file_path = server._get_full_file_path()
+
+        self.assertTrue(mock_makedirs.called, "Did not attempt to create a directory.")
+        self.assertEqual(
+            os.path.dirname(full_file_path),
+            mock_makedirs.call_args[ARGS][0],
+            "The parent directory of the full file path was not created.",
+        )
+
+
+class IPerfServerTest(unittest.TestCase):
+    """Tests antlion.controllers.iperf_server.IPerfServer."""
+
+    PID = 123456
+
+    def setUp(self):
+        iperf_server._get_port_from_ss_output = lambda *_: IPerfServerTest.PID
+
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.subprocess")
+    @mock.patch("antlion.controllers.iperf_server.job")
+    def test_start_makes_started_true(self, mock_job, __, ___):
+        """Tests calling start() without calling stop() makes started True."""
+        server = IPerfServer("port")
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        server.start()
+
+        self.assertTrue(server.started)
+
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.subprocess")
+    @mock.patch("antlion.controllers.iperf_server.job")
+    def test_start_stop_makes_started_false(self, _, __, ___):
+        """Tests calling start() then stop() makes started False."""
+        server = IPerfServer("port")
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+
+        server.start()
+        server.stop()
+
+        self.assertFalse(server.started)
+
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.subprocess")
+    @mock.patch("antlion.controllers.iperf_server.job")
+    def test_start_sets_current_log_file(self, _, __, ___):
+        server = IPerfServer("port")
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+
+        server.start()
+
+        self.assertEqual(
+            server._current_log_file,
+            MOCK_LOGFILE_PATH,
+            "The _current_log_file was not received from _get_full_file_path.",
+        )
+
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.subprocess")
+    def test_stop_returns_current_log_file(self, _, __):
+        server = IPerfServer("port")
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        server._current_log_file = MOCK_LOGFILE_PATH
+        server._iperf_process = mock.Mock()
+
+        log_file = server.stop()
+
+        self.assertEqual(
+            log_file,
+            MOCK_LOGFILE_PATH,
+            "The _current_log_file was not returned by stop().",
+        )
+
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.subprocess")
+    @mock.patch("antlion.controllers.iperf_server.job")
+    def test_start_does_not_run_two_concurrent_processes(self, start_proc, _, __):
+        server = IPerfServer("port")
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        server._iperf_process = mock.Mock()
+
+        server.start()
+
+        self.assertFalse(
+            start_proc.called,
+            "start() should not begin a second process if another is running.",
+        )
+
+    @mock.patch("antlion.utils.stop_standing_subprocess")
+    def test_stop_exits_early_if_no_process_has_started(self, stop_proc):
+        server = IPerfServer("port")
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        server._iperf_process = None
+
+        server.stop()
+
+        self.assertFalse(
+            stop_proc.called,
+            "stop() should not kill a process if no process is running.",
+        )
+
+
+class IPerfServerOverSshTest(unittest.TestCase):
+    """Tests antlion.controllers.iperf_server.IPerfServerOverSsh."""
+
+    INIT_ARGS = [
+        settings.from_config(
+            {"host": "TEST_HOST", "user": "test", "identity_file": "/dev/null"}
+        ),
+        "PORT",
+    ]
+
+    @mock.patch("antlion.controllers.iperf_server.connection")
+    def test_start_makes_started_true(self, _):
+        """Tests calling start() without calling stop() makes started True."""
+        server = IPerfServerOverSsh(*self.INIT_ARGS)
+        server._ssh_session = mock.Mock()
+        server._cleanup_iperf_port = mock.Mock()
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+
+        server.start()
+
+        self.assertTrue(server.started)
+
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.connection")
+    def test_start_stop_makes_started_false(self, _, __):
+        """Tests calling start() then stop() makes started False."""
+        server = IPerfServerOverSsh(*self.INIT_ARGS)
+        server._ssh_session = mock.Mock()
+        server._cleanup_iperf_port = mock.Mock()
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+
+        server.start()
+        server.stop()
+
+        self.assertFalse(server.started)
+
+    @mock.patch("builtins.open")
+    @mock.patch("antlion.controllers.iperf_server.connection")
+    def test_stop_returns_expected_log_file(self, _, __):
+        server = IPerfServerOverSsh(*self.INIT_ARGS)
+        server._ssh_session = mock.Mock()
+        server._cleanup_iperf_port = mock.Mock()
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        server._iperf_pid = mock.Mock()
+
+        log_file = server.stop()
+
+        self.assertEqual(
+            log_file,
+            MOCK_LOGFILE_PATH,
+            "The expected log file was not returned by stop().",
+        )
+
+    @mock.patch("antlion.controllers.iperf_server.connection")
+    def test_start_does_not_run_two_concurrent_processes(self, _):
+        server = IPerfServerOverSsh(*self.INIT_ARGS)
+        server._ssh_session = mock.Mock()
+        server._cleanup_iperf_port = mock.Mock()
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        server._iperf_pid = mock.Mock()
+
+        server.start()
+
+        self.assertFalse(
+            server._ssh_session.run_async.called,
+            "start() should not begin a second process if another is running.",
+        )
+
+    @mock.patch("antlion.utils.stop_standing_subprocess")
+    @mock.patch("antlion.controllers.iperf_server.connection")
+    def test_stop_exits_early_if_no_process_has_started(self, _, __):
+        server = IPerfServerOverSsh(*self.INIT_ARGS)
+        server._ssh_session = mock.Mock()
+        server._cleanup_iperf_port = mock.Mock()
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        server._iperf_pid = None
+
+        server.stop()
+
+        self.assertFalse(
+            server._ssh_session.run_async.called,
+            "stop() should not kill a process if no process is running.",
+        )
+
+
+class IPerfServerOverAdbTest(unittest.TestCase):
+    """Tests antlion.controllers.iperf_server.IPerfServerOverAdb."""
+
+    ANDROID_DEVICE_PROP = (
+        "antlion.controllers.iperf_server." "IPerfServerOverAdb._android_device"
+    )
+
+    @mock.patch(ANDROID_DEVICE_PROP)
+    def test_start_makes_started_true(self, mock_ad):
+        """Tests calling start() without calling stop() makes started True."""
+        server = IPerfServerOverAdb("53R147", "PORT")
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        mock_ad.adb.shell.return_value = "<PID>"
+
+        server.start()
+
+        self.assertTrue(server.started)
+
+    @mock.patch("antlion.libs.proc.job.run")
+    @mock.patch("builtins.open")
+    @mock.patch(ANDROID_DEVICE_PROP)
+    def test_start_stop_makes_started_false(self, mock_ad, _, __):
+        """Tests calling start() then stop() makes started False."""
+        server = IPerfServerOverAdb("53R147", "PORT")
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        mock_ad.adb.shell.side_effect = ["<PID>", "", "", ""]
+
+        server.start()
+        server.stop()
+
+        self.assertFalse(server.started)
+
+    @mock.patch("antlion.libs.proc.job.run")
+    @mock.patch("builtins.open")
+    @mock.patch(ANDROID_DEVICE_PROP)
+    def test_stop_returns_expected_log_file(self, mock_ad, _, __):
+        server = IPerfServerOverAdb("53R147", "PORT")
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        server._iperf_process = mock.Mock()
+        server._iperf_process_adb_pid = "<PID>"
+        mock_ad.adb.shell.side_effect = ["", "", ""]
+
+        log_file = server.stop()
+
+        self.assertEqual(
+            log_file,
+            MOCK_LOGFILE_PATH,
+            "The expected log file was not returned by stop().",
+        )
+
+    @mock.patch(ANDROID_DEVICE_PROP)
+    def test_start_does_not_run_two_concurrent_processes(self, android_device):
+        server = IPerfServerOverAdb("53R147", "PORT")
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        server._iperf_process = mock.Mock()
+
+        server.start()
+
+        self.assertFalse(
+            android_device.adb.shell_nb.called,
+            "start() should not begin a second process if another is running.",
+        )
+
+    @mock.patch("antlion.libs.proc.job.run")
+    @mock.patch("builtins.open")
+    @mock.patch(ANDROID_DEVICE_PROP)
+    def test_stop_exits_early_if_no_process_has_started(self, android_device, _, __):
+        server = IPerfServerOverAdb("53R147", "PORT")
+        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
+        server._iperf_pid = None
+
+        server.stop()
+
+        self.assertFalse(
+            android_device.adb.shell_nb.called,
+            "stop() should not kill a process if no process is running.",
+        )
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/pdu_lib/__init__.py b/packages/antlion/unit_tests/controllers/pdu_lib/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/pdu_lib/__init__.py
rename to packages/antlion/unit_tests/controllers/pdu_lib/__init__.py
diff --git a/src/antlion/unit_tests/controllers/pdu_lib/synaccess/__init__.py b/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/pdu_lib/synaccess/__init__.py
rename to packages/antlion/unit_tests/controllers/pdu_lib/synaccess/__init__.py
diff --git a/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py b/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
new file mode 100644
index 0000000..b852fe0
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Python unittest module for pdu_lib.synaccess.np02b"""
+
+import unittest
+from unittest.mock import patch
+
+from antlion.controllers.pdu import PduError
+from antlion.controllers.pdu_lib.synaccess.np02b import PduDevice, _TNHelperNP02B
+
+# Test Constants
+HOST = "192.168.1.2"
+VALID_COMMAND_STR = "cmd"
+VALID_COMMAND_BYTE_STR = b"cmd\n\r"
+VALID_RESPONSE_STR = ""
+VALID_RESPONSE_BYTE_STR = b"\n\r\r\n\r\n"
+STATUS_COMMAND_STR = "pshow"
+STATUS_COMMAND_BYTE_STR = b"pshow\n\r"
+STATUS_RESPONSE_STR = (
+    "Port | Name    |Status   1 |    Outlet1 |   OFF|   2 |    Outlet2 |   ON |"
+)
+STATUS_RESPONSE_BYTE_STR = (
+    b"Port | Name    |Status   1 |    Outlet1 |   OFF|   2 |    Outlet2 |   "
+    b"ON |\n\r\r\n\r\n"
+)
+INVALID_COMMAND_OUTPUT_BYTE_STR = b"Invalid Command\n\r\r\n\r\n>"
+VALID_STATUS_DICT = {"1": False, "2": True}
+INVALID_STATUS_DICT = {"1": False, "2": False}
+
+
+class _TNHelperNP02BTest(unittest.TestCase):
+    """Unit tests for _TNHelperNP02B."""
+
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
+    def test_cmd_is_properly_written(self, telnetlib_mock, sleep_mock):
+        """cmd should strip whitespace and encode in ASCII."""
+        tnhelper = _TNHelperNP02B(HOST)
+        telnetlib_mock.Telnet().read_until.return_value = VALID_RESPONSE_BYTE_STR
+        tnhelper.cmd(VALID_COMMAND_STR)
+        telnetlib_mock.Telnet().write.assert_called_with(VALID_COMMAND_BYTE_STR)
+
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
+    def test_cmd_valid_command_output_is_properly_parsed(
+        self, telnetlib_mock, sleep_mock
+    ):
+        """cmd should strip the prompt, separators and command from the
+        output."""
+        tnhelper = _TNHelperNP02B(HOST)
+        telnetlib_mock.Telnet().read_until.return_value = VALID_RESPONSE_BYTE_STR
+        res = tnhelper.cmd(VALID_COMMAND_STR)
+        self.assertEqual(res, VALID_RESPONSE_STR)
+
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
+    def test_cmd_status_output_is_properly_parsed(self, telnetlib_mock, sleep_mock):
+        """cmd should strip the prompt, separators and command from the output,
+        returning just the status information."""
+        tnhelper = _TNHelperNP02B(HOST)
+        telnetlib_mock.Telnet().read_until.return_value = STATUS_RESPONSE_BYTE_STR
+        res = tnhelper.cmd(STATUS_COMMAND_STR)
+        self.assertEqual(res, STATUS_RESPONSE_STR)
+
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.telnetlib")
+    def test_cmd_invalid_command_raises_error(self, telnetlib_mock, sleep_mock):
+        """cmd should raise PduError when an invalid command is given."""
+        tnhelper = _TNHelperNP02B(HOST)
+        telnetlib_mock.Telnet().read_until.return_value = (
+            INVALID_COMMAND_OUTPUT_BYTE_STR
+        )
+        with self.assertRaises(PduError):
+            tnhelper.cmd("Some invalid command.")
+
+
+class NP02BPduDeviceTest(unittest.TestCase):
+    """Unit tests for NP02B PduDevice implementation."""
+
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd")
+    def test_status_parses_output_to_valid_dictionary(self, tnhelper_cmd_mock):
+        """status should parse helper response correctly into dict."""
+        np02b = PduDevice(HOST, None, None)
+        tnhelper_cmd_mock.return_value = STATUS_RESPONSE_STR
+        self.assertEqual(np02b.status(), VALID_STATUS_DICT)
+
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd")
+    def test_verify_state_matches_state(self, tnhelper_cmd_mock):
+        """verify_state should return true when expected state is a subset of
+        actual state"""
+        np02b = PduDevice(HOST, None, None)
+        tnhelper_cmd_mock.return_value = STATUS_RESPONSE_STR
+        self.assertTrue(np02b._verify_state(VALID_STATUS_DICT))
+
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b.time")
+    @patch("antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd")
+    def test_verify_state_throws_error(self, tnhelper_cmd_mock, time_mock):
+        """verify_state should throw error after timeout when actual state never
+        reaches expected state"""
+        time_mock.time.side_effect = [1, 2, 10]
+        np02b = PduDevice(HOST, None, None)
+        tnhelper_cmd_mock.return_value = STATUS_RESPONSE_STR
+        with self.assertRaises(PduError):
+            self.assertTrue(np02b._verify_state(INVALID_STATUS_DICT))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/__init__.py b/packages/antlion/unit_tests/controllers/sl4a_lib/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/sl4a_lib/__init__.py
rename to packages/antlion/unit_tests/controllers/sl4a_lib/__init__.py
diff --git a/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
new file mode 100755
index 0000000..c5f49ac
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
@@ -0,0 +1,248 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import unittest
+
+import mock
+
+from antlion.controllers.sl4a_lib import rpc_client
+
+
+class BreakoutError(Exception):
+    """Thrown to prove program execution."""
+
+
+class RpcClientTest(unittest.TestCase):
+    """Tests the rpc_client.RpcClient class."""
+
+    def test_terminate_warn_on_working_connections(self):
+        """Tests rpc_client.RpcClient.terminate().
+
+        Tests that if some connections are still working, we log this before
+        closing the connections.
+        """
+        session = mock.Mock()
+
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
+        client._log = mock.Mock()
+        client._working_connections = [mock.Mock()]
+
+        client.terminate()
+
+        self.assertTrue(client._log.warning.called)
+
+    def test_terminate_closes_all_connections(self):
+        """Tests rpc_client.RpcClient.terminate().
+
+        Tests that all free and working connections have been closed.
+        """
+        session = mock.Mock()
+
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
+        client._log = mock.Mock()
+        working_connections = [mock.Mock() for _ in range(3)]
+        free_connections = [mock.Mock() for _ in range(3)]
+        client._free_connections = free_connections
+        client._working_connections = working_connections
+
+        client.terminate()
+
+        for connection in working_connections + free_connections:
+            self.assertTrue(connection.close.called)
+
+    def test_get_free_connection_get_available_client(self):
+        """Tests rpc_client.RpcClient._get_free_connection().
+
+        Tests that an available client is returned if one exists.
+        """
+
+        def fail_on_wrong_execution():
+            self.fail(
+                "The program is not executing the expected path. "
+                "Tried to return an available free client, ended up "
+                "sleeping to wait for client instead."
+            )
+
+        session = mock.Mock()
+
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
+        expected_connection = mock.Mock()
+        client._free_connections = [expected_connection]
+        client._lock = mock.MagicMock()
+
+        with mock.patch("time.sleep") as sleep_mock:
+            sleep_mock.side_effect = fail_on_wrong_execution
+
+            connection = client._get_free_connection()
+
+        self.assertEqual(connection, expected_connection)
+        self.assertTrue(expected_connection in client._working_connections)
+        self.assertEqual(len(client._free_connections), 0)
+
+    def test_get_free_connection_continues_upon_connection_taken(self):
+        """Tests rpc_client.RpcClient._get_free_connection().
+
+        Tests that if the free connection is taken while trying to acquire the
+        lock to reserve it, the thread gives up the lock and tries again.
+        """
+
+        def empty_list():
+            client._free_connections.clear()
+
+        def fail_on_wrong_execution():
+            self.fail(
+                "The program is not executing the expected path. "
+                "Tried to return an available free client, ended up "
+                "sleeping to wait for client instead."
+            )
+
+        session = mock.Mock()
+
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
+        client._free_connections = mock.Mock()
+        client._lock = mock.MagicMock()
+        client._lock.acquire.side_effect = empty_list
+        client._free_connections = [mock.Mock()]
+
+        with mock.patch("time.sleep") as sleep_mock:
+            sleep_mock.side_effect = fail_on_wrong_execution
+
+            try:
+                client._get_free_connection()
+            except IndexError:
+                self.fail(
+                    "Tried to pop free connection when another thread " "has taken it."
+                )
+        # Assert that the lock has been freed.
+        self.assertEqual(
+            client._lock.acquire.call_count, client._lock.release.call_count
+        )
+
+    def test_get_free_connection_sleep(self):
+        """Tests rpc_client.RpcClient._get_free_connection().
+
+        Tests that if the free connection is taken, it will wait for a new one.
+        """
+
+        session = mock.Mock()
+
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
+        client._free_connections = []
+        client.max_connections = 0
+        client._lock = mock.MagicMock()
+        client._free_connections = []
+
+        with mock.patch("time.sleep") as sleep_mock:
+            sleep_mock.side_effect = BreakoutError()
+            try:
+                client._get_free_connection()
+            except BreakoutError:
+                # Assert that the lock has been freed.
+                self.assertEqual(
+                    client._lock.acquire.call_count, client._lock.release.call_count
+                )
+                # Asserts that the sleep has been called.
+                self.assertTrue(sleep_mock.called)
+                # Asserts that no changes to connections happened
+                self.assertEqual(len(client._free_connections), 0)
+                self.assertEqual(len(client._working_connections), 0)
+                return True
+        self.fail("Failed to hit sleep case")
+
+    def test_release_working_connection(self):
+        """Tests rpc_client.RpcClient._release_working_connection.
+
+        Tests that the working connection is moved into the free connections.
+        """
+        session = mock.Mock()
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
+
+        connection = mock.Mock()
+        client._working_connections = [connection]
+        client._free_connections = []
+        client._release_working_connection(connection)
+
+        self.assertTrue(connection in client._free_connections)
+        self.assertFalse(connection in client._working_connections)
+
+    def test_future(self):
+        """Tests rpc_client.RpcClient.future."""
+        session = mock.Mock()
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
+
+        self.assertEqual(client.future, client._async_client)
+
+    def test_getattr(self):
+        """Tests rpc_client.RpcClient.__getattr__.
+
+        Tests that the name, args, and kwargs are correctly passed to self.rpc.
+        """
+        session = mock.Mock()
+        client = rpc_client.RpcClient(
+            session.uid,
+            session.adb.serial,
+            lambda _: mock.Mock(),
+            lambda _: mock.Mock(),
+        )
+        client.rpc = mock.MagicMock()
+        fn = client.fake_function_please_do_not_be_implemented
+
+        fn("arg1", "arg2", kwarg1=1, kwarg2=2)
+        client.rpc.assert_called_with(
+            "fake_function_please_do_not_be_implemented",
+            "arg1",
+            "arg2",
+            kwarg1=1,
+            kwarg2=2,
+        )
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
new file mode 100755
index 0000000..34b6384
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import unittest
+
+import mock
+
+from antlion.controllers.sl4a_lib import rpc_client, rpc_connection
+
+MOCK_RESP = b'{"id": 0, "result": 123, "error": null, "status": 1, "uid": 1}'
+MOCK_RESP_UNKNOWN_UID = b'{"id": 0, "result": 123, "error": null, "status": 0}'
+MOCK_RESP_WITH_ERROR = b'{"id": 0, "error": 1, "status": 1, "uid": 1}'
+
+
+class MockSocketFile(object):
+    def __init__(self, resp):
+        self.resp = resp
+        self.last_write = None
+
+    def write(self, msg):
+        self.last_write = msg
+
+    def readline(self):
+        return self.resp
+
+    def flush(self):
+        pass
+
+
+class RpcConnectionTest(unittest.TestCase):
+    """This test class has unit tests for the implementation of everything
+    under antlion.controllers.android, which is the RPC client module for sl4a.
+    """
+
+    @staticmethod
+    def mock_rpc_connection(response=MOCK_RESP, uid=rpc_connection.UNKNOWN_UID):
+        """Sets up a faked socket file from the mock connection."""
+        fake_file = MockSocketFile(response)
+        fake_conn = mock.MagicMock()
+        fake_conn.makefile.return_value = fake_file
+        adb = mock.Mock()
+        ports = mock.Mock()
+
+        return rpc_connection.RpcConnection(adb, ports, fake_conn, fake_file, uid=uid)
+
+    def test_open_chooses_init_on_unknown_uid(self):
+        """Tests rpc_connection.RpcConnection.open().
+
+        Tests that open uses the init start command when the uid is unknown.
+        """
+
+        def pass_on_init(start_command):
+            if not start_command == rpc_connection.Sl4aConnectionCommand.INIT:
+                self.fail(f'Must call "init". Called "{start_command}" instead.')
+
+        connection = self.mock_rpc_connection()
+        connection._initiate_handshake = pass_on_init
+        connection.open()
+
+    def test_open_chooses_continue_on_known_uid(self):
+        """Tests rpc_connection.RpcConnection.open().
+
+        Tests that open uses the continue start command when the uid is known.
+        """
+
+        def pass_on_continue(start_command):
+            if start_command != rpc_connection.Sl4aConnectionCommand.CONTINUE:
+                self.fail(f'Must call "continue". Called "{start_command}" instead.')
+
+        connection = self.mock_rpc_connection(uid=1)
+        connection._initiate_handshake = pass_on_continue
+        connection.open()
+
+    def test_initiate_handshake_returns_uid(self):
+        """Tests rpc_connection.RpcConnection._initiate_handshake().
+
+        Test that at the end of a handshake with no errors the client object
+        has the correct parameters.
+        """
+        connection = self.mock_rpc_connection()
+        connection._initiate_handshake(rpc_connection.Sl4aConnectionCommand.INIT)
+
+        self.assertEqual(connection.uid, 1)
+
+    def test_initiate_handshake_returns_unknown_status(self):
+        """Tests rpc_connection.RpcConnection._initiate_handshake().
+
+        Test that when the handshake is given an unknown uid then the client
+        will not be given a uid.
+        """
+        connection = self.mock_rpc_connection(MOCK_RESP_UNKNOWN_UID)
+        connection._initiate_handshake(rpc_connection.Sl4aConnectionCommand.INIT)
+
+        self.assertEqual(connection.uid, rpc_client.UNKNOWN_UID)
+
+    def test_initiate_handshake_no_response(self):
+        """Tests rpc_connection.RpcConnection._initiate_handshake().
+
+        Test that if a handshake receives no response then it will give a
+        protocol error.
+        """
+        connection = self.mock_rpc_connection(b"")
+
+        with self.assertRaises(
+            rpc_client.Sl4aProtocolError,
+            msg=rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE,
+        ):
+            connection._initiate_handshake(rpc_connection.Sl4aConnectionCommand.INIT)
+
+    def test_cmd_properly_formatted(self):
+        """Tests rpc_connection.RpcConnection._cmd().
+
+        Tests that the command sent is properly formatted.
+        """
+        connection = self.mock_rpc_connection(MOCK_RESP)
+        connection._cmd("test")
+        self.assertIn(
+            connection._socket_file.last_write,
+            [b'{"cmd": "test", "uid": -1}\n', b'{"uid": -1, "cmd": "test"}\n'],
+        )
+
+    def test_get_new_ticket(self):
+        """Tests rpc_connection.RpcConnection.get_new_ticket().
+
+        Tests that a new number is always given for get_new_ticket().
+        """
+        connection = self.mock_rpc_connection(MOCK_RESP)
+        self.assertEqual(connection.get_new_ticket() + 1, connection.get_new_ticket())
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
new file mode 100755
index 0000000..6d42989
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
@@ -0,0 +1,462 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import unittest
+
+import mock
+
+from antlion.controllers.sl4a_lib import rpc_client, sl4a_manager
+
+
+class Sl4aManagerFactoryTest(unittest.TestCase):
+    """Tests the sl4a_manager module-level functions."""
+
+    def setUp(self):
+        """Clears the Sl4aManager cache."""
+        sl4a_manager._all_sl4a_managers = {}
+
+    def test_create_manager(self):
+        """Tests sl4a_manager.create_sl4a_manager().
+
+        Tests that a new Sl4aManager is returned without an error.
+        """
+        adb = mock.Mock()
+        adb.serial = "SERIAL"
+        sl4a_man = sl4a_manager.create_sl4a_manager(adb)
+        self.assertEqual(sl4a_man.adb, adb)
+
+    def test_create_sl4a_manager_return_already_created_manager(self):
+        """Tests sl4a_manager.create_sl4a_manager().
+
+        Tests that a second call to create_sl4a_manager() does not create a
+        new Sl4aManager, and returns the first created Sl4aManager instead.
+        """
+        adb = mock.Mock()
+        adb.serial = "SERIAL"
+        first_manager = sl4a_manager.create_sl4a_manager(adb)
+
+        adb_same_serial = mock.Mock()
+        adb_same_serial.serial = "SERIAL"
+        second_manager = sl4a_manager.create_sl4a_manager(adb)
+
+        self.assertEqual(first_manager, second_manager)
+
+    def test_create_sl4a_manager_multiple_devices_with_one_manager_each(self):
+        """Tests sl4a_manager.create_sl4a_manager().
+
+        Tests that when create_sl4a_manager() is called for different devices,
+        each device gets its own Sl4aManager object.
+        """
+        adb_1 = mock.Mock()
+        adb_1.serial = "SERIAL"
+        first_manager = sl4a_manager.create_sl4a_manager(adb_1)
+
+        adb_2 = mock.Mock()
+        adb_2.serial = "DIFFERENT_SERIAL_NUMBER"
+        second_manager = sl4a_manager.create_sl4a_manager(adb_2)
+
+        self.assertNotEqual(first_manager, second_manager)
+
+
+class Sl4aManagerTest(unittest.TestCase):
+    """Tests the sl4a_manager.Sl4aManager class."""
+
+    ATTEMPT_INTERVAL = 0.25
+    MAX_WAIT_ON_SERVER_SECONDS = 1
+    _SL4A_LAUNCH_SERVER_CMD = ""
+    _SL4A_CLOSE_SERVER_CMD = ""
+    _SL4A_ROOT_FIND_PORT_CMD = ""
+    _SL4A_USER_FIND_PORT_CMD = ""
+    _SL4A_START_SERVICE_CMD = ""
+
+    @classmethod
+    def setUpClass(cls):
+        # Copy all module constants before testing begins.
+        Sl4aManagerTest.ATTEMPT_INTERVAL = sl4a_manager.ATTEMPT_INTERVAL
+        Sl4aManagerTest.MAX_WAIT_ON_SERVER_SECONDS = (
+            sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS
+        )
+        Sl4aManagerTest._SL4A_LAUNCH_SERVER_CMD = sl4a_manager._SL4A_LAUNCH_SERVER_CMD
+        Sl4aManagerTest._SL4A_CLOSE_SERVER_CMD = sl4a_manager._SL4A_CLOSE_SERVER_CMD
+        Sl4aManagerTest._SL4A_ROOT_FIND_PORT_CMD = sl4a_manager._SL4A_ROOT_FIND_PORT_CMD
+        Sl4aManagerTest._SL4A_USER_FIND_PORT_CMD = sl4a_manager._SL4A_USER_FIND_PORT_CMD
+        Sl4aManagerTest._SL4A_START_SERVICE_CMD = sl4a_manager._SL4A_START_SERVICE_CMD
+
+    def setUp(self):
+        # Restore all module constants at the beginning of each test case.
+        sl4a_manager.ATTEMPT_INTERVAL = Sl4aManagerTest.ATTEMPT_INTERVAL
+        sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS = (
+            Sl4aManagerTest.MAX_WAIT_ON_SERVER_SECONDS
+        )
+        sl4a_manager._SL4A_LAUNCH_SERVER_CMD = Sl4aManagerTest._SL4A_LAUNCH_SERVER_CMD
+        sl4a_manager._SL4A_CLOSE_SERVER_CMD = Sl4aManagerTest._SL4A_CLOSE_SERVER_CMD
+        sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = Sl4aManagerTest._SL4A_ROOT_FIND_PORT_CMD
+        sl4a_manager._SL4A_USER_FIND_PORT_CMD = Sl4aManagerTest._SL4A_USER_FIND_PORT_CMD
+        sl4a_manager._SL4A_START_SERVICE_CMD = Sl4aManagerTest._SL4A_START_SERVICE_CMD
+
+        # Reset module data at the beginning of each test.
+        sl4a_manager._all_sl4a_managers = {}
+
+    def test_sl4a_ports_in_use(self):
+        """Tests sl4a_manager.Sl4aManager.sl4a_ports_in_use
+
+        Tests to make sure all server ports are returned with no duplicates.
+        """
+        adb = mock.Mock()
+        manager = sl4a_manager.Sl4aManager(adb)
+        session_1 = mock.Mock()
+        session_1.server_port = 12345
+        manager.sessions[1] = session_1
+        session_2 = mock.Mock()
+        session_2.server_port = 15973
+        manager.sessions[2] = session_2
+        session_3 = mock.Mock()
+        session_3.server_port = 12345
+        manager.sessions[3] = session_3
+        session_4 = mock.Mock()
+        session_4.server_port = 67890
+        manager.sessions[4] = session_4
+        session_5 = mock.Mock()
+        session_5.server_port = 75638
+        manager.sessions[5] = session_5
+
+        returned_ports = manager.sl4a_ports_in_use
+
+        # No duplicated ports.
+        self.assertEqual(len(returned_ports), len(set(returned_ports)))
+        # One call for each session
+        self.assertSetEqual(set(returned_ports), {12345, 15973, 67890, 75638})
+
+    @mock.patch("time.sleep", return_value=None)
+    def test_start_sl4a_server_uses_all_retries(self, _):
+        """Tests sl4a_manager.Sl4aManager.start_sl4a_server().
+
+        Tests to ensure that _start_sl4a_server retries and successfully returns
+        a port.
+        """
+        adb = mock.Mock()
+        adb.shell = lambda _, **kwargs: ""
+
+        side_effects = []
+        expected_port = 12345
+        for _ in range(
+            int(sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS / sl4a_manager.ATTEMPT_INTERVAL)
+            - 1
+        ):
+            side_effects.append(None)
+        side_effects.append(expected_port)
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        manager._get_open_listening_port = mock.Mock(side_effect=side_effects)
+        try:
+            found_port = manager.start_sl4a_server(0)
+            self.assertTrue(found_port)
+        except rpc_client.Sl4aConnectionError:
+            self.fail("start_sl4a_server failed to respect FIND_PORT_RETRIES.")
+
+    @mock.patch("time.sleep", return_value=None)
+    def test_start_sl4a_server_fails_all_retries(self, _):
+        """Tests sl4a_manager.Sl4aManager.start_sl4a_server().
+
+        Tests to ensure that start_sl4a_server throws an error if all retries
+        fail.
+        """
+        adb = mock.Mock()
+        adb.shell = lambda _, **kwargs: ""
+
+        side_effects = []
+        for _ in range(
+            int(sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS / sl4a_manager.ATTEMPT_INTERVAL)
+        ):
+            side_effects.append(None)
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        manager._get_open_listening_port = mock.Mock(side_effect=side_effects)
+        try:
+            manager.start_sl4a_server(0)
+            self.fail("Sl4aConnectionError was not thrown.")
+        except rpc_client.Sl4aConnectionError:
+            pass
+
+    def test_get_all_ports_command_uses_root_cmd(self):
+        """Tests sl4a_manager.Sl4aManager._get_all_ports_command().
+
+        Tests that _get_all_ports_command calls the root command when root is
+        available.
+        """
+        adb = mock.Mock()
+        adb.is_root = lambda: True
+        command = "ngo45hke3b4vie3mv5ni93,vfu3j"
+        sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = command
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        self.assertEqual(manager._get_all_ports_command(), command)
+
+    def test_get_all_ports_command_escalates_to_root(self):
+        """Tests sl4a_manager.Sl4aManager._call_get_ports_command().
+
+        Tests that _call_get_ports_command calls the root command when adb is
+        user but can escalate to root.
+        """
+        adb = mock.Mock()
+        adb.is_root = lambda: False
+        adb.ensure_root = lambda: True
+        command = "ngo45hke3b4vie3mv5ni93,vfu3j"
+        sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = command
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        self.assertEqual(manager._get_all_ports_command(), command)
+
+    def test_get_all_ports_command_uses_user_cmd(self):
+        """Tests sl4a_manager.Sl4aManager._call_get_ports_command().
+
+        Tests that _call_get_ports_command calls the user command when root is
+        unavailable.
+        """
+        adb = mock.Mock()
+        adb.is_root = lambda: False
+        adb.ensure_root = lambda: False
+        command = "ngo45hke3b4vie3mv5ni93,vfu3j"
+        sl4a_manager._SL4A_USER_FIND_PORT_CMD = command
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        self.assertEqual(manager._get_all_ports_command(), command)
+
+    def test_get_open_listening_port_no_port_found(self):
+        """Tests sl4a_manager.Sl4aManager._get_open_listening_port().
+
+        Tests to ensure None is returned if no open port is found.
+        """
+        adb = mock.Mock()
+        adb.shell = lambda _: ""
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        self.assertIsNone(manager._get_open_listening_port())
+
+    def test_get_open_listening_port_no_new_port_found(self):
+        """Tests sl4a_manager.Sl4aManager._get_open_listening_port().
+
+        Tests to ensure None is returned if the ports returned have all been
+        marked as in use.
+        """
+        adb = mock.Mock()
+        adb.shell = lambda _: "12345 67890"
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        manager._sl4a_ports = {"12345", "67890"}
+        self.assertIsNone(manager._get_open_listening_port())
+
+    def test_get_open_listening_port_port_is_avaiable(self):
+        """Tests sl4a_manager.Sl4aManager._get_open_listening_port().
+
+        Tests to ensure a port is returned if a port is found and has not been
+        marked as used.
+        """
+        adb = mock.Mock()
+        adb.shell = lambda _: "12345 67890"
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        manager._sl4a_ports = {"12345"}
+        self.assertEqual(manager._get_open_listening_port(), 67890)
+
+    def test_is_sl4a_installed_is_true(self):
+        """Tests sl4a_manager.Sl4aManager.is_sl4a_installed().
+
+        Tests is_sl4a_installed() returns true when pm returns data
+        """
+        adb = mock.Mock()
+        adb.shell = lambda _, **kwargs: "asdf"
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        self.assertTrue(manager.is_sl4a_installed())
+
+    def test_is_sl4a_installed_is_false(self):
+        """Tests sl4a_manager.Sl4aManager.is_sl4a_installed().
+
+        Tests is_sl4a_installed() returns false when pm returns no data
+        """
+        adb = mock.Mock()
+        adb.shell = lambda _, **kwargs: ""
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        self.assertFalse(manager.is_sl4a_installed())
+
+    def test_start_sl4a_throws_error_on_sl4a_not_installed(self):
+        """Tests sl4a_manager.Sl4aManager.start_sl4a_service().
+
+        Tests that a MissingSl4aError is thrown when SL4A is not installed.
+        """
+        adb = mock.Mock()
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        manager.is_sl4a_installed = lambda: False
+        try:
+            manager.start_sl4a_service()
+            self.fail("An error should have been thrown.")
+        except rpc_client.Sl4aNotInstalledError:
+            pass
+
+    def test_start_sl4a_starts_sl4a_if_not_running(self):
+        """Tests sl4a_manager.Sl4aManager.start_sl4a_service().
+
+        Tests that SL4A is started if it was not already running.
+        """
+        adb = mock.Mock()
+        adb.shell = mock.Mock(side_effect=["", "", ""])
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        manager.is_sl4a_installed = lambda: True
+        try:
+            manager.start_sl4a_service()
+        except rpc_client.Sl4aNotInstalledError:
+            self.fail("An error should not have been thrown.")
+        adb.shell.assert_called_with(sl4a_manager._SL4A_START_SERVICE_CMD)
+
+    def test_create_session_uses_oldest_server_port(self):
+        """Tests sl4a_manager.Sl4aManager.create_session().
+
+        Tests that when no port is given, the oldest server port opened is used
+        as the server port for a new session. The oldest server port can be
+        found by getting the oldest session's server port.
+        """
+        adb = mock.Mock()
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        # Ignore starting SL4A.
+        manager.start_sl4a_service = lambda: None
+
+        session_1 = mock.Mock()
+        session_1.server_port = 12345
+        session_2 = mock.Mock()
+        session_2.server_port = 67890
+        session_3 = mock.Mock()
+        session_3.server_port = 67890
+
+        manager.sessions[3] = session_3
+        manager.sessions[1] = session_1
+        manager.sessions[2] = session_2
+
+        with mock.patch.object(rpc_client.RpcClient, "__init__", return_value=None):
+            created_session = manager.create_session()
+
+        self.assertEqual(created_session.server_port, session_1.server_port)
+
+    def test_create_session_uses_random_port_when_no_session_exists(self):
+        """Tests sl4a_manager.Sl4aManager.create_session().
+
+        Tests that when no port is given, and no SL4A server exists, the server
+        port for the session is set to 0.
+        """
+        adb = mock.Mock()
+
+        manager = sl4a_manager.create_sl4a_manager(adb)
+        # Ignore starting SL4A.
+        manager.start_sl4a_service = lambda: None
+
+        with mock.patch.object(rpc_client.RpcClient, "__init__", return_value=None):
+            created_session = manager.create_session()
+
+        self.assertEqual(created_session.server_port, 0)
+
+    def test_terminate_all_session_call_terminate_on_all_sessions(self):
+        """Tests sl4a_manager.Sl4aManager.terminate_all_sessions().
+
+        Tests to see that the manager has called terminate on all sessions.
+        """
+        called_terminate_on = list()
+
+        def called_on(session):
+            called_terminate_on.append(session)
+
+        adb = mock.Mock()
+        manager = sl4a_manager.Sl4aManager(adb)
+
+        session_1 = mock.Mock()
+        session_1.terminate = lambda *args, **kwargs: called_on(session_1)
+        manager.sessions[1] = session_1
+        session_4 = mock.Mock()
+        session_4.terminate = lambda *args, **kwargs: called_on(session_4)
+        manager.sessions[4] = session_4
+        session_5 = mock.Mock()
+        session_5.terminate = lambda *args, **kwargs: called_on(session_5)
+        manager.sessions[5] = session_5
+
+        manager._get_all_ports = lambda: []
+        manager.terminate_all_sessions()
+        # No duplicates calls to terminate.
+        self.assertEqual(len(called_terminate_on), len(set(called_terminate_on)))
+        # One call for each session
+        self.assertSetEqual(set(called_terminate_on), {session_1, session_4, session_5})
+
+    def test_terminate_all_session_close_each_server(self):
+        """Tests sl4a_manager.Sl4aManager.terminate_all_sessions().
+
+        Tests to see that the manager has closed the server on each port.
+        """
+        closed_ports = list()
+
+        def close(command):
+            if str.isdigit(command):
+                closed_ports.append(command)
+            return ""
+
+        adb = mock.Mock()
+        adb.shell = close
+        sl4a_manager._SL4A_CLOSE_SERVER_CMD = "%s"
+        ports_to_close = {"12345", "67890", "24680", "13579"}
+
+        manager = sl4a_manager.Sl4aManager(adb)
+        manager._sl4a_ports = set(ports_to_close)
+        manager._get_all_ports = lambda: []
+        manager.terminate_all_sessions()
+
+        # No duplicate calls to close port
+        self.assertEqual(len(closed_ports), len(set(closed_ports)))
+        # One call for each port
+        self.assertSetEqual(ports_to_close, set(closed_ports))
+
+    def test_obtain_sl4a_server_starts_new_server(self):
+        """Tests sl4a_manager.Sl4aManager.obtain_sl4a_server().
+
+        Tests that a new server can be returned if the server does not exist.
+        """
+        adb = mock.Mock()
+        manager = sl4a_manager.Sl4aManager(adb)
+        manager.start_sl4a_server = mock.Mock()
+
+        manager.obtain_sl4a_server(0)
+
+        self.assertTrue(manager.start_sl4a_server.called)
+
+    @mock.patch(
+        "antlion.controllers.sl4a_lib.sl4a_manager.Sl4aManager.sl4a_ports_in_use",
+        new_callable=mock.PropertyMock,
+    )
+    def test_obtain_sl4a_server_returns_existing_server(self, sl4a_ports_in_use):
+        """Tests sl4a_manager.Sl4aManager.obtain_sl4a_server().
+
+        Tests that an existing server is returned if it is already opened.
+        """
+        adb = mock.Mock()
+        manager = sl4a_manager.Sl4aManager(adb)
+        manager.start_sl4a_server = mock.Mock()
+        sl4a_ports_in_use.return_value = [12345]
+
+        ret = manager.obtain_sl4a_server(12345)
+
+        self.assertFalse(manager.start_sl4a_server.called)
+        self.assertEqual(12345, ret)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
new file mode 100755
index 0000000..c28ba5b
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
@@ -0,0 +1,250 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import errno
+import unittest
+from socket import error as socket_error
+from socket import timeout
+
+import mock
+from mock import patch
+
+from antlion.controllers.adb_lib.error import AdbError
+from antlion.controllers.sl4a_lib import rpc_client, sl4a_ports
+from antlion.controllers.sl4a_lib.rpc_client import Sl4aStartError
+from antlion.controllers.sl4a_lib.sl4a_session import Sl4aSession
+
+
+class Sl4aSessionTest(unittest.TestCase):
+    """Tests the Sl4aSession class."""
+
+    def test_is_alive_true_on_not_terminated(self):
+        """Tests Sl4aSession.is_alive.
+
+        Tests that the session is_alive when it has not been terminated.
+        """
+        session = mock.Mock()
+        session._terminated = False
+        session.is_alive = Sl4aSession.is_alive
+        self.assertNotEqual(session._terminated, Sl4aSession.is_alive.fget(session))
+
+    def test_is_alive_false_on_terminated(self):
+        """Tests Sl4aSession.is_alive.
+
+        Tests that the session is not alive after it has been terminated.
+        """
+        session = mock.Mock()
+        session._terminated = True
+        session.is_alive = Sl4aSession.is_alive
+        self.assertNotEqual(session._terminated, Sl4aSession.is_alive.fget(session))
+
+    @patch("antlion.controllers.sl4a_lib.event_dispatcher.EventDispatcher")
+    def test_get_event_dispatcher_create_on_none(self, _):
+        """Tests Sl4aSession.get_event_dispatcher.
+
+        Tests that a new event_dispatcher is created if one does not exist.
+        """
+        session = mock.Mock()
+        session._event_dispatcher = None
+        ed = Sl4aSession.get_event_dispatcher(session)
+        self.assertTrue(session._event_dispatcher is not None)
+        self.assertEqual(session._event_dispatcher, ed)
+
+    def test_get_event_dispatcher_returns_existing_event_dispatcher(self):
+        """Tests Sl4aSession.get_event_dispatcher.
+
+        Tests that the existing event_dispatcher is returned.
+        """
+        session = mock.Mock()
+        session._event_dispatcher = "Something that is not None"
+        ed = Sl4aSession.get_event_dispatcher(session)
+        self.assertEqual(session._event_dispatcher, ed)
+
+    def test_create_client_side_connection_hint_already_in_use(self):
+        """Tests Sl4aSession._create_client_side_connection().
+
+        Tests that if the hinted port is already in use, the function will
+        call itself with a hinted port of 0 (random).
+        """
+        session = mock.Mock()
+        session._create_client_side_connection = mock.Mock()
+        with mock.patch("socket.socket") as socket:
+            # Throw an error when trying to bind to the hinted port.
+            error = OSError()
+            error.errno = errno.EADDRINUSE
+            socket_instance = mock.Mock()
+            socket_instance.bind = mock.Mock()
+            socket_instance.bind.side_effect = error
+            socket.return_value = socket_instance
+
+            Sl4aSession._create_client_side_connection(
+                session, sl4a_ports.Sl4aPorts(1, 2, 3)
+            )
+
+        fn = session._create_client_side_connection
+        self.assertEqual(fn.call_count, 1)
+        # Asserts that the 1st argument (Sl4aPorts) sent to the function
+        # has a client port of 0.
+        self.assertEqual(fn.call_args_list[0][0][0].client_port, 0)
+
+    def test_create_client_side_connection_catches_timeout(self):
+        """Tests Sl4aSession._create_client_side_connection().
+
+        Tests that the function will raise an Sl4aConnectionError upon timeout.
+        """
+        session = mock.Mock()
+        session._create_client_side_connection = mock.Mock()
+        error = timeout()
+        with mock.patch("socket.socket") as socket:
+            # Throw an error when trying to bind to the hinted port.
+            socket_instance = mock.Mock()
+            socket_instance.connect = mock.Mock()
+            socket_instance.connect.side_effect = error
+            socket.return_value = socket_instance
+
+            with self.assertRaises(rpc_client.Sl4aConnectionError):
+                Sl4aSession._create_client_side_connection(
+                    session, sl4a_ports.Sl4aPorts(0, 2, 3)
+                )
+
+    def test_create_client_side_connection_hint_taken_during_fn(self):
+        """Tests Sl4aSession._create_client_side_connection().
+
+        Tests that the function will catch an EADDRNOTAVAIL OSError and
+        call itself again, this time with a hinted port of 0 (random).
+        """
+        session = mock.Mock()
+        session._create_client_side_connection = mock.Mock()
+        error = socket_error()
+        error.errno = errno.EADDRNOTAVAIL
+        with mock.patch("socket.socket") as socket:
+            # Throw an error when trying to bind to the hinted port.
+            socket_instance = mock.Mock()
+            socket_instance.connect = mock.Mock()
+            socket_instance.connect.side_effect = error
+            socket.return_value = socket_instance
+
+            Sl4aSession._create_client_side_connection(
+                session, sl4a_ports.Sl4aPorts(0, 2, 3)
+            )
+
+        fn = session._create_client_side_connection
+        self.assertEqual(fn.call_count, 1)
+        # Asserts that the 1st argument (Sl4aPorts) sent to the function
+        # has a client port of 0.
+        self.assertEqual(fn.call_args_list[0][0][0].client_port, 0)
+
+    def test_create_client_side_connection_re_raises_uncaught_errors(self):
+        """Tests Sl4aSession._create_client_side_connection().
+
+        Tests that the function will re-raise any socket error that does not
+        have errno.EADDRNOTAVAIL.
+        """
+        session = mock.Mock()
+        session._create_client_side_connection = mock.Mock()
+        error = socket_error()
+        # Some error that isn't EADDRNOTAVAIL
+        error.errno = errno.ESOCKTNOSUPPORT
+        with mock.patch("socket.socket") as socket:
+            # Throw an error when trying to bind to the hinted port.
+            socket_instance = mock.Mock()
+            socket_instance.connect = mock.Mock()
+            socket_instance.connect.side_effect = error
+            socket.return_value = socket_instance
+
+            with self.assertRaises(socket_error):
+                Sl4aSession._create_client_side_connection(
+                    session, sl4a_ports.Sl4aPorts(0, 2, 3)
+                )
+
+    def test_terminate_only_closes_if_not_terminated(self):
+        """Tests Sl4aSession.terminate()
+
+        Tests that terminate skips the termination steps when the session has
+        already been terminated.
+        """
+        session = mock.Mock()
+        session._terminate_lock = mock.MagicMock()
+        session._terminated = True
+        Sl4aSession.terminate(session)
+
+        self.assertFalse(session._event_dispatcher.close.called)
+        self.assertFalse(session.rpc_client.terminate.called)
+
+    def test_terminate_closes_session_first(self):
+        """Tests Sl4aSession.terminate()
+
+        Tests that terminate runs the full termination sequence (closing the
+        event dispatcher and RPC client) when the session is not terminated.
+        """
+        session = mock.Mock()
+        session._terminate_lock = mock.MagicMock()
+        session._terminated = False
+        Sl4aSession.terminate(session)
+
+        self.assertTrue(session._event_dispatcher.close.called)
+        self.assertTrue(session.rpc_client.terminate.called)
+
+    def test_create_forwarded_port(self):
+        """Tests Sl4aSession._create_forwarded_port returns the hinted port."""
+        mock_adb = mock.Mock()
+        mock_adb.get_version_number = lambda: 37
+        mock_adb.tcp_forward = lambda hinted_port, device_port: hinted_port
+        mock_session = mock.Mock()
+        mock_session.adb = mock_adb
+        mock_session.log = mock.Mock()
+
+        self.assertEqual(
+            8080, Sl4aSession._create_forwarded_port(mock_session, 9999, 8080)
+        )
+
+    def test_create_forwarded_port_fail_once(self):
+        """Tests that _create_forwarded_port can return a non-hinted port.
+
+        This will only happen if the hinted port is already taken.
+        """
+        mock_adb = mock.Mock()
+        mock_adb.get_version_number = lambda: 37
+
+        mock_adb.tcp_forward = mock.Mock(
+            side_effect=AdbError(
+                "cmd", "stdout", stderr="cannot bind listener", ret_code=1
+            )
+        )
+        mock_session = mock.MagicMock()
+        mock_session.adb = mock_adb
+        mock_session.log = mock.Mock()
+        mock_session._create_forwarded_port = lambda *args, **kwargs: 12345
+
+        self.assertEqual(
+            12345, Sl4aSession._create_forwarded_port(mock_session, 9999, 8080)
+        )
+
+    def test_create_forwarded_port_raises_if_adb_version_is_old(self):
+        """Tests that _create_forwarded_port raises if adb version < 37."""
+        mock_adb = mock.Mock()
+        mock_adb.get_version_number = lambda: 31
+        mock_adb.tcp_forward = lambda _, __: self.fail(
+            "Calling adb.tcp_forward despite ADB version being too old."
+        )
+        mock_session = mock.Mock()
+        mock_session.adb = mock_adb
+        mock_session.log = mock.Mock()
+        with self.assertRaises(Sl4aStartError):
+            Sl4aSession._create_forwarded_port(mock_session, 9999, 0)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/event/__init__.py b/packages/antlion/unit_tests/event/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/event/__init__.py
rename to packages/antlion/unit_tests/event/__init__.py
diff --git a/packages/antlion/unit_tests/event/decorators_test.py b/packages/antlion/unit_tests/event/decorators_test.py
new file mode 100755
index 0000000..e1542b5
--- /dev/null
+++ b/packages/antlion/unit_tests/event/decorators_test.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import unittest
+from unittest import TestCase
+
+from mock import Mock
+
+from antlion.event.decorators import subscribe_static
+from antlion.event.subscription_handle import SubscriptionHandle
+
+
+class DecoratorsTest(TestCase):
+    """Tests the decorators found in antlion.event.decorators."""
+
+    def test_subscribe_static_return_type(self):
+        """Tests that the subscribe_static is the correct type."""
+        mock = Mock()
+
+        @subscribe_static(type)
+        def test(_):
+            return mock
+
+        self.assertTrue(isinstance(test, SubscriptionHandle))
+
+    def test_subscribe_static_calling_the_function_returns_normally(self):
+        """Tests that functions decorated by subscribe_static can be called."""
+        static_mock = Mock()
+
+        @subscribe_static(type)
+        def test(_):
+            return static_mock
+
+        self.assertEqual(test(Mock()), static_mock)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/event/event_bus_test.py b/packages/antlion/unit_tests/event/event_bus_test.py
new file mode 100755
index 0000000..f7657ce
--- /dev/null
+++ b/packages/antlion/unit_tests/event/event_bus_test.py
@@ -0,0 +1,265 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import unittest
+from unittest import TestCase
+
+from mock import Mock, patch
+
+from antlion.event import event_bus
+from antlion.event.event import Event
+from antlion.event.event_subscription import EventSubscription
+
+
+class EventBusTest(TestCase):
+    """Tests the event_bus functions."""
+
+    def setUp(self):
+        """Clears all state from the event_bus between test cases."""
+        event_bus._event_bus = event_bus._EventBus()
+
+    def get_subscription_argument(self, register_subscription_call):
+        """Gets the subscription argument from a register_subscription call."""
+        return register_subscription_call[0][0]
+
+    @patch("antlion.event.event_bus._event_bus.register_subscription")
+    def test_register_registers_a_subscription(self, register_subscription):
+        """Tests that register creates and registers a subscription."""
+        mock_event = Mock()
+        mock_func = Mock()
+        order = 43
+        event_bus.register(mock_event, mock_func, order=order)
+
+        args, _ = register_subscription.call_args
+        subscription = args[0]
+
+        # Instead of writing an equality operator for only testing,
+        # check the internals to make sure they are expected values.
+        self.assertEqual(subscription._event_type, mock_event)
+        self.assertEqual(subscription._func, mock_func)
+        self.assertEqual(subscription.order, order)
+
+    @patch("antlion.event.event_bus._event_bus.register_subscription")
+    def test_register_subscriptions_for_list(self, register_subscription):
+        """Tests that register_subscription is called for each subscription."""
+        mocks = [Mock(), Mock(), Mock()]
+        subscriptions = [
+            EventSubscription(mocks[0], lambda _: None),
+            EventSubscription(mocks[1], lambda _: None),
+            EventSubscription(mocks[2], lambda _: None),
+        ]
+
+        event_bus.register_subscriptions(subscriptions)
+        received_subscriptions = set()
+        for call in register_subscription.call_args_list:
+            received_subscriptions.add(self.get_subscription_argument(call))
+
+        self.assertEqual(register_subscription.call_count, len(subscriptions))
+        self.assertSetEqual(received_subscriptions, set(subscriptions))
+
+    def test_register_subscription_new_event_type(self):
+        """Tests that the event_bus can register a new event type."""
+        mock_type = Mock()
+        bus = event_bus._event_bus
+        subscription = EventSubscription(mock_type, lambda _: None)
+
+        reg_id = event_bus.register_subscription(subscription)
+
+        self.assertTrue(mock_type in bus._subscriptions.keys())
+        self.assertTrue(subscription in bus._subscriptions[mock_type])
+        self.assertTrue(reg_id in bus._registration_id_map.keys())
+
+    def test_register_subscription_existing_type(self):
+        """Tests that the event_bus can register an existing event type."""
+        mock_type = Mock()
+        bus = event_bus._event_bus
+        bus._subscriptions[mock_type] = [EventSubscription(mock_type, lambda _: None)]
+        new_subscription = EventSubscription(mock_type, lambda _: True)
+
+        reg_id = event_bus.register_subscription(new_subscription)
+
+        self.assertTrue(new_subscription in bus._subscriptions[mock_type])
+        self.assertTrue(reg_id in bus._registration_id_map.keys())
+
+    def test_post_to_unregistered_event_does_not_call_other_funcs(self):
+        """Tests posting an unregistered event will not call other funcs."""
+        mock_subscription = Mock()
+        bus = event_bus._event_bus
+        mock_type = Mock()
+        mock_subscription.event_type = mock_type
+        bus._subscriptions[mock_type] = [mock_subscription]
+
+        event_bus.post(Mock())
+
+        self.assertEqual(mock_subscription.deliver.call_count, 0)
+
+    def test_post_to_registered_event_calls_all_registered_funcs(self):
+        """Tests posting to a registered event calls all registered funcs."""
+        mock_subscriptions = [Mock(), Mock(), Mock()]
+        bus = event_bus._event_bus
+        for subscription in mock_subscriptions:
+            subscription.order = 0
+        mock_event = Mock()
+        bus._subscriptions[type(mock_event)] = mock_subscriptions
+
+        event_bus.post(mock_event)
+
+        for subscription in mock_subscriptions:
+            subscription.deliver.assert_called_once_with(mock_event)
+
+    def test_post_with_ignore_errors_calls_all_registered_funcs(self):
+        """Tests posting with ignore_errors=True calls all registered funcs,
+        even if they raise errors.
+        """
+
+        def _raise(_):
+            raise Exception
+
+        mock_event = Mock()
+        mock_subscriptions = [Mock(), Mock(), Mock()]
+        mock_subscriptions[0].deliver.side_effect = _raise
+        bus = event_bus._event_bus
+        for i, subscription in enumerate(mock_subscriptions):
+            subscription.order = i
+        bus._subscriptions[type(mock_event)] = mock_subscriptions
+
+        event_bus.post(mock_event, ignore_errors=True)
+
+        for subscription in mock_subscriptions:
+            subscription.deliver.assert_called_once_with(mock_event)
+
+    @patch("antlion.event.event_bus._event_bus.unregister")
+    def test_unregister_all_from_list(self, unregister):
+        """Tests unregistering from a list unregisters the specified list."""
+        unregister_list = [Mock(), Mock()]
+
+        event_bus.unregister_all(from_list=unregister_list)
+
+        self.assertEqual(unregister.call_count, len(unregister_list))
+        for args, _ in unregister.call_args_list:
+            subscription = args[0]
+            self.assertTrue(subscription in unregister_list)
+
+    @patch("antlion.event.event_bus._event_bus.unregister")
+    def test_unregister_all_from_event(self, unregister):
+        """Tests that all subscriptions under the event are unregistered."""
+        mock_event = Mock()
+        mock_event_2 = Mock()
+        bus = event_bus._event_bus
+        unregister_list = [Mock(), Mock()]
+        bus._subscriptions[type(mock_event_2)] = [Mock(), Mock(), Mock()]
+        bus._subscriptions[type(mock_event)] = unregister_list
+        for sub_type in bus._subscriptions.keys():
+            for subscription in bus._subscriptions[sub_type]:
+                subscription.event_type = sub_type
+                bus._registration_id_map[id(subscription)] = subscription
+
+        event_bus.unregister_all(from_event=type(mock_event))
+
+        self.assertEqual(unregister.call_count, len(unregister_list))
+        for args, _ in unregister.call_args_list:
+            subscription = args[0]
+            self.assertTrue(subscription in unregister_list)
+
+    @patch("antlion.event.event_bus._event_bus.unregister")
+    def test_unregister_all_no_args_unregisters_everything(self, unregister):
+        """Tests unregister_all without arguments will unregister everything."""
+        mock_event_1 = Mock()
+        mock_event_2 = Mock()
+        bus = event_bus._event_bus
+        unregister_list_1 = [Mock(), Mock()]
+        unregister_list_2 = [Mock(), Mock(), Mock()]
+        bus._subscriptions[type(mock_event_1)] = unregister_list_1
+        bus._subscriptions[type(mock_event_2)] = unregister_list_2
+        for sub_type in bus._subscriptions.keys():
+            for subscription in bus._subscriptions[sub_type]:
+                subscription.event_type = sub_type
+                bus._registration_id_map[id(subscription)] = subscription
+
+        event_bus.unregister_all()
+
+        self.assertEqual(
+            unregister.call_count, len(unregister_list_1) + len(unregister_list_2)
+        )
+        for args, _ in unregister.call_args_list:
+            subscription = args[0]
+            self.assertTrue(
+                subscription in unregister_list_1 or subscription in unregister_list_2
+            )
+
+    def test_unregister_given_an_event_subscription(self):
+        """Tests that unregister can unregister a given EventSubscription."""
+        mock_event = Mock()
+        bus = event_bus._event_bus
+        subscription = EventSubscription(type(mock_event), lambda _: None)
+        bus._registration_id_map[id(subscription)] = subscription
+        bus._subscriptions[type(mock_event)] = [subscription]
+
+        val = event_bus.unregister(subscription)
+
+        self.assertTrue(val)
+        self.assertTrue(subscription not in bus._registration_id_map)
+        self.assertTrue(subscription not in bus._subscriptions[type(mock_event)])
+
+    def test_unregister_given_a_registration_id(self):
+        """Tests that unregister can unregister a given EventSubscription."""
+        mock_event = Mock()
+        bus = event_bus._event_bus
+        subscription = EventSubscription(type(mock_event), lambda _: None)
+        registration_id = id(subscription)
+        bus._registration_id_map[id(subscription)] = subscription
+        bus._subscriptions[type(mock_event)] = [subscription]
+
+        val = event_bus.unregister(registration_id)
+
+        self.assertTrue(val)
+        self.assertTrue(subscription not in bus._registration_id_map)
+        self.assertTrue(subscription not in bus._subscriptions[type(mock_event)])
+
+    def test_unregister_given_object_that_is_not_a_subscription(self):
+        """Asserts that a ValueError is raised upon invalid arguments."""
+        with self.assertRaises(ValueError):
+            event_bus.unregister(Mock())
+
+    def test_unregister_given_invalid_registration_id(self):
+        """Asserts that a false is returned upon invalid registration_id."""
+        val = event_bus.unregister(9)
+        self.assertFalse(val)
+
+    def test_listen_for_registers_listener(self):
+        """Tests listen_for registers the listener within the with statement."""
+        bus = event_bus._event_bus
+
+        def event_listener(_):
+            pass
+
+        with event_bus.listen_for(Event, event_listener):
+            self.assertEqual(len(bus._registration_id_map), 1)
+
+    def test_listen_for_unregisters_listener(self):
+        """Tests listen_for unregisters the listener after the with statement."""
+        bus = event_bus._event_bus
+
+        def event_listener(_):
+            pass
+
+        with event_bus.listen_for(Event, event_listener):
+            pass
+
+        self.assertEqual(len(bus._registration_id_map), 0)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/event/event_subscription_test.py b/packages/antlion/unit_tests/event/event_subscription_test.py
new file mode 100755
index 0000000..3c4a008
--- /dev/null
+++ b/packages/antlion/unit_tests/event/event_subscription_test.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import unittest
+from unittest import TestCase
+
+from mock import Mock
+
+from antlion.event.event_subscription import EventSubscription
+
+
+class EventSubscriptionTest(TestCase):
+    """Tests the EventSubscription class."""
+
+    @staticmethod
+    def filter_out_event(_):
+        return False
+
+    @staticmethod
+    def pass_filter(_):
+        return True
+
+    def test_event_type_returns_correct_value(self):
+        """Tests that event_type returns the correct event type."""
+        expected_event_type = Mock()
+        subscription = EventSubscription(expected_event_type, lambda _: None)
+        self.assertEqual(expected_event_type, subscription.event_type)
+
+    def test_deliver_dont_deliver_if_event_is_filtered(self):
+        """Tests deliver does not call func if the event is filtered out."""
+        func = Mock()
+        subscription = EventSubscription(
+            Mock(), func, event_filter=self.filter_out_event
+        )
+
+        subscription.deliver(Mock())
+
+        self.assertFalse(func.called)
+
+    def test_deliver_deliver_accepted_event(self):
+        """Tests deliver does call func when the event is accepted."""
+        func = Mock()
+        subscription = EventSubscription(Mock(), func, event_filter=self.pass_filter)
+
+        subscription.deliver(Mock())
+        self.assertTrue(func.called)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/libs/__init__.py b/packages/antlion/unit_tests/libs/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/__init__.py
rename to packages/antlion/unit_tests/libs/__init__.py
diff --git a/src/antlion/unit_tests/libs/logging/__init__.py b/packages/antlion/unit_tests/libs/logging/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/logging/__init__.py
rename to packages/antlion/unit_tests/libs/logging/__init__.py
diff --git a/packages/antlion/unit_tests/libs/logging/log_stream_test.py b/packages/antlion/unit_tests/libs/logging/log_stream_test.py
new file mode 100755
index 0000000..2a4da06
--- /dev/null
+++ b/packages/antlion/unit_tests/libs/logging/log_stream_test.py
@@ -0,0 +1,450 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import os
+import unittest
+
+import mock
+
+from antlion import context
+from antlion.libs.logging import log_stream
+from antlion.libs.logging.log_stream import (
+    AlsoToLogHandler,
+    InvalidStyleSetError,
+    LogStyles,
+    _LogStream,
+)
+
+
+class TestClass(object):
+    """Dummy class for TestEvents"""
+
+    def __init__(self):
+        self.test_name = self.test_case.__name__
+
+    def test_case(self):
+        """Dummy test case for test events."""
+
+
+class LogStreamTest(unittest.TestCase):
+    """Tests the _LogStream class in antlion.libs.logging.log_stream."""
+
+    @staticmethod
+    def patch(imported_name, *args, **kwargs):
+        return mock.patch(
+            f"antlion.libs.logging.log_stream.{imported_name}", *args, **kwargs
+        )
+
+    @classmethod
+    def setUpClass(cls):
+        # logging.log_path only exists if logger._setup_test_logger is called.
+        # Here we set it to a value that is likely to not exist so file IO is
+        # not executed (an error is raised instead of creating the file).
+        logging.log_path = "/f/a/i/l/p/a/t/h"
+
+    def setUp(self):
+        log_stream._log_streams = dict()
+
+    # __init__
+
+    @mock.patch("os.makedirs")
+    def test_init_adds_null_handler(self, *_):
+        """Tests that a NullHandler is added to the logger upon initialization.
+        This ensures that no log output is generated when a test class is not
+        running.
+        """
+        debug_monolith_log = LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG
+        with self.patch("MovableFileHandler"):
+            log = log_stream.create_logger(
+                self._testMethodName, log_styles=debug_monolith_log
+            )
+
+        self.assertTrue(isinstance(log.handlers[0], logging.NullHandler))
+
+    # __validate_style
+
+    @mock.patch("os.makedirs")
+    def test_validate_styles_raises_when_same_location_set_multiple_times(self, *_):
+        """Tests that a style is invalid if it sets the same handler twice.
+
+        If the error is NOT raised, then a LogStream can create a Logger that
+        has multiple LogHandlers trying to write to the same file.
+        """
+        with self.assertRaises(InvalidStyleSetError) as catch:
+            log_stream.create_logger(
+                self._testMethodName,
+                log_styles=[
+                    LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
+                    LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
+                ],
+            )
+        self.assertTrue(
+            "has been set multiple" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
+
+    @mock.patch("os.makedirs")
+    def test_validate_styles_raises_when_multiple_file_outputs_set(self, *_):
+        """Tests that a style is invalid if more than one of MONOLITH_LOG,
+        TESTCLASS_LOG, and TESTCASE_LOG is set for the same log level.
+
+        If the error is NOT raised, then a LogStream can create a Logger that
+        has multiple LogHandlers trying to write to the same file.
+        """
+        with self.assertRaises(InvalidStyleSetError) as catch:
+            log_stream.create_logger(
+                self._testMethodName,
+                log_styles=[
+                    LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
+                    LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG,
+                ],
+            )
+        self.assertTrue(
+            "More than one of" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
+
+        with self.assertRaises(InvalidStyleSetError) as catch:
+            log_stream.create_logger(
+                self._testMethodName,
+                log_styles=[
+                    LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
+                    LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
+                ],
+            )
+        self.assertTrue(
+            "More than one of" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
+
+        with self.assertRaises(InvalidStyleSetError) as catch:
+            log_stream.create_logger(
+                self._testMethodName,
+                log_styles=[
+                    LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
+                    LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG,
+                    LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
+                ],
+            )
+        self.assertTrue(
+            "More than one of" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
+
+    @mock.patch("os.makedirs")
+    def test_validate_styles_raises_when_no_level_exists(self, *_):
+        """Tests that a style is invalid if it does not contain a log level.
+
+        If the style does not contain a log level, then there is no way to
+        pass the information coming from the logger to the correct file.
+        """
+        with self.assertRaises(InvalidStyleSetError) as catch:
+            log_stream.create_logger(
+                self._testMethodName, log_styles=[LogStyles.MONOLITH_LOG]
+            )
+
+        self.assertTrue(
+            "log level" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
+
+    @mock.patch("os.makedirs")
+    def test_validate_styles_raises_when_no_location_exists(self, *_):
+        """Tests that a style is invalid if it does not contain a log location.
+
+        If the style does not contain a log location, then there is no way to
+        pass the information coming from the logger to the correct file.
+        """
+        with self.assertRaises(InvalidStyleSetError) as catch:
+            log_stream.create_logger(
+                self._testMethodName, log_styles=[LogStyles.LOG_INFO]
+            )
+
+        self.assertTrue(
+            "log location" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
+
+    @mock.patch("os.makedirs")
+    def test_validate_styles_raises_when_rotate_logs_no_file_handler(self, *_):
+        """Tests that a LogStyle cannot set ROTATE_LOGS without *_LOG flag.
+
+        If the LogStyle contains ROTATE_LOGS, it must be associated with a log
+        that is rotatable. TO_ACTS_LOG and TO_STDOUT are not rotatable logs,
+        since those are both controlled by another object/process. The user
+        must specify MONOLITH_LOG or TESTCASE_LOG.
+        """
+        with self.assertRaises(InvalidStyleSetError) as catch:
+            log_stream.create_logger(
+                self._testMethodName,
+                # Added LOG_DEBUG here to prevent the no_level_exists raise from
+                # occurring.
+                log_styles=[LogStyles.LOG_DEBUG + LogStyles.ROTATE_LOGS],
+            )
+
+        self.assertTrue(
+            "log type" in catch.exception.args[0],
+            msg="__validate_styles did not raise the expected error message",
+        )
+
+    # __handle_style
+
+    @mock.patch("os.makedirs")
+    def test_handle_style_to_acts_log_creates_handler(self, *_):
+        """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler."""
+        info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_ACTS_LOG
+
+        log = log_stream.create_logger(self._testMethodName, log_styles=info_acts_log)
+
+        self.assertTrue(isinstance(log.handlers[1], AlsoToLogHandler))
+
+    @mock.patch("os.makedirs")
+    def test_handle_style_to_acts_log_creates_handler_is_lowest_level(self, *_):
+        """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler
+        that is set to the lowest LogStyles level."""
+        info_acts_log = LogStyles.LOG_DEBUG + LogStyles.LOG_INFO + LogStyles.TO_ACTS_LOG
+
+        log = log_stream.create_logger(self._testMethodName, log_styles=info_acts_log)
+
+        self.assertTrue(isinstance(log.handlers[1], AlsoToLogHandler))
+        self.assertEqual(log.handlers[1].level, logging.DEBUG)
+
+    @mock.patch("os.makedirs")
+    def test_handle_style_to_stdout_creates_stream_handler(self, *_):
+        """Tests that using the flag TO_STDOUT creates a StreamHandler."""
+        info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_STDOUT
+
+        log = log_stream.create_logger(self._testMethodName, log_styles=info_acts_log)
+
+        self.assertTrue(isinstance(log.handlers[1], logging.StreamHandler))
+
+    @mock.patch("os.makedirs")
+    def test_handle_style_creates_file_handler(self, *_):
+        """Tests handle_style creates a MovableFileHandler for the MONOLITH_LOG."""
+        info_acts_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
+
+        expected = mock.MagicMock()
+        with self.patch("MovableFileHandler", return_value=expected):
+            log = log_stream.create_logger(
+                self._testMethodName, log_styles=info_acts_log
+            )
+
+        self.assertEqual(log.handlers[1], expected)
+
+    @mock.patch("os.makedirs")
+    def test_handle_style_creates_rotating_file_handler(self, *_):
+        """Tests handle_style creates a MovableFileHandler for the ROTATE_LOGS."""
+        info_acts_log = (
+            LogStyles.LOG_INFO + LogStyles.ROTATE_LOGS + LogStyles.MONOLITH_LOG
+        )
+
+        expected = mock.MagicMock()
+        with self.patch("MovableRotatingFileHandler", return_value=expected):
+            log = log_stream.create_logger(
+                self._testMethodName, log_styles=info_acts_log
+            )
+
+        self.assertEqual(log.handlers[1], expected)
+
+    # __create_rotating_file_handler
+
+    def test_create_rotating_file_handler_does_what_it_says_it_does(self):
+        """Tests that __create_rotating_file_handler does exactly that."""
+        expected = mock.MagicMock()
+
+        with self.patch("MovableRotatingFileHandler", return_value=expected):
+            # Through name-mangling, this function is automatically renamed. See
+            # https://docs.python.org/3/tutorial/classes.html#private-variables
+            fh = _LogStream._LogStream__create_rotating_file_handler("")
+
+        self.assertEqual(
+            expected, fh, "The function did not return a MovableRotatingFileHandler."
+        )
+
+    # __get_file_handler_creator
+
+    def test_get_file_handler_creator_returns_rotating_file_handler(self):
+        """Tests the function returns a MovableRotatingFileHandler when the log_style
+        has LogStyle.ROTATE_LOGS."""
+        expected = mock.MagicMock()
+
+        with self.patch(
+            "_LogStream._LogStream__create_rotating_file_handler", return_value=expected
+        ):
+            # Through name-mangling, this function is automatically renamed. See
+            # https://docs.python.org/3/tutorial/classes.html#private-variables
+            fh_creator = _LogStream._LogStream__get_file_handler_creator(
+                LogStyles.ROTATE_LOGS
+            )
+
+        self.assertEqual(
+            expected,
+            fh_creator("/d/u/m/m/y/p/a/t/h"),
+            "The function did not return a MovableRotatingFileHandler.",
+        )
+
+    def test_get_file_handler_creator_returns_file_handler(self):
+        """Tests the function returns a MovableFileHandler when the log_style does NOT
+        have LogStyle.ROTATE_LOGS."""
+        expected = mock.MagicMock()
+
+        with self.patch("MovableFileHandler", return_value=expected):
+            # Through name-mangling, this function is automatically renamed. See
+            # https://docs.python.org/3/tutorial/classes.html#private-variables
+            handler = _LogStream._LogStream__get_file_handler_creator(LogStyles.NONE)()
+
+        self.assertTrue(isinstance(handler, mock.Mock))
+
+    # __get_lowest_log_level
+
+    def test_get_lowest_level_gets_lowest_level(self):
+        """Tests __get_lowest_level returns the lowest LogStyle level given."""
+        level = _LogStream._LogStream__get_lowest_log_level(LogStyles.ALL_LEVELS)
+        self.assertEqual(level, LogStyles.LOG_DEBUG)
+
+    # __get_current_output_dir
+
+    @mock.patch("os.makedirs")
+    def test_get_current_output_dir_gets_correct_path(self, *_):
+        """Tests __get_current_output_dir gets the correct path from the context"""
+        info_monolith_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
+
+        base_path = "BASEPATH"
+        subcontext = "SUBCONTEXT"
+        with self.patch("MovableFileHandler"):
+            logstream = log_stream._LogStream(
+                self._testMethodName,
+                log_styles=info_monolith_log,
+                base_path=base_path,
+                subcontext=subcontext,
+            )
+
+        expected = os.path.join(base_path, subcontext)
+        self.assertEqual(logstream._LogStream__get_current_output_dir(), expected)
+
+    # __create_handler
+
+    @mock.patch("os.makedirs")
+    def test_create_handler_creates_handler_at_correct_path(self, *_):
+        """Tests that __create_handler calls the handler creator with the
+        correct absolute path to the log file.
+        """
+        info_monolith_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
+        base_path = "BASEPATH"
+        with self.patch("MovableFileHandler") as file_handler:
+            log_stream.create_logger(
+                self._testMethodName, log_styles=info_monolith_log, base_path=base_path
+            )
+            expected = os.path.join(base_path, f"{self._testMethodName}_{'info'}.txt")
+            file_handler.assert_called_with(expected)
+
+    # __remove_handler
+
+    @mock.patch("os.makedirs")
+    def test_remove_handler_removes_a_handler(self, *_):
+        """Tests that __remove_handler removes the handler from the logger and
+        closes the handler.
+        """
+        dummy_obj = mock.Mock()
+        dummy_obj.logger = mock.Mock()
+        handler = mock.Mock()
+        _LogStream._LogStream__remove_handler(dummy_obj, handler)
+
+        self.assertTrue(dummy_obj.logger.removeHandler.called)
+        self.assertTrue(handler.close.called)
+
+    # update_handlers
+
+    @mock.patch("os.makedirs")
+    def test_update_handlers_updates_filehandler_target(self, _):
+        """Tests that update_handlers invokes the underlying
+        MovableFileHandler.set_file method on the correct path.
+        """
+        info_testclass_log = LogStyles.LOG_INFO + LogStyles.TESTCLASS_LOG
+        file_name = "FILENAME"
+        with self.patch("MovableFileHandler"):
+            log = log_stream.create_logger(
+                self._testMethodName, log_styles=info_testclass_log
+            )
+            handler = log.handlers[-1]
+            handler.baseFilename = file_name
+            stream = log_stream._log_streams[log.name]
+            stream._LogStream__get_current_output_dir = lambda: "BASEPATH/TestClass"
+
+            stream.update_handlers(context.NewTestClassContextEvent())
+
+            handler.set_file.assert_called_with("BASEPATH/TestClass/FILENAME")
+
+    # cleanup
+
+    @mock.patch("os.makedirs")
+    def test_cleanup_removes_all_handlers(self, *_):
+        """Tests that cleanup removes all handlers in the logger, except
+        the NullHandler.
+        """
+        info_testcase_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
+        with self.patch("MovableFileHandler"):
+            log_stream.create_logger(self._testMethodName, log_styles=info_testcase_log)
+
+        created_log_stream = log_stream._log_streams[self._testMethodName]
+        created_log_stream.cleanup()
+
+        self.assertEqual(len(created_log_stream.logger.handlers), 1)
+
+
+class LogStreamModuleTests(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+        # logging.log_path only exists if logger._setup_test_logger is called.
+        # Here we set it to a value that is likely to not exist so file IO is
+        # not executed (an error is raised instead of creating the file).
+        logging.log_path = "/f/a/i/l/p/a/t/h"
+
+    def setUp(self):
+        log_stream._log_streams = {}
+
+    # _update_handlers
+
+    @staticmethod
+    def create_new_context_event():
+        return context.NewContextEvent()
+
+    def test_update_handlers_delegates_calls_to_log_streams(self):
+        """Tests _update_handlers calls update_handlers on each log_stream."""
+        log_stream._log_streams = {"a": mock.Mock(), "b": mock.Mock()}
+
+        log_stream._update_handlers(self.create_new_context_event())
+
+        self.assertTrue(log_stream._log_streams["a"].update_handlers.called)
+        self.assertTrue(log_stream._log_streams["b"].update_handlers.called)
+
+    # _set_logger
+
+    def test_set_logger_overwrites_previous_logger(self):
+        """Tests that calling set_logger overwrites the previous logger within
+        log_stream._log_streams.
+        """
+        previous = mock.Mock()
+        log_stream._log_streams = {"a": previous}
+        expected = mock.Mock()
+        expected.name = "a"
+        log_stream._set_logger(expected)
+
+        self.assertEqual(log_stream._log_streams["a"], expected)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/__init__.py b/packages/antlion/unit_tests/libs/ota/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/ota/__init__.py
rename to packages/antlion/unit_tests/libs/ota/__init__.py
diff --git a/src/antlion/unit_tests/libs/ota/dummy_ota_package.zip b/packages/antlion/unit_tests/libs/ota/dummy_ota_package.zip
similarity index 100%
rename from src/antlion/unit_tests/libs/ota/dummy_ota_package.zip
rename to packages/antlion/unit_tests/libs/ota/dummy_ota_package.zip
Binary files differ
diff --git a/src/antlion/unit_tests/libs/ota/ota_runners/__init__.py b/packages/antlion/unit_tests/libs/ota/ota_runners/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/ota/ota_runners/__init__.py
rename to packages/antlion/unit_tests/libs/ota/ota_runners/__init__.py
diff --git a/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py b/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
new file mode 100644
index 0000000..f99346b
--- /dev/null
+++ b/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import unittest
+
+import mock
+
+from antlion.libs.ota.ota_runners import ota_runner, ota_runner_factory
+from antlion.libs.ota.ota_runners.ota_runner_factory import OtaConfigError
+
+
+class OtaRunnerFactoryTests(unittest.TestCase):
+    """Tests all of the functions in the ota_runner_factory module."""
+
+    def setUp(self):
+        self.device = mock.MagicMock()
+        self.device.serial = "fake_serial"
+
+    def test_get_ota_value_from_config_no_map_key_missing(self):
+        acts_config = {}
+        with self.assertRaises(OtaConfigError):
+            ota_runner_factory.get_ota_value_from_config(
+                acts_config, "ota_tool", self.device
+            )
+
+    def test_get_ota_value_from_config_with_map_key_missing(self):
+        acts_config = {"ota_map": {"fake_serial": "MockOtaTool"}}
+        with self.assertRaises(OtaConfigError):
+            ota_runner_factory.get_ota_value_from_config(
+                acts_config, "ota_tool", self.device
+            )
+
+    def test_get_ota_value_from_config_with_map_key_found(self):
+        expected_value = "/path/to/tool"
+        acts_config = {
+            "ota_map": {"fake_serial": "MockOtaTool"},
+            "ota_tool_MockOtaTool": expected_value,
+        }
+        ret = ota_runner_factory.get_ota_value_from_config(
+            acts_config, "ota_tool", self.device
+        )
+        self.assertEqual(expected_value, ret)
+
+    def test_create_from_configs_raise_when_non_default_tool_path_missing(self):
+        acts_config = {
+            "ota_tool": "FakeTool",
+        }
+        try:
+            ota_runner_factory.create_from_configs(acts_config, self.device)
+        except OtaConfigError:
+            return
+        self.fail(
+            "create_from_configs did not throw an error when a tool was "
+            "specified without a tool path."
+        )
+
+    def test_create_from_configs_without_map_makes_proper_calls(self):
+        acts_config = {
+            "ota_package": "jkl;",
+            "ota_sl4a": "qaz",
+            "ota_tool": "FakeTool",
+            "FakeTool": "qwerty",
+        }
+        function_path = "antlion.libs.ota.ota_runners.ota_runner_factory.create"
+        with mock.patch(function_path) as mocked_function:
+            ota_runner_factory.create_from_configs(acts_config, self.device)
+            mocked_function.assert_called_with(
+                "jkl;", "qaz", self.device, "FakeTool", "qwerty"
+            )
+
+    def test_create_from_configs_with_map_makes_proper_calls(self):
+        acts_config = {
+            "ota_map": {"fake_serial": "hardwareA"},
+            "ota_package_hardwareA": "jkl;",
+            "ota_sl4a_hardwareA": "qaz",
+            "ota_tool_hardwareA": "FakeTool",
+            "FakeTool": "qwerty",
+        }
+        function_path = "antlion.libs.ota.ota_runners.ota_runner_factory.create"
+        with mock.patch(function_path) as mocked_function:
+            ota_runner_factory.create_from_configs(acts_config, self.device)
+            mocked_function.assert_called_with(
+                "jkl;", "qaz", self.device, "FakeTool", "qwerty"
+            )
+
+    def test_create_raise_on_ota_pkg_and_sl4a_fields_have_different_types(self):
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
+            with self.assertRaises(TypeError):
+                ota_runner_factory.create("ota_package", ["ota_sl4a"], self.device)
+
+    def test_create_raise_on_ota_package_not_a_list_or_string(self):
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
+            with self.assertRaises(TypeError):
+                ota_runner_factory.create({"ota": "pkg"}, {"ota": "sl4a"}, self.device)
+
+    def test_create_returns_single_ota_runner_on_ota_package_being_a_str(self):
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
+            ret = ota_runner_factory.create("", "", self.device)
+            self.assertEqual(type(ret), ota_runner.SingleUseOtaRunner)
+
+    def test_create_returns_multi_ota_runner_on_ota_package_being_a_list(self):
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
+            ret = ota_runner_factory.create([], [], self.device)
+            self.assertEqual(type(ret), ota_runner.MultiUseOtaRunner)
+
+    def test_create_returns_bound_ota_runner_on_second_request(self):
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
+            first_return = ota_runner_factory.create([], [], self.device)
+            logging.disable(logging.WARNING)
+            second_return = ota_runner_factory.create([], [], self.device)
+            logging.disable(logging.NOTSET)
+            self.assertEqual(first_return, second_return)
+
+    def test_create_returns_different_ota_runner_on_second_request(self):
+        with mock.patch("antlion.libs.ota.ota_tools.ota_tool_factory.create"):
+            first_return = ota_runner_factory.create(
+                [], [], self.device, use_cached_runners=False
+            )
+            second_return = ota_runner_factory.create(
+                [], [], self.device, use_cached_runners=False
+            )
+            self.assertNotEqual(first_return, second_return)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py b/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
new file mode 100644
index 0000000..37a74c1
--- /dev/null
+++ b/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
@@ -0,0 +1,283 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import unittest
+
+import mock
+
+from antlion.libs.ota.ota_runners import ota_runner
+from antlion.libs.ota.ota_tools import ota_tool
+
+
+class MockOtaTool(ota_tool.OtaTool):
+    def __init__(self, command):
+        super(MockOtaTool, self).__init__(command)
+        self.update_call_count = 0
+        self.cleanup_call_count = 0
+
+    def update(self, unused):
+        self.update_call_count += 1
+
+    def cleanup(self, unused):
+        self.cleanup_call_count += 1
+
+    def reset_count(self):
+        self.update_call_count = 0
+        self.cleanup_call_count = 0
+
+    def assert_calls_equal(self, test, number_of_expected_calls):
+        test.assertEqual(number_of_expected_calls, self.update_call_count)
+        test.assertEqual(number_of_expected_calls, self.cleanup_call_count)
+
+
+class OtaRunnerImpl(ota_runner.OtaRunner):
+    """Sets properties to return an empty string to allow OtaRunner tests."""
+
+    def get_sl4a_apk(self):
+        return ""
+
+    def get_ota_package(self):
+        return ""
+
+    def validate_update(self):
+        pass
+
+
+class OtaRunnerTest(unittest.TestCase):
+    """Tests the OtaRunner class."""
+
+    def setUp(self):
+        self.prev_sl4a_service_setup_time = ota_runner.SL4A_SERVICE_SETUP_TIME
+        ota_runner.SL4A_SERVICE_SETUP_TIME = 0
+
+    def tearDown(self):
+        ota_runner.SL4A_SERVICE_SETUP_TIME = self.prev_sl4a_service_setup_time
+
+    def test_update(self):
+        device = mock.MagicMock()
+        device.skip_sl4a = False
+        tool = MockOtaTool("mock_command")
+        runner = OtaRunnerImpl(tool, device)
+        runner.android_device.adb.getprop = mock.Mock(side_effect=["a", "b"])
+        runner.get_post_build_id = lambda: "abc"
+
+        runner._update()
+
+        self.assertTrue(device.stop_services.called)
+        self.assertTrue(device.wait_for_boot_completion.called)
+        self.assertTrue(device.start_services.called)
+        self.assertTrue(device.adb.install.called)
+        tool.assert_calls_equal(self, 1)
+
+    def test_update_fail_on_no_change_to_build(self):
+        device = mock.MagicMock()
+        tool = MockOtaTool("mock_command")
+        runner = OtaRunnerImpl(tool, device)
+        runner.android_device.adb.getprop = mock.Mock(side_effect=["a", "a"])
+        runner.get_post_build_id = lambda: "abc"
+        try:
+            runner._update()
+            self.fail("Matching build fingerprints did not throw an error!")
+        except ota_runner.OtaError:
+            pass
+
+    def test_init(self):
+        device = mock.MagicMock()
+        tool = MockOtaTool("mock_command")
+        runner = ota_runner.OtaRunner(tool, device)
+
+        self.assertEqual(runner.ota_tool, tool)
+        self.assertEqual(runner.android_device, device)
+        self.assertEqual(runner.serial, device.serial)
+
+    def test_get_post_build_id_grabs_valid_data(self):
+        device = mock.MagicMock()
+        tool = MockOtaTool("mock_command")
+        runner = OtaRunnerImpl(tool, device)
+        ota_package_path = os.path.join(
+            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
+            "dummy_ota_package.zip",
+        )
+        runner.get_ota_package = lambda: ota_package_path
+        self.assertEqual(runner.get_post_build_id(), "post-build_information")
+
+    def test_get_ota_package_metadata_value_does_not_exist(self):
+        device = mock.MagicMock()
+        tool = MockOtaTool("mock_command")
+        runner = OtaRunnerImpl(tool, device)
+        ota_package_path = os.path.join(
+            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
+            "dummy_ota_package.zip",
+        )
+        runner.get_ota_package = lambda: ota_package_path
+        self.assertEqual(runner.get_ota_package_metadata("garbage-data"), None)
+
+
+class SingleUseOtaRunnerTest(unittest.TestCase):
+    """Tests the SingleUseOtaRunner class."""
+
+    def setUp(self):
+        self.device = mock.MagicMock()
+        self.tool = MockOtaTool("mock_command")
+
+    def test_update_first_update_runs(self):
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
+        try:
+            with mock.patch.object(ota_runner.OtaRunner, "_update"):
+                runner.update()
+        except ota_runner.OtaError:
+            self.fail(
+                "SingleUseOtaRunner threw an exception on the first " "update call."
+            )
+
+    def test_update_second_update_raises_error(self):
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
+            runner.update()
+            try:
+                runner.update()
+            except ota_runner.OtaError:
+                return
+        self.fail(
+            "SingleUseOtaRunner did not throw an exception on the second " "update call."
+        )
+
+    def test_can_update_no_updates_called(self):
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
+        self.assertEqual(True, runner.can_update())
+
+    def test_can_update_has_updated_already(self):
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "", "")
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
+            runner.update()
+        self.assertEqual(False, runner.can_update())
+
+    def test_get_ota_package(self):
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "a", "b")
+        self.assertEqual(runner.get_ota_package(), "a")
+
+    def test_get_sl4a_apk(self):
+        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, "a", "b")
+        self.assertEqual(runner.get_sl4a_apk(), "b")
+
+
+class MultiUseOtaRunnerTest(unittest.TestCase):
+    """Tests the MultiUseOtaRunner class."""
+
+    def setUp(self):
+        self.device = mock.MagicMock()
+        self.tool = MockOtaTool("mock_command")
+
+    def test_update_first_update_runs(self):
+        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device, [""], [""])
+        try:
+            with mock.patch.object(ota_runner.OtaRunner, "_update"):
+                runner.update()
+        except ota_runner.OtaError:
+            self.fail(
+                "MultiUseOtaRunner threw an exception on the first " "update call."
+            )
+
+    def test_update_multiple_updates_run(self):
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
+            runner.update()
+            try:
+                runner.update()
+            except ota_runner.OtaError:
+                self.fail(
+                    "MultiUseOtaRunner threw an exception before "
+                    "running out of update packages."
+                )
+
+    def test_update_too_many_update_calls_raises_error(self):
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
+            runner.update()
+            runner.update()
+            try:
+                runner.update()
+            except ota_runner.OtaError:
+                return
+        self.fail(
+            "MultiUseOtaRunner did not throw an exception after running "
+            "out of update packages."
+        )
+
+    def test_can_update_no_updates_called(self):
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        self.assertEqual(True, runner.can_update())
+
+    def test_can_update_has_more_updates_left(self):
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
+            runner.update()
+        self.assertEqual(True, runner.can_update())
+
+    def test_can_update_ran_out_of_updates(self):
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        with mock.patch.object(ota_runner.OtaRunner, "_update"):
+            runner.update()
+            runner.update()
+        self.assertEqual(False, runner.can_update())
+
+    def test_get_ota_package(self):
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        self.assertEqual(runner.get_ota_package(), "first_pkg")
+
+    def test_get_sl4a_apk(self):
+        runner = ota_runner.MultiUseOtaRunner(
+            self.tool,
+            self.device,
+            ["first_pkg", "second_pkg"],
+            ["first_apk", "second_apk"],
+        )
+        self.assertEqual(runner.get_sl4a_apk(), "first_apk")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/__init__.py b/packages/antlion/unit_tests/libs/ota/ota_tools/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/ota/ota_tools/__init__.py
rename to packages/antlion/unit_tests/libs/ota/ota_tools/__init__.py
diff --git a/packages/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
new file mode 100644
index 0000000..51daad1
--- /dev/null
+++ b/packages/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import unittest
+
+import mock
+
+from antlion.controllers import android_device
+from antlion.libs.ota.ota_runners import ota_runner
+from antlion.libs.ota.ota_tools import adb_sideload_ota_tool, ota_tool
+
+
+def get_mock_android_device(serial="", ssh_connection=None):
+    """Returns a mocked AndroidDevice with a mocked adb/fastboot."""
+    with mock.patch("antlion.controllers.adb.AdbProxy") as adb_proxy, mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy"
+    ) as fb_proxy:
+        adb_proxy.return_value.getprop.return_value = "1.2.3"
+        fb_proxy.return_value.devices.return_value = ""
+        ret = mock.Mock(
+            android_device.AndroidDevice(serial=serial, ssh_connection=ssh_connection)
+        )
+        fb_proxy.reset_mock()
+        return ret
+
+
+class AdbSideloadOtaToolTest(unittest.TestCase):
+    """Tests the OtaTool class."""
+
+    def test_init(self):
+        expected_value = "command string"
+        self.assertEqual(ota_tool.OtaTool(expected_value).command, expected_value)
+
+    def setUp(self):
+        self.sl4a_service_setup_time = ota_runner.SL4A_SERVICE_SETUP_TIME
+        ota_runner.SL4A_SERVICE_SETUP_TIME = 0
+        logging.log_path = "/tmp/log"
+
+    def tearDown(self):
+        ota_runner.SL4A_SERVICE_SETUP_TIME = self.sl4a_service_setup_time
+
+    @staticmethod
+    def test_start():
+        # This test could have a bunch of verify statements,
+        # but its probably not worth it.
+        device = get_mock_android_device()
+        ota_package_path = os.path.join(
+            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
+            "dummy_ota_package.zip",
+        )
+        tool = adb_sideload_ota_tool.AdbSideloadOtaTool(ota_package_path)
+        runner = ota_runner.SingleUseOtaRunner(tool, device, ota_package_path, "")
+        runner.android_device.adb.getprop = mock.Mock(side_effect=["a", "b"])
+        runner.update()
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
new file mode 100644
index 0000000..4769171
--- /dev/null
+++ b/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from antlion.libs.ota.ota_tools import ota_tool_factory
+
+
+class MockOtaTool(object):
+    def __init__(self, command):
+        self.command = command
+
+
+class OtaToolFactoryTests(unittest.TestCase):
+    def setUp(self):
+        ota_tool_factory._constructed_tools = {}
+
+    def test_create_constructor_exists(self):
+        ota_tool_factory._CONSTRUCTORS = {
+            MockOtaTool.__name__: lambda command: MockOtaTool(command),
+        }
+        ret = ota_tool_factory.create(MockOtaTool.__name__, "command")
+        self.assertEqual(type(ret), MockOtaTool)
+        self.assertTrue(ret in ota_tool_factory._constructed_tools.values())
+
+    def test_create_not_in_constructors(self):
+        ota_tool_factory._CONSTRUCTORS = {}
+        with self.assertRaises(KeyError):
+            ota_tool_factory.create(MockOtaTool.__name__, "command")
+
+    def test_create_returns_cached_tool(self):
+        ota_tool_factory._CONSTRUCTORS = {
+            MockOtaTool.__name__: lambda command: MockOtaTool(command),
+        }
+        ret_a = ota_tool_factory.create(MockOtaTool.__name__, "command")
+        ret_b = ota_tool_factory.create(MockOtaTool.__name__, "command")
+        self.assertEqual(ret_a, ret_b)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
new file mode 100644
index 0000000..09ddad2
--- /dev/null
+++ b/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from antlion.libs.ota.ota_tools import ota_tool
+
+
+class OtaToolTests(unittest.TestCase):
+    """Tests the OtaTool class."""
+
+    def test_init(self):
+        expected_value = "command string"
+        self.assertEqual(ota_tool.OtaTool(expected_value).command, expected_value)
+
+    def test_start_throws_error_on_unimplemented(self):
+        obj = "some object"
+        with self.assertRaises(NotImplementedError):
+            ota_tool.OtaTool("").update(obj)
+
+    def test_end_is_not_abstract(self):
+        obj = "some object"
+        try:
+            ota_tool.OtaTool("").cleanup(obj)
+        except Exception:
+            self.fail("End is not required and should be a virtual function.")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
new file mode 100644
index 0000000..6e8f3d0
--- /dev/null
+++ b/packages/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import unittest
+
+import mock
+
+from antlion.controllers import android_device
+from antlion.libs.ota.ota_runners import ota_runner
+from antlion.libs.ota.ota_tools import update_device_ota_tool
+
+
+def get_mock_android_device(serial="", ssh_connection=None):
+    """Returns a mocked AndroidDevice with a mocked adb/fastboot."""
+    with mock.patch("antlion.controllers.adb.AdbProxy") as adb_proxy, mock.patch(
+        "antlion.controllers.fastboot.FastbootProxy"
+    ) as fb_proxy:
+        adb_proxy.return_value.getprop.return_value = "1.2.3"
+        fb_proxy.return_value.devices.return_value = ""
+        ret = mock.Mock(
+            android_device.AndroidDevice(serial=serial, ssh_connection=ssh_connection)
+        )
+        fb_proxy.reset_mock()
+        return ret
+
+
+class UpdateDeviceOtaToolTest(unittest.TestCase):
+    """Tests for UpdateDeviceOtaTool."""
+
+    def setUp(self):
+        self.sl4a_service_setup_time = ota_runner.SL4A_SERVICE_SETUP_TIME
+        ota_runner.SL4A_SERVICE_SETUP_TIME = 0
+        logging.log_path = "/tmp/log"
+
+    def tearDown(self):
+        ota_runner.SL4A_SERVICE_SETUP_TIME = self.sl4a_service_setup_time
+
+    def test_update(self):
+        ota_package_path = os.path.join(
+            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
+            "dummy_ota_package.zip",
+        )
+        with mock.patch("tempfile.mkdtemp") as mkdtemp, mock.patch(
+            "shutil.rmtree"
+        ) as rmtree, mock.patch("antlion.utils.unzip_maintain_permissions"):
+            mkdtemp.return_value = ""
+            rmtree.return_value = ""
+            device = get_mock_android_device()
+            tool = update_device_ota_tool.UpdateDeviceOtaTool(ota_package_path)
+            runner = mock.Mock(ota_runner.SingleUseOtaRunner(tool, device, "", ""))
+            runner.return_value.android_device = device
+            with mock.patch("antlion.libs.proc.job.run"):
+                tool.update(runner)
+            del tool
+
+    def test_del(self):
+        ota_package_path = os.path.join(
+            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
+            "dummy_ota_package.zip",
+        )
+        with mock.patch("tempfile.mkdtemp") as mkdtemp, mock.patch(
+            "shutil.rmtree"
+        ) as rmtree, mock.patch("antlion.utils.unzip_maintain_permissions"):
+            mkdtemp.return_value = ""
+            rmtree.return_value = ""
+            tool = update_device_ota_tool.UpdateDeviceOtaTool(ota_package_path)
+            del tool
+            self.assertTrue(mkdtemp.called)
+            self.assertTrue(rmtree.called)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/packages/antlion/unit_tests/libs/ota/ota_updater_test.py b/packages/antlion/unit_tests/libs/ota/ota_updater_test.py
new file mode 100644
index 0000000..763500e
--- /dev/null
+++ b/packages/antlion/unit_tests/libs/ota/ota_updater_test.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+import mock
+
+from antlion.libs.ota import ota_updater
+from antlion.libs.ota.ota_runners import ota_runner
+
+
+class MockAndroidDevice(object):
+    def __init__(self, serial):
+        self.serial = serial
+        self.log = mock.Mock()
+        self.take_bug_report = mock.MagicMock()
+
+
+class MockOtaRunner(object):
+    def __init__(self):
+        self.call_count = 0
+        self.should_fail = False
+        self.can_update_value = "CAN_UPDATE_CALLED"
+
+    def set_failure(self, should_fail=True):
+        self.should_fail = should_fail
+
+    def update(self):
+        self.call_count += 1
+        if self.should_fail:
+            raise ota_runner.OtaError
+
+    def can_update(self):
+        return self.can_update_value
+
+    def validate_update(self):
+        pass
+
+
+class OtaUpdaterTests(unittest.TestCase):
+    """Tests the methods in the ota_updater module."""
+
+    def test_initialize(self):
+        user_params = {"a": 1, "b": 2, "c": 3}
+        android_devices = ["x", "y", "z"]
+        with mock.patch(
+            "antlion.libs.ota.ota_runners.ota_runner_factory." "create_from_configs"
+        ) as fn:
+            ota_updater.initialize(user_params, android_devices)
+            for i in range(len(android_devices)):
+                fn.assert_any_call(user_params, android_devices[i])
+            self.assertSetEqual(
+                set(android_devices), set(ota_updater.ota_runners.keys())
+            )
+
+    def test_check_initialization_is_initialized(self):
+        device = MockAndroidDevice("serial")
+        ota_updater.ota_runners = {device: ota_runner.OtaRunner("tool", device)}
+        try:
+            ota_updater._check_initialization(device)
+        except ota_runner.OtaError:
+            self.fail("_check_initialization raised for initialized runner!")
+
+    def test_check_initialization_is_not_initialized(self):
+        device = MockAndroidDevice("serial")
+        ota_updater.ota_runners = {}
+        with self.assertRaises(KeyError):
+            ota_updater._check_initialization(device)
+
+    def test_update_do_not_ignore_failures_and_failures_occur(self):
+        device = MockAndroidDevice("serial")
+        runner = MockOtaRunner()
+        runner.set_failure(True)
+        ota_updater.ota_runners = {device: runner}
+        with self.assertRaises(ota_runner.OtaError):
+            ota_updater.update(device)
+
+    def test_update_ignore_failures_and_failures_occur(self):
+        device = MockAndroidDevice("serial")
+        runner = MockOtaRunner()
+        runner.set_failure(True)
+        ota_updater.ota_runners = {device: runner}
+        try:
+            ota_updater.update(device, ignore_update_errors=True)
+        except ota_runner.OtaError:
+            self.fail("OtaError was raised when errors are to be ignored!")
+
+    def test_can_update(self):
+        device = MockAndroidDevice("serial")
+        runner = MockOtaRunner()
+        ota_updater.ota_runners = {device: runner}
+        self.assertEqual(ota_updater.can_update(device), "CAN_UPDATE_CALLED")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/libs/proc/__init__.py b/packages/antlion/unit_tests/libs/proc/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/proc/__init__.py
rename to packages/antlion/unit_tests/libs/proc/__init__.py
diff --git a/packages/antlion/unit_tests/libs/proc/process_test.py b/packages/antlion/unit_tests/libs/proc/process_test.py
new file mode 100644
index 0000000..d362c9b
--- /dev/null
+++ b/packages/antlion/unit_tests/libs/proc/process_test.py
@@ -0,0 +1,366 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import subprocess
+import unittest
+
+import mock
+
+from antlion.libs.proc.process import Process, ProcessError
+
+
+class FakeThread(object):
+    def __init__(self, target=None):
+        self.target = target
+        self.alive = False
+
+    def _on_start(self):
+        pass
+
+    def start(self):
+        self.alive = True
+        if self._on_start:
+            self._on_start()
+
+    def stop(self):
+        self.alive = False
+
+    def join(self):
+        pass
+
+
+class ProcessTest(unittest.TestCase):
+    """Tests the antlion.libs.proc.process.Process class."""
+
+    def setUp(self):
+        self._Process__start_process = Process._Process__start_process
+
+    def tearDown(self):
+        Process._Process__start_process = self._Process__start_process
+
+    @staticmethod
+    def patch(imported_name, *args, **kwargs):
+        return mock.patch(f"antlion.libs.proc.process.{imported_name}", *args, **kwargs)
+
+    # set_on_output_callback
+
+    def test_set_on_output_callback(self):
+        """Tests that set_on_output_callback sets on_output_callback."""
+        callback = mock.Mock()
+
+        process = Process("cmd").set_on_output_callback(callback)
+        process._on_output_callback()
+
+        self.assertTrue(callback.called)
+
+    # set_on_terminate_callback
+
+    def test_set_on_terminate_callback(self):
+        """Tests that set_on_terminate_callback sets _on_terminate_callback."""
+        callback = mock.Mock()
+
+        process = Process("cmd").set_on_terminate_callback(callback)
+        process._on_terminate_callback()
+
+        self.assertTrue(callback.called)
+
+    # start
+
+    def test_start_raises_if_called_back_to_back(self):
+        """Tests that start raises an exception if it has already been called
+        prior.
+
+        This is required to prevent references to processes and threads from
+        being overwritten, potentially causing ACTS to hang."""
+        process = Process("cmd")
+
+        # Here we need the thread to start the process object.
+        class FakeThreadImpl(FakeThread):
+            def _on_start(self):
+                process._process = mock.Mock()
+
+        with self.patch("Thread", FakeThreadImpl):
+            process.start()
+            expected_msg = "Process has already started."
+            with self.assertRaisesRegex(ProcessError, expected_msg):
+                process.start()
+
+    def test_start_starts_listening_thread(self):
+        """Tests that start starts the _exec_popen_loop function."""
+        process = Process("cmd")
+
+        # Here we need the thread to start the process object.
+        class FakeThreadImpl(FakeThread):
+            def _on_start(self):
+                process._process = mock.Mock()
+
+        with self.patch("Thread", FakeThreadImpl):
+            process.start()
+
+        self.assertTrue(process._listening_thread.alive)
+        self.assertEqual(process._listening_thread.target, process._exec_loop)
+
+    # wait
+
+    def test_wait_raises_if_called_back_to_back(self):
+        """Tests that wait raises an exception if it has already been called
+        prior."""
+        process = Process("cmd")
+        process._process = mock.Mock()
+
+        process.wait(0)
+        expected_msg = "Process is already being stopped."
+        with self.assertRaisesRegex(ProcessError, expected_msg):
+            process.wait(0)
+
+    @mock.patch.object(Process, "_kill_process")
+    def test_wait_kills_after_timeout(self, *_):
+        """Tests that if a TimeoutExpired error is thrown during wait, the
+        process is killed."""
+        process = Process("cmd")
+        process._process = mock.Mock()
+        process._process.wait.side_effect = subprocess.TimeoutExpired("", "")
+
+        process.wait(0)
+
+        self.assertEqual(process._kill_process.called, True)
+
+    @mock.patch("os.getpgid", side_effect=lambda id: id)
+    @mock.patch("os.killpg")
+    def test_sends_signal(self, mock_os, *_):
+        """Tests that signal is sent to process.."""
+        process = Process("cmd")
+        mock_process = mock.Mock()
+        mock_process.pid = -1
+        process._process = mock_process
+
+        process.signal(51641)
+
+        mock_os.assert_called_with(-1, 51641)
+
+    def test_signal_raises_error_on_windows(self, *_):
+        """Tests that signaling is unsupported in windows with appropriate
+        error msg."""
+        process = Process("cmd")
+        mock_inner_process = mock.Mock()
+        mock_inner_process.pid = -1
+        process._process = mock_inner_process
+
+        with mock.patch("antlion.libs.proc.process._on_windows", True):
+            with self.assertRaises(ProcessError):
+                process.signal(51641)
+
+    @mock.patch.object(Process, "_kill_process")
+    def test_wait_sets_stopped_to_true_before_process_kill(self, *_):
+        """Tests that stop() sets the _stopped attribute to True.
+
+        This order is required to prevent the _exec_loop from calling
+        _on_terminate_callback when the user has killed the process.
+        """
+        verifier = mock.Mock()
+        verifier.passed = False
+
+        def test_call_order():
+            self.assertTrue(process._stopped)
+            verifier.passed = True
+
+        process = Process("cmd")
+        process._process = mock.Mock()
+        process._process.poll.return_value = None
+        process._process.wait.side_effect = subprocess.TimeoutExpired("", "")
+        process._kill_process = test_call_order
+
+        process.wait()
+
+        self.assertEqual(verifier.passed, True)
+
+    def test_wait_joins_listening_thread_if_it_exists(self):
+        """Tests wait() joins _listening_thread if it exists."""
+        process = Process("cmd")
+        process._process = mock.Mock()
+        mocked_thread = mock.Mock()
+        process._listening_thread = mocked_thread
+
+        process.wait(0)
+
+        self.assertEqual(mocked_thread.join.called, True)
+
+    def test_wait_clears_listening_thread_if_it_exists(self):
+        """Tests wait() joins _listening_thread if it exists.
+
+        Threads can only be started once, so after wait has been called, we
+        want to make sure we clear the listening thread.
+        """
+        process = Process("cmd")
+        process._process = mock.Mock()
+        process._listening_thread = mock.Mock()
+
+        process.wait(0)
+
+        self.assertEqual(process._listening_thread, None)
+
+    def test_wait_joins_redirection_thread_if_it_exists(self):
+        """Tests wait() joins _listening_thread if it exists."""
+        process = Process("cmd")
+        process._process = mock.Mock()
+        mocked_thread = mock.Mock()
+        process._redirection_thread = mocked_thread
+
+        process.wait(0)
+
+        self.assertEqual(mocked_thread.join.called, True)
+
+    def test_wait_clears_redirection_thread_if_it_exists(self):
+        """Tests wait() joins _listening_thread if it exists.
+
+        Threads can only be started once, so after wait has been called, we
+        want to make sure we clear the listening thread.
+        """
+        process = Process("cmd")
+        process._process = mock.Mock()
+        process._redirection_thread = mock.Mock()
+
+        process.wait(0)
+
+        self.assertEqual(process._redirection_thread, None)
+
+    # stop
+
+    def test_stop_sets_stopped_to_true(self):
+        """Tests that stop() sets the _stopped attribute to True."""
+        process = Process("cmd")
+        process._process = mock.Mock()
+
+        process.stop()
+
+        self.assertTrue(process._stopped)
+
+    def test_stop_sets_stopped_to_true_before_process_kill(self):
+        """Tests that stop() sets the _stopped attribute to True.
+
+        This order is required to prevent the _exec_loop from calling
+        _on_terminate_callback when the user has killed the process.
+        """
+        verifier = mock.Mock()
+        verifier.passed = False
+
+        def test_call_order():
+            self.assertTrue(process._stopped)
+            verifier.passed = True
+
+        process = Process("cmd")
+        process._process = mock.Mock()
+        process._process.poll.return_value = None
+        process._kill_process = test_call_order
+        process._process.wait.side_effect = subprocess.TimeoutExpired("", "")
+
+        process.stop()
+
+        self.assertEqual(verifier.passed, True)
+
+    def test_stop_calls_wait(self):
+        """Tests that stop() also has the functionality of wait()."""
+        process = Process("cmd")
+        process._process = mock.Mock()
+        process.wait = mock.Mock()
+
+        process.stop()
+
+        self.assertEqual(process.wait.called, True)
+
+    # _redirect_output
+
+    def test_redirect_output_feeds_all_lines_to_on_output_callback(self):
+        """Tests that _redirect_output loops until all lines are parsed."""
+        received_list = []
+
+        def appender(line):
+            received_list.append(line)
+
+        process = Process("cmd")
+        process.set_on_output_callback(appender)
+        process._process = mock.Mock()
+        process._process.stdout.readline.side_effect = [b"a\n", b"b\n", b""]
+
+        process._redirect_output()
+
+        self.assertEqual(received_list[0], "a")
+        self.assertEqual(received_list[1], "b")
+        self.assertEqual(len(received_list), 2)
+
+    # __start_process
+
+    def test_start_process_returns_a_popen_object(self):
+        """Tests that a Popen object is returned by __start_process."""
+        with self.patch("subprocess.Popen", return_value="verification"):
+            self.assertEqual(Process._Process__start_process("cmd"), "verification")
+
+    # _exec_loop
+
+    def test_exec_loop_redirections_output(self):
+        """Tests that the _exec_loop function calls to redirect the output."""
+        process = Process("cmd")
+        Process._Process__start_process = mock.Mock()
+
+        with self.patch("Thread", FakeThread):
+            process._exec_loop()
+
+        self.assertEqual(process._redirection_thread.target, process._redirect_output)
+        self.assertEqual(process._redirection_thread.alive, True)
+
+    def test_exec_loop_waits_for_process(self):
+        """Tests that the _exec_loop waits for the process to complete before
+        returning."""
+        process = Process("cmd")
+        Process._Process__start_process = mock.Mock()
+
+        with self.patch("Thread", FakeThread):
+            process._exec_loop()
+
+        self.assertEqual(process._process.wait.called, True)
+
+    def test_exec_loop_loops_if_not_stopped(self):
+        process = Process("1st")
+        Process._Process__start_process = mock.Mock()
+        process._on_terminate_callback = mock.Mock(side_effect=[["2nd"], None])
+
+        with self.patch("Thread", FakeThread):
+            process._exec_loop()
+
+        self.assertEqual(Process._Process__start_process.call_count, 2)
+        self.assertEqual(
+            Process._Process__start_process.call_args_list[0][0], (["1st"],)
+        )
+        self.assertEqual(
+            Process._Process__start_process.call_args_list[1][0], (["2nd"],)
+        )
+
+    def test_exec_loop_does_not_loop_if_stopped(self):
+        process = Process("1st")
+        Process._Process__start_process = mock.Mock()
+        process._on_terminate_callback = mock.Mock(side_effect=["2nd", None])
+        process._stopped = True
+
+        with self.patch("Thread", FakeThread):
+            process._exec_loop()
+
+        self.assertEqual(Process._Process__start_process.call_count, 1)
+        self.assertEqual(
+            Process._Process__start_process.call_args_list[0][0], (["1st"],)
+        )
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/mock_controller.py b/packages/antlion/unit_tests/mock_controller.py
similarity index 100%
rename from src/antlion/unit_tests/mock_controller.py
rename to packages/antlion/unit_tests/mock_controller.py
diff --git a/src/antlion/unit_tests/test_data/1k_2k.raw b/packages/antlion/unit_tests/test_data/1k_2k.raw
similarity index 100%
rename from src/antlion/unit_tests/test_data/1k_2k.raw
rename to packages/antlion/unit_tests/test_data/1k_2k.raw
Binary files differ
diff --git a/packages/antlion/unit_tests/test_suite.py b/packages/antlion/unit_tests/test_suite.py
new file mode 100755
index 0000000..ad578d5
--- /dev/null
+++ b/packages/antlion/unit_tests/test_suite.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+import tempfile
+import unittest
+
+
class TestResult(object):
    """Bookkeeping for one test file executed in a subprocess.

    Attributes:
        test_result: The subprocess.Popen handle running the test file; its
            exit status reports success or failure.
        output_file: The file containing the stderr/stdout for this test.
        test_suite: The unittest.TestSuite used. Useful for debugging.
        test_filename: The *_test.py file that ran in this test.
    """

    def __init__(self, test_result, output_file, test_suite, test_filename):
        self.test_result = test_result
        self.output_file = output_file
        self.test_suite = test_suite
        self.test_filename = test_filename
+
+
def run_all_unit_tests():
    """Discover and run every *_test.py under this file's directory.

    Each test file runs in its own Python subprocess with combined
    stdout/stderr captured to a temp file. Logs of failed or timed-out tests
    are replayed to stderr, followed by a summary.

    Exits the interpreter with status 1 if any test failed or timed out,
    0 otherwise.
    """
    suite = unittest.TestSuite()
    test_files = []
    loader = unittest.TestLoader()
    for root, _, files in os.walk(os.path.dirname(__file__)):
        for filename in files:
            if filename.endswith("_test.py"):
                test_files.append(os.path.join(root, filename))
                try:
                    suite.addTest(loader.discover(root, filename))
                except ImportError as e:
                    if "Start directory is not importable" not in e.args[0]:
                        raise
                    message = ". Did you forget to add an __init__.py file?"
                    # Chain the original error so the traceback stays useful.
                    raise ImportError(e.args[0] + message) from e

    output_dir = tempfile.mkdtemp()

    results = []

    # suite._tests is private API, but it is the only way to iterate the suite
    # in the same order the paths were appended to test_files.
    for index, test in enumerate(suite._tests):
        output_file = os.path.join(output_dir, f"test_{index}.output")

        # The child inherits its own copy of the descriptor, so the parent can
        # close its handle immediately (the original leaked one per test).
        with open(output_file, "w+") as out:
            test_result = subprocess.Popen(
                [sys.executable, test_files[index]],
                stdout=out,
                stderr=subprocess.STDOUT,
            )
        results.append(TestResult(test_result, output_file, test, test_files[index]))

    all_failures = []
    for result in results:
        try:
            # wait() returns the exit status; non-zero means the test failed.
            failures = result.test_result.wait(timeout=60)
            if failures:
                print(f"Failure logs for {result.test_filename}:", file=sys.stderr)
                with open(result.output_file, "r") as out_file:
                    print(out_file.read(), file=sys.stderr)
                all_failures.append(f"{result.test_filename} (failed)")
        except subprocess.TimeoutExpired:
            all_failures.append(f"{result.test_filename} (timed out)")
            print(
                f"The following test timed out: {result.test_filename!r}",
                file=sys.stderr,
            )
            with open(result.output_file, "r") as out_file:
                print(out_file.read(), file=sys.stderr)

    # Prints a summary over all unit tests failed.
    if all_failures:
        print("The following tests failed:", file=sys.stderr)
        for failure in all_failures:
            print("    ", failure, file=sys.stderr)

    sys.exit(int(bool(all_failures)))
+
+
# Allow running the suite directly: `python3 test_suite.py`.
if __name__ == "__main__":
    run_all_unit_tests()
diff --git a/packages/antlion/utils.py b/packages/antlion/utils.py
new file mode 100755
index 0000000..dd23d9d
--- /dev/null
+++ b/packages/antlion/utils.py
@@ -0,0 +1,1177 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import concurrent.futures
+import datetime
+import ipaddress
+import json
+import logging
+import os
+import platform
+import random
+import re
+import signal
+import socket
+import string
+import subprocess
+import time
+import traceback
+import zipfile
+from concurrent.futures import ThreadPoolExecutor
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any
+
+from mobly import signals
+
+from antlion.libs.proc import job
+from antlion.runner import Runner
+
+if TYPE_CHECKING:
+    from antlion.controllers.android_device import AndroidDevice
+    from antlion.controllers.fuchsia_device import FuchsiaDevice
+    from antlion.controllers.utils_lib.ssh.connection import SshConnection
+
+# File name length is limited to 255 chars on some OS, so we need to make sure
+# the file names we output fits within the limit.
+MAX_FILENAME_LEN = 255
+
+# All Fuchsia devices use this suffix for link-local mDNS host names.
+FUCHSIA_MDNS_TYPE = "_fuchsia._udp.local."
+
+# Default max seconds it takes to Duplicate Address Detection to finish before
+# assigning an IPv6 address.
+DAD_TIMEOUT_SEC = 30
+
+
+class ActsUtilsError(Exception):
+    """Generic error raised for exceptions in ACTS utils."""
+
+
# Characters considered safe for generated file names.
ascii_letters_and_digits = string.ascii_letters + string.digits
valid_filename_chars = f"-_.{ascii_letters_and_digits}"

# Nexus-era Android marketing model names — presumably used to recognize
# device models elsewhere; confirm against callers.
models = (
    "sprout",
    "occam",
    "hammerhead",
    "bullhead",
    "razor",
    "razorg",
    "shamu",
    "angler",
    "volantis",
    "volantisg",
    "mantaray",
    "fugu",
    "ryu",
    "marlin",
    "sailfish",
)

# Maps internal Android board/build names to the marketing model names above.
manufacture_name_to_model = {
    "flo": "razor",
    "flo_lte": "razorg",
    "flounder": "volantis",
    "flounder_lte": "volantisg",
    "dragon": "ryu",
}

# Lookup table from a whole-hour GMT offset label to a representative Olson
# (IANA) timezone ID. Used by get_timezone_olson_id().
GMT_to_olson = {
    "GMT-9": "America/Anchorage",
    "GMT-8": "US/Pacific",
    "GMT-7": "US/Mountain",
    "GMT-6": "US/Central",
    "GMT-5": "US/Eastern",
    "GMT-4": "America/Barbados",
    "GMT-3": "America/Buenos_Aires",
    "GMT-2": "Atlantic/South_Georgia",
    "GMT-1": "Atlantic/Azores",
    "GMT+0": "Africa/Casablanca",
    "GMT+1": "Europe/Amsterdam",
    "GMT+2": "Europe/Athens",
    "GMT+3": "Europe/Moscow",
    "GMT+4": "Asia/Baku",
    "GMT+5": "Asia/Oral",
    "GMT+6": "Asia/Almaty",
    "GMT+7": "Asia/Bangkok",
    "GMT+8": "Asia/Hong_Kong",
    "GMT+9": "Asia/Tokyo",
    "GMT+10": "Pacific/Guam",
    "GMT+11": "Pacific/Noumea",
    "GMT+12": "Pacific/Fiji",
    "GMT+13": "Pacific/Tongatapu",
    "GMT-11": "Pacific/Midway",
    "GMT-10": "Pacific/Honolulu",
}
+
+
def abs_path(path):
    """Expand '~' and relative components in a path to an absolute path.

    Args:
        path: The path to expand.

    Returns:
        The absolute path of the input path.
    """
    expanded = os.path.expanduser(path)
    return os.path.abspath(expanded)
+
+
def get_current_epoch_time():
    """Return the current epoch time in milliseconds as an integer."""
    milliseconds = time.time() * 1000
    return int(round(milliseconds))
+
+
def get_current_human_time():
    """Return the current local time as 'MM-DD-YYYY HH:MM:SS '.

    Note the trailing space, preserved for compatibility with callers.
    """
    human_format = "%m-%d-%Y %H:%M:%S "
    return time.strftime(human_format)
+
+
def epoch_to_human_time(epoch_time):
    """Convert a millisecond epoch timestamp to a human-readable string.

    Inverse of get_current_epoch_time with respect to get_current_human_time's
    format.

    Args:
        epoch_time: An integer epoch timestamp in milliseconds.

    Returns:
        A 'MM-DD-YYYY HH:MM:SS ' string, or None when epoch_time is not an
        int or is out of range.
    """
    if not isinstance(epoch_time, int):
        return None
    try:
        moment = datetime.datetime.fromtimestamp(epoch_time / 1000)
    except ValueError:
        return None
    return moment.strftime("%m-%d-%Y %H:%M:%S ")
+
+
def get_timezone_olson_id():
    """Return the Olson ID of the local (non-DST) timezone.

    Returns:
        One of the Olson IDs from the GMT_to_olson table matching the host's
        whole-hour UTC offset.
    """
    offset_hours = int(time.timezone / 3600)
    if offset_hours <= 0:
        key = f"GMT+{-offset_hours}"
    else:
        key = f"GMT-{offset_hours}"
    return GMT_to_olson[key]
+
+
def get_next_device(test_bed_controllers, used_devices):
    """Pick the next unused device from a list of testbed controllers.

    Note: used devices are removed from test_bed_controllers IN PLACE, so the
    caller's list is mutated.

    Args:
        test_bed_controllers: A non-empty list of testbed controllers of one
            type, for example a list of ACTS Android devices.
        used_devices: Devices already in use; may mix controller types.

    Returns:
        The first remaining controller, or None when all are used.

    Raises:
        ValueError: If test_bed_controllers is empty.
    """
    if not test_bed_controllers:
        raise ValueError("test_bed_controllers is empty.")
    remaining = test_bed_controllers
    for used in used_devices:
        if used in remaining:
            remaining.remove(used)
    return remaining[0] if remaining else None
+
+
def find_files(paths, file_predicate):
    """Search directories for files whose name/extension satisfy a predicate.

    Args:
        paths: A directory path or list of directory paths to search.
        file_predicate: Callable (name, ext) -> bool selecting desired files.

    Returns:
        A list of (directory, name, extension) tuples for matching files.
    """
    if not isinstance(paths, list):
        paths = [paths]
    matches = []
    for path in paths:
        # Resolve '~' and relative components before walking.
        root_dir = os.path.abspath(os.path.expanduser(path))
        for dir_path, _, file_names in os.walk(root_dir):
            for file_name in file_names:
                name, ext = os.path.splitext(file_name)
                if file_predicate(name, ext):
                    matches.append((dir_path, name, ext))
    return matches
+
+
def load_config(file_full_path, log_errors=True):
    """Load and parse a JSON config file.

    Args:
        file_full_path: Path to the JSON file.
        log_errors: When True, log a parse failure before re-raising it.

    Returns:
        The parsed JSON object.
    """
    with open(file_full_path, "r") as config_file:
        try:
            return json.load(config_file)
        except Exception as parse_error:
            if log_errors:
                logging.error("Exception error to load %s: %s", config_file, parse_error)
            raise
+
+
def rand_ascii_str(length: int) -> str:
    """Generate a random string of ASCII letters and digits.

    Args:
        length: The number of characters in the string.

    Returns:
        The random string generated.
    """
    alphabet = string.ascii_letters + string.digits
    return "".join(random.choice(alphabet) for _ in range(length))
+
+
def rand_hex_str(length: int) -> str:
    """Generate a random string of hexadecimal digit characters.

    Args:
        length: The number of characters in the string.

    Returns:
        The random string generated.
    """
    return "".join(random.choice(string.hexdigits) for _ in range(length))
+
+
# Thread/Process related functions.
def concurrent_exec(func, param_list):
    """Execute a function with different parameters pseudo-concurrently.

    A map-like helper: each element of param_list (an iterable) is unpacked
    into one call of func on a thread pool. Due to Python's GIL there is no
    true concurrency, so this suits IO-bound tasks.

    Args:
        func: The function that performs a task.
        param_list: A list of iterables, each a set of positional args for
            one call of func.

    Returns:
        Return values in completion order; a call that raised contributes its
        exception object instead of a value.
    """
    with ThreadPoolExecutor(max_workers=30) as executor:
        # Submit every call up front and remember which params produced it.
        futures = {executor.submit(func, *params): params for params in param_list}
        results = []
        for done in concurrent.futures.as_completed(futures):
            try:
                results.append(done.result())
            except Exception as exc:
                print(f"{futures[done]} generated an exception: {traceback.format_exc()}")
                results.append(exc)
        return results
+
+
def exe_cmd(*cmds):
    """Run a command in a new shell and return its stdout.

    Args:
        cmds: Command and arguments; joined with spaces and run via the shell.

    Returns:
        The command's stdout as bytes.

    Raises:
        OSError: If the command wrote anything to stderr.
    """
    command = " ".join(cmds)
    process = subprocess.Popen(
        command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    out, err = process.communicate()
    if err:
        raise OSError(err)
    return out
+
+
def require_sl4a(android_devices):
    """Assert that every given AndroidDevice has an SL4A connection.

    Args:
        android_devices: A list of AndroidDevice objects.

    Raises:
        AssertionError: If any device lacks an established SL4A connection.
    """
    for device in android_devices:
        assert (
            device.droid
        ), f"SL4A connection not established properly on {device.serial}."
+
+
+def _assert_subprocess_running(proc):
+    """Checks if a subprocess has terminated on its own.
+
+    Args:
+        proc: A subprocess returned by subprocess.Popen.
+
+    Raises:
+        ActsUtilsError is raised if the subprocess has stopped.
+    """
+    ret = proc.poll()
+    if ret is not None:
+        out, err = proc.communicate()
+        raise ActsUtilsError(
+            "Process %d has terminated. ret: %d, stderr: %s,"
+            " stdout: %s" % (proc.pid, ret, err, out)
+        )
+
+
def start_standing_subprocess(cmd, check_health_delay=0, shell=True):
    """Launch a long-running subprocess in its own process group.

    Non-blocking; terminate the returned process explicitly with
    stop_standing_subprocess. For short-running commands use exe_cmd, which
    blocks. Optionally waits and verifies the process did not die prematurely.

    Args:
        cmd: string, the command to start the subprocess with.
        check_health_delay: float, seconds to wait before checking that the
            process is still alive. 0 (the default) skips the check.
        shell: Whether to run the command through the shell.

    Returns:
        The started subprocess.Popen object.
    """
    process = subprocess.Popen(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=shell,
        # Own process group so the whole tree can be signalled at once later.
        preexec_fn=os.setpgrp,
    )
    logging.debug("Start standing subprocess with cmd: %s", cmd)
    if check_health_delay > 0:
        time.sleep(check_health_delay)
        _assert_subprocess_running(process)
    return process
+
+
def stop_standing_subprocess(proc, kill_signal=signal.SIGTERM):
    """Terminate a subprocess started by start_standing_subprocess.

    First verifies the process is still running (raising ActsUtilsError if it
    already exited), then signals its whole process group. A PermissionError,
    which only happens on Macs, is caught and ignored.

    Args:
        proc: Subprocess to terminate.
        kill_signal: Signal delivered to the process group.
    """
    logging.debug("Stop standing subprocess %d", proc.pid)
    _assert_subprocess_running(proc)
    try:
        os.killpg(proc.pid, kill_signal)
    except PermissionError:
        pass
+
+
def wait_for_standing_subprocess(proc, timeout=None):
    """Wait for a standing subprocess to finish, or time out.

    Propagates exceptions from subprocess.wait(); subprocess.TimeoutExpired
    is raised if the process did not terminate within the timeout.

    If no exception is raised the subprocess terminated on its own and there
    is no need to call stop_standing_subprocess(). If one is raised, the
    subprocess is still alive: either stop it with stop_standing_subprocess()
    or keep waiting with another wait_for_standing_subprocess() call.

    Args:
        proc: Subprocess to wait for.
        timeout: An integer number of seconds to wait before timing out.
    """
    proc.wait(timeout=timeout)
+
+
def sync_device_time(ad):
    """Sync an Android device's clock and timezone with the host's.

    Disables automatic time and timezone first, then pushes the host's Olson
    timezone ID and current epoch time through SL4A.

    Args:
        ad: The android device to sync time on.
    """
    for setting in ("auto_time", "auto_time_zone"):
        ad.adb.shell(f"settings put global {setting} 0", ignore_status=True)
    droid = ad.droid
    droid.setTimeZone(get_timezone_olson_id())
    droid.setTime(get_current_epoch_time())
+
+
def set_ambient_display(ad, new_state):
    """Toggle "Ambient Display" in Settings->Display.

    Args:
        ad: android device object.
        new_state: True to enable, False to disable.
    """
    doze_value = 1 if new_state else 0
    ad.adb.shell(f"settings put secure doze_enabled {doze_value}")
+
+
def set_location_service(ad, new_state):
    """Turn Location service on/off in Settings->Location.

    Args:
        ad: android device object.
        new_state: False turns location off; True selects "High accuracy".
    """
    # Opt in to Google's network location and location-for-services so the
    # mode change takes effect without user prompts.
    for setting_name in ("network_location_opt_in", "use_location_for_services"):
        ad.adb.shell(
            "content insert --uri "
            " content://com.google.settings/partner --bind "
            f"name:s:{setting_name} --bind value:s:1"
        )
    mode = 3 if new_state else 0
    ad.adb.shell(f"settings put secure location_mode {mode}")
+
+
def parse_ping_ouput(ad, count, out, loss_tolerance=20):
    """Parse ping shell output and decide pass/fail.

    Args:
        ad: Android Device Object (used for logging only).
        count: Number of ICMP packets that were sent.
        out: Shell output text of the ping operation.
        loss_tolerance: Max acceptable packet loss, in percent.

    Returns:
        True if the transmitted/received counts and packet loss are all
        within tolerance; False otherwise (including unparseable output).
    """
    summary = re.search(
        r"(\d+) packets transmitted, (\d+) received, (\d+)% packet loss", out
    )
    if summary is None:
        ad.log.info("Ping failed with %s", out)
        return False

    transmitted = int(summary.group(1))
    received = int(summary.group(2))
    loss_percent = int(summary.group(3))
    required_fraction = (100 - loss_tolerance) * 0.01
    within_tolerance = (
        loss_percent <= loss_tolerance
        and transmitted >= count * required_fraction
        and received >= count * required_fraction
    )
    if not within_tolerance:
        ad.log.error(
            "%s, ping failed with loss more than tolerance %s%%",
            summary.group(0),
            loss_tolerance,
        )
        return False
    ad.log.info("Ping succeed with %s", summary.group(0))
    return True
+
+
def adb_shell_ping(
    ad: AndroidDevice,
    dest_ip: str,
    count: int = 120,
    timeout: int = 200,
    loss_tolerance: int = 20,
) -> bool:
    """Ping a host from an Android device via `adb shell ping`.

    Args:
        ad: Android Device Object.
        dest_ip: hostname or IP address to ping.
        count: Number of ICMP packets to send.
        timeout: Seconds to wait for the whole ping command to complete.
        loss_tolerance: Max acceptable packet loss percentage.

    Returns:
        True when the ping summary is within tolerance; False on ping failure
        or any exception.
    """
    ping_cmd = "ping -W 1"
    if count:
        ping_cmd += f" -c {count}"
    if dest_ip:
        ping_cmd += f" {dest_ip}"
    try:
        ad.log.info("Starting ping test to %s using adb command %s", dest_ip, ping_cmd)
        out = ad.adb.shell(ping_cmd, timeout=timeout, ignore_status=True)
        return bool(parse_ping_ouput(ad, count, out, loss_tolerance))
    except Exception as e:
        ad.log.warning("Ping Test to %s failed with exception %s", dest_ip, e)
        return False
+
+
def zip_directory(zip_name, src_dir):
    """Compress a directory tree into a .zip archive.

    This implementation is thread-safe.

    Args:
        zip_name: str, name of the generated archive
        src_dir: str, path to the source directory
    """
    with zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED) as archive:
        for root, _, file_names in os.walk(src_dir):
            for file_name in file_names:
                full_path = os.path.join(root, file_name)
                # Store paths relative to src_dir inside the archive.
                archive.write(full_path, os.path.relpath(full_path, src_dir))
+
+
def unzip_maintain_permissions(zip_path, extract_location):
    """Unzip a .zip file while maintaining each entry's permissions.

    Args:
        zip_path: The path to the zipped file.
        extract_location: the directory to extract to.
    """
    with zipfile.ZipFile(zip_path, "r") as zip_file:
        for info in zip_file.infolist():
            extracted = zip_file.extract(info.filename, path=extract_location)
            # Unix permission bits live in the upper 16 bits of external_attr.
            os.chmod(extracted, info.external_attr >> 16)
+
+
+def _extract_file(zip_file, zip_info, extract_location):
+    """Extracts a single entry from a ZipFile while maintaining permissions.
+
+    Args:
+        zip_file: A zipfile.ZipFile.
+        zip_info: A ZipInfo object from zip_file.
+        extract_location: The directory to extract to.
+    """
+    out_path = zip_file.extract(zip_info.filename, path=extract_location)
+    perm = zip_info.external_attr >> 16
+    os.chmod(out_path, perm)
+
+
def get_command_uptime(command_regex):
    """Return the elapsed running time of a command matched by pgrep.

    Args:
        command_regex: A pgrep-compatible regex matching the command line.

    Returns:
        The `ps -o etime=` output for the matched pid, or "" if no match.
    """
    pid = job.run(f"pgrep -f {command_regex}").stdout
    if not pid:
        return ""
    return job.run(f'ps -o etime= -p "{pid}"').stdout
+
+
def get_device_process_uptime(adb, process):
    """Return the uptime of a process on an Android device, or "" if absent.

    Args:
        adb: An adb connection exposing shell().
        process: The process name to look up with pidof.
    """
    pid = adb.shell(f"pidof {process}", ignore_status=True)
    if not pid:
        return ""
    return adb.shell(f'ps -o etime= -p "{pid}"')
+
+
def is_valid_ipv4_address(address):
    """Return True if *address* parses as an IPv4 address."""
    try:
        socket.inet_pton(socket.AF_INET, address)
        return True
    except AttributeError:
        # Platform lacks inet_pton; fall back to the laxer inet_aton and
        # additionally require the full dotted-quad form.
        try:
            socket.inet_aton(address)
        except socket.error:
            return False
        return address.count(".") == 3
    except socket.error:
        # Not a valid address.
        return False
+
+
def is_valid_ipv6_address(address):
    """Return True if *address* (optionally with a %scope suffix) is IPv6."""
    bare = address.split("%")[0] if "%" in address else address
    try:
        socket.inet_pton(socket.AF_INET6, bare)
        return True
    except socket.error:
        return False
+
+
def get_interface_ip_addresses(
    comm_channel: AndroidDevice | SshConnection | FuchsiaDevice,
    interface: str,
) -> dict[str, list[str]]:
    """Gets all of the ip addresses, ipv4 and ipv6, associated with a
       particular interface name.

    Args:
        comm_channel: How to send commands to a device.  Can be ssh, adb serial,
            etc.  Must have the run function implemented.
        interface: The interface name on the device, ie eth0

    Returns:
        A dict mapping address categories to lists of addresses:
            ipv4_private: Any 192.168, 172.16, 10, or 169.254 addresses
            ipv4_public: Any IPv4 public addresses
            ipv6_link_local: Any fe80:: addresses
            ipv6_private_local: Any fd00:: addresses
            ipv6_public: Any publicly routable addresses
    """
    # Local imports are used here to prevent cyclic dependency.
    from antlion.controllers.android_device import AndroidDevice
    from antlion.controllers.fuchsia_device import FuchsiaDevice
    from antlion.controllers.utils_lib.ssh.connection import SshConnection

    # Passing the `job` module itself signals "run the command locally".
    is_local = comm_channel == job
    if type(comm_channel) is AndroidDevice:
        # One "<addr>" per line; awk strips the /prefix from field 4.
        addrs = comm_channel.adb.shell(
            f'ip -o addr show {interface} | awk \'{{gsub("/", " "); print $4}}\''
        ).splitlines()
    elif type(comm_channel) is SshConnection or is_local:
        addrs = comm_channel.run(
            f'ip -o addr show {interface} | awk \'{{gsub("/", " "); print $4}}\''
        ).stdout.splitlines()
    elif type(comm_channel) is FuchsiaDevice:
        interfaces = comm_channel.sl4f.netstack_lib.netstackListInterfaces()
        err = interfaces.get("error")
        if err is not None:
            raise ActsUtilsError(f"Failed get_interface_ip_addresses: {err}")
        addrs = []
        for item in interfaces.get("result"):
            if item["name"] != interface:
                continue
            for ipv4_address in item["ipv4_addresses"]:
                # SL4F returns IPv4 as a list of octet ints.
                ipv4_address = ".".join(map(str, ipv4_address))
                addrs.append(ipv4_address)
            for ipv6_address in item["ipv6_addresses"]:
                # SL4F returns IPv6 as 16 byte ints; hex-encode each byte,
                # then re-group into the colon-separated hextet form.
                converted_ipv6_address = []
                for octet in ipv6_address:
                    converted_ipv6_address.append(format(octet, "x").zfill(2))
                ipv6_address = "".join(converted_ipv6_address)
                ipv6_address = ":".join(
                    ipv6_address[i : i + 4] for i in range(0, len(ipv6_address), 4)
                )
                # ip_address() canonicalizes (e.g. collapses zero runs).
                addrs.append(str(ipaddress.ip_address(ipv6_address)))
    else:
        raise ValueError("Unsupported method to send command to device.")

    ipv4_private_local_addresses = []
    ipv4_public_addresses = []
    ipv6_link_local_addresses = []
    ipv6_private_local_addresses = []
    ipv6_public_addresses = []

    # Bucket every address into exactly one category.
    for addr in addrs:
        on_device_ip = ipaddress.ip_address(addr)
        if on_device_ip.version == 4:
            if on_device_ip.is_private:
                ipv4_private_local_addresses.append(str(on_device_ip))
            elif on_device_ip.is_global or (
                # Carrier private doesn't have a property, so we check if
                # all other values are left unset.
                not on_device_ip.is_reserved
                and not on_device_ip.is_unspecified
                and not on_device_ip.is_link_local
                and not on_device_ip.is_loopback
                and not on_device_ip.is_multicast
            ):
                ipv4_public_addresses.append(str(on_device_ip))
        elif on_device_ip.version == 6:
            if on_device_ip.is_link_local:
                ipv6_link_local_addresses.append(str(on_device_ip))
            elif on_device_ip.is_private:
                ipv6_private_local_addresses.append(str(on_device_ip))
            elif on_device_ip.is_global:
                ipv6_public_addresses.append(str(on_device_ip))

    return {
        "ipv4_private": ipv4_private_local_addresses,
        "ipv4_public": ipv4_public_addresses,
        "ipv6_link_local": ipv6_link_local_addresses,
        "ipv6_private_local": ipv6_private_local_addresses,
        "ipv6_public": ipv6_public_addresses,
    }
+
+
class AddressTimeout(signals.TestError):
    """Raised by get_addr when no matching address appears in time."""

    pass
+
+
class MultipleAddresses(signals.TestError):
    """Raised by get_addr when more than one matching address is found."""

    pass
+
+
def get_addr(
    comm_channel: AndroidDevice | SshConnection | FuchsiaDevice,
    interface: str,
    addr_type: str = "ipv4_private",
    timeout_sec: int | None = None,
) -> str:
    """Get the requested type of IP address for an interface; if an address is
    not available, retry until the timeout has been reached.

    Args:
        addr_type: Type of address to get as defined by the return value of
            utils.get_interface_ip_addresses.
        timeout_sec: Seconds to wait to acquire an address if there isn't one
            already available. If fetching an IPv4 address, the default is 3
            seconds. If IPv6, the default is 30 seconds for Duplicate Address
            Detection.

    Returns:
        A string containing the requested address.

    Raises:
        TestAbortClass: timeout_sec is None and invalid addr_type
        AddressTimeout: No address is available after timeout_sec
        MultipleAddresses: Several addresses are available
    """
    # NOTE(review): `not timeout_sec` also treats an explicit timeout_sec=0 as
    # "use the default" — confirm that is the intended semantics.
    if not timeout_sec:
        if "ipv4" in addr_type:
            timeout_sec = 3
        elif "ipv6" in addr_type:
            timeout_sec = DAD_TIMEOUT_SEC
        else:
            raise signals.TestAbortClass(f'Unknown addr_type "{addr_type}"')

    start = time.time()
    elapsed = 0

    # NOTE(review): busy-wait — each iteration queries the device again with
    # no sleep in between; consider a short delay between polls.
    while elapsed <= timeout_sec:
        ip_addrs = get_interface_ip_addresses(comm_channel, interface)[addr_type]
        if len(ip_addrs) > 1:
            raise MultipleAddresses(
                f'Expected only one "{addr_type}" address, got {ip_addrs}'
            )
        elif len(ip_addrs) == 1:
            return ip_addrs[0]
        elapsed = time.time() - start

    raise AddressTimeout(f'No available "{addr_type}" address after {timeout_sec}s')
+
+
def get_interface_based_on_ip(comm_channel: Any, desired_ip_address: str) -> str:
    """Gets the interface that carries a particular IP address.

    Args:
        comm_channel: How to send commands to a device.  Can be ssh, adb serial,
            etc.  Must have the run function implemented.
        desired_ip_address: The IP address that is being looked for on a device.

    Returns:
        The name of the interface holding desired_ip_address.

    Raises:
        RuntimeError: when desired_ip_address is not found
    """
    # Strip any IPv6 zone index ("fe80::1%eth0" -> "fe80::1").
    desired_ip_address = desired_ip_address.split("%", 1)[0]
    # Each output line is "<interface> <address>/<prefix>".
    all_ips_and_interfaces = comm_channel.run(
        "(ip -o -4 addr show; ip -o -6 addr show) | " "awk '{print $2\" \"$4}'"
    ).stdout
    for ip_address_and_interface in all_ips_and_interfaces.split("\n"):
        if desired_ip_address in ip_address_and_interface:
            # BUG FIX: the interface name is the FIRST field of the awk
            # output; the previous code returned split()[1][:-1] — the
            # address minus its last character — which never matched the
            # documented "name of the test interface" return value.
            return ip_address_and_interface.split()[0]
    raise RuntimeError(
        f'IP "{desired_ip_address}" not found in list: \n{all_ips_and_interfaces}'
    )
+
+
def renew_linux_ip_address(runner: Runner, interface: str) -> None:
    """Bounce an interface and re-request a DHCP lease on a Linux host.

    Args:
        runner: Executes shell commands on the target host.
        interface: Network interface name, e.g. "eth0".
    """
    for command in (
        f"sudo ip link set {interface} down",
        f"sudo ip link set {interface} up",
        f"sudo dhclient -r {interface}",
        f"sudo dhclient {interface}",
    ):
        runner.run(command)
+
+
def get_ping_command(
    dest_ip: str,
    count: int = 3,
    interval: int = 1000,
    timeout: int = 1000,
    size: int = 56,
    os_type: str = "Linux",
    additional_ping_params: str = "",
) -> str:
    """Build a ping/ping6 command line for a Linux or MacOS source host.

    Args:
        dest_ip: address to ping (ipv4 or ipv6)
        count: number of echo requests to send
        interval: time in milliseconds between requests
        timeout: time in milliseconds to wait for a response
        size: payload size in bytes
        os_type: OS of the source device ('Linux' or 'Darwin')
        additional_ping_params: extra option flags appended verbatim

    Returns:
        The assembled ping command string.

    Raises:
        ValueError: for an invalid address or unsupported os_type.
    """
    if is_valid_ipv4_address(dest_ip):
        ping_binary = "ping"
    elif is_valid_ipv6_address(dest_ip):
        ping_binary = "ping6"
    else:
        raise ValueError(f"Invalid ip addr: {dest_ip}")

    if os_type == "Darwin":
        if ping_binary == "ping6":
            # ping6 on MacOS doesn't support timeout
            logging.debug("Ignoring timeout, as ping6 on MacOS does not support it.")
            timeout_flag = []
        else:
            timeout_flag = ["-t", str(timeout / 1000)]
    elif os_type == "Linux":
        timeout_flag = ["-W", str(timeout / 1000)]
    else:
        raise ValueError("Invalid OS.  Only Linux and MacOS are supported.")

    parts = [ping_binary]
    parts.extend(timeout_flag)
    parts.extend(["-c", str(count), "-i", str(interval / 1000), "-s", str(size)])
    parts.append(additional_ping_params)
    parts.append(dest_ip)
    return " ".join(parts)
+
+
def ping(
    comm_channel: Runner,
    dest_ip: str,
    count: int = 3,
    interval: int = 1000,
    timeout: int = 1000,
    size: int = 56,
    additional_ping_params: str = "",
) -> PingResult:
    """Generic linux ping function, supports local (acts.libs.proc.job) and
    SshConnections (acts.libs.proc.job over ssh) to Linux based OSs and MacOS.

    NOTES: This will work with Android over SSH, but does not function over ADB
    as that has a unique return format.

    Args:
        comm_channel: communication channel over which to send ping command.
            Must have 'run' function that returns at least command, stdout,
            stderr, and exit_status (see acts.libs.proc.job)
        dest_ip: address to ping (ipv4 or ipv6)
        count: int, number of packets to send
        interval: int, time in milliseconds between pings
        timeout: int, time in milliseconds to wait for response
        size: int, size of packets in bytes
        additional_ping_params: string, command option flags to
            append to the command string

    Returns:
        A PingResult containing:
            exit_status: int, 0 on success
            stdout: string, raw ping output
            stderr: string, raw ping error output
            transmitted: int, number of packets transmitted
            received: int, number of packets received
            time_ms: float, time reported by ping's summary line
            rtt_min_ms: float, minimum round trip time
            rtt_avg_ms: float, average round trip time
            rtt_max_ms: float, maximum round trip time
            rtt_mdev_ms: float, round trip time standard deviation

        Any values that cannot be parsed are left as None

    Raises:
        ValueError: when comm_channel is neither the local job module nor an
            SshConnection
    """
    # Deferred import — presumably avoids a circular dependency with the ssh
    # controller package; confirm before moving to module level.
    from antlion.controllers.utils_lib.ssh.connection import SshConnection

    # Passing the job module itself marks a local run; remote runs are
    # assumed to target Linux.
    is_local = comm_channel == job
    os_type = platform.system() if is_local else "Linux"
    ping_cmd = get_ping_command(
        dest_ip,
        count=count,
        interval=interval,
        timeout=timeout,
        size=size,
        os_type=os_type,
        additional_ping_params=additional_ping_params,
    )

    if isinstance(comm_channel, SshConnection) or is_local:
        logging.debug(
            "Running ping with parameters (count: %s, interval: %s, "
            "timeout: %s, size: %s)",
            count,
            interval,
            timeout,
            size,
        )
        # ignore_status: a failed ping is a valid result, not an error.
        ping_result = comm_channel.run(ping_cmd, ignore_status=True)
    else:
        raise ValueError(f"Unsupported comm_channel: {type(comm_channel)}")

    if isinstance(ping_result, job.Error):
        ping_result = ping_result.result

    # Matches the summary line, e.g.
    # "3 packets transmitted, 3 received, 0% packet loss, time 2003ms".
    summary = re.search(
        "([0-9]+) packets transmitted.*?([0-9]+) received.*?([0-9]+)% packet "
        "loss.*?time ([0-9]+)",
        ping_result.stdout,
    )
    # Matches the rtt line, e.g. "rtt min/avg/max/mdev = 0.1/0.2/0.3/0.01 ms".
    rtt_stats = re.search(
        "= ([0-9.]+)/([0-9.]+)/([0-9.]+)/([0-9.]+)", ping_result.stdout
    )
    return PingResult(
        exit_status=ping_result.returncode,
        stdout=ping_result.stdout,
        stderr=ping_result.stderr,
        transmitted=int(summary.group(1)) if summary else None,
        received=int(summary.group(2)) if summary else None,
        # NOTE(review): ping reports "time" in milliseconds; dividing by 1000
        # yields seconds despite the field's "_ms" suffix — verify intent.
        time_ms=float(summary.group(4)) / 1000 if summary else None,
        rtt_min_ms=float(rtt_stats.group(1)) if rtt_stats else None,
        rtt_avg_ms=float(rtt_stats.group(2)) if rtt_stats else None,
        rtt_max_ms=float(rtt_stats.group(3)) if rtt_stats else None,
        rtt_mdev_ms=float(rtt_stats.group(4)) if rtt_stats else None,
    )
+
+
+@dataclass
+class PingResult:
+    exit_status: int
+    stdout: str
+    stderr: str
+    transmitted: int | None
+    received: int | None
+    time_ms: float | None
+    rtt_min_ms: float | None
+    rtt_avg_ms: float | None
+    rtt_max_ms: float | None
+    rtt_mdev_ms: float | None
+
+    @property
+    def success(self) -> bool:
+        return self.exit_status == 0
+
+
def ip_in_subnet(ip, subnet):
    """Validate that ip is in a given subnet.

    Args:
        ip: string, ip address to verify (eg. '192.168.42.158')
        subnet: string, subnet to check (eg. '192.168.42.0/24')

    Returns:
        True, if ip in subnet, else False
    """
    network = ipaddress.ip_network(subnet)
    return ipaddress.ip_address(ip) in network
+
+
def mac_address_str_to_list(mac_addr_str):
    """Converts a mac address string to a list of decimal octets.

    Args:
        mac_addr_str: string, mac address
            e.g. '12:34:56:78:9a:bc'

    Returns
        list, representing mac address octets in decimal
            e.g. [18, 52, 86, 120, 154, 188]
    """
    octets = []
    for octet in mac_addr_str.split(":"):
        # Each colon-separated field is a base-16 octet.
        octets.append(int(octet, 16))
    return octets
+
+
def mac_address_list_to_str(mac_addr_list: bytes) -> str:
    """Converts a sequence of decimal octets to a mac address string.

    Args:
        mac_addr_list: mac address octets in decimal
            e.g. [18, 52, 86, 120, 154, 188]

    Returns:
        string, mac address
            e.g. '12:34:56:78:9a:bc'
    """
    # "02x" renders each octet as lowercase hex, zero-padded to two digits.
    return ":".join(format(octet, "02x") for octet in mac_addr_list)
+
+
def get_fuchsia_mdns_ipv6_address(device_mdns_name):
    """Finds the IPv6 link-local address of a Fuchsia device matching a mDNS
    name.

    Args:
        device_mdns_name: name of Fuchsia device (e.g. gig-clone-sugar-slash)

    Returns:
        string, IPv6 link-local address (scoped, e.g. "fe80::...%eth0"), or
        None if no name was given or no matching, pingable device was found
    """
    # Deferred imports: psutil and zeroconf belong to the optional "mdns"
    # extra, so only require them when this function is actually used.
    import psutil
    from zeroconf import IPVersion, Zeroconf

    if not device_mdns_name:
        return None

    def mdns_query(interface, address):
        """Queries one local link-local address for the device; returns a
        scoped IPv6 address string, or None when not found/reachable."""
        logging.info(
            f'Sending mDNS query for device "{device_mdns_name}" using "{address}"'
        )
        try:
            zeroconf = Zeroconf(ip_version=IPVersion.V6Only, interfaces=[address])
        except RuntimeError as e:
            if "No adapter found for IP address" in e.args[0]:
                # Most likely, a device went offline and its control
                # interface was deleted. This is acceptable since the
                # device that went offline isn't guaranteed to be the
                # device we're searching for.
                logging.warning(f'No adapter found for "{address}"')
                return None
            raise

        device_records = zeroconf.get_service_info(
            FUCHSIA_MDNS_TYPE, f"{device_mdns_name}.{FUCHSIA_MDNS_TYPE}"
        )

        if device_records:
            for device_address in device_records.parsed_addresses():
                device_ip_address = ipaddress.ip_address(device_address)
                # Scope the address to the interface we queried on.
                scoped_address = f"{device_address}%{interface}"
                # Only accept IPv6 link-local addresses that answer a ping.
                if (
                    device_ip_address.version == 6
                    and device_ip_address.is_link_local
                    and ping(job, dest_ip=scoped_address).success
                ):
                    logging.info(
                        f'Found device "{device_mdns_name}" at "{scoped_address}"'
                    )
                    zeroconf.close()
                    del zeroconf
                    return scoped_address

        zeroconf.close()
        del zeroconf
        return None

    # Query every candidate link-local interface in parallel; the first
    # future (in submission order) that resolved wins.
    with ThreadPoolExecutor() as executor:
        futures = []

        interfaces = psutil.net_if_addrs()
        for interface in interfaces:
            for addr in interfaces[interface]:
                # Strip any zone index before classifying the address.
                address = addr.address.split("%")[0]
                # "fe80::1" is excluded — presumably the host's own loopback
                # link-local address; confirm if this holds on all platforms.
                if (
                    addr.family == socket.AF_INET6
                    and ipaddress.ip_address(address).is_link_local
                    and address != "fe80::1"
                ):
                    futures.append(executor.submit(mdns_query, interface, address))

        for future in futures:
            addr = future.result()
            if addr:
                return addr

    logging.error(f'Unable to find IP address for device "{device_mdns_name}"')
    return None
+
+
def get_device(devices, device_type):
    """Finds a unique device with the specified "device_type" attribute from a
    list. If none is found, defaults to the first device in the list.

    Example:
        get_device(android_devices, device_type="DUT")
        get_device(fuchsia_devices, device_type="DUT")
        get_device(android_devices + fuchsia_devices, device_type="DUT")

    Args:
        devices: A list of device controller objects.
        device_type: (string) Type of device to find, specified by the
            "device_type" attribute.

    Returns:
        The single matching device controller object, or the first device in
        the list when no device matches.

    Raises:
        ValueError: when the list is empty, or when more than one device
            matches "device_type".
    """
    if not devices:
        raise ValueError("No devices available")

    matches = []
    for device in devices:
        if hasattr(device, "device_type") and device.device_type == device_type:
            matches.append(device)

    if not matches:
        # Nothing declared the requested "device_type"; fall back to the
        # first device in the list.
        return devices[0]
    if len(matches) > 1:
        # Multiple devices sharing one "device_type" is a configuration
        # error — the choice would be ambiguous.
        raise ValueError(
            f'More than one device matching "device_type" == "{device_type}"'
        )

    return matches[0]
diff --git a/packages/antlion/validation.py b/packages/antlion/validation.py
new file mode 100644
index 0000000..b985eb5
--- /dev/null
+++ b/packages/antlion/validation.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from typing import Collection, Literal, Mapping, TypeGuard, TypeVar, overload
+
+from mobly import signals
+
+
class ValidatorError(signals.TestAbortClass):
    """Base error for config validation, raised as a Mobly TestAbortClass
    signal."""

    pass
+
+
class FieldNotFoundError(ValidatorError):
    """A required field is missing from the validated mapping."""

    pass
+
+
class FieldTypeError(ValidatorError):
    """A field's value does not have the expected type."""

    pass
+
+
+T = TypeVar("T")
+
+
+class _NO_DEFAULT:
+    pass
+
+
class MapValidator:
    """Wraps a mapping to provide type-validated access to its values.

    Raises ValidatorError subclasses (which abort the test class) when a
    required key is absent or a value has an unexpected type.
    """

    def __init__(self, map: Mapping[str, object]) -> None:
        self.map = map

    @overload
    def get(self, type: type[T], key: str, default: None) -> T | None: ...

    @overload
    def get(
        self, type: type[T], key: str, default: T | _NO_DEFAULT = _NO_DEFAULT()
    ) -> T: ...

    def get(
        self, type: type[T], key: str, default: T | None | _NO_DEFAULT = _NO_DEFAULT()
    ) -> T | None:
        """Access the map requiring a value type at the specified key.

        If default is set and the map does not contain the specified key, the
        default will be returned.

        Args:
            type: Expected type of the value
            key: Key to index into the map with
            default: Default value when the map does not contain key

        Returns:
            Value of the expected type, or None if default is None.

        Raises:
            FieldNotFound: when default is not set and the map does not contain
                the specified key
            FieldTypeError: when the value at the specified key is not the
                expected type
        """
        # NOTE: "type" is the expected-value type parameter; it shadows the
        # builtin within this method.
        if key not in self.map:
            # A caller-supplied default is recognized by being an instance of
            # the expected type (or an explicit None); the _NO_DEFAULT
            # sentinel matches neither, so the field is treated as required.
            if isinstance(default, type) or default is None:
                return default
            raise FieldNotFoundError(
                f'Required field "{key}" is missing; expected {type.__name__}'
            )
        val = self.map[key]
        # Pass an explicit null through when the caller allows it by using
        # default=None.
        if val is None and default is None:
            return None
        if not isinstance(val, type):
            raise FieldTypeError(
                f'Expected "{key}" to be {type.__name__}, got {describe_type(val)}'
            )
        return val

    @overload
    def list(self, key: str) -> ListValidator: ...

    @overload
    def list(self, key: str, optional: Literal[False]) -> ListValidator: ...

    @overload
    def list(self, key: str, optional: Literal[True]) -> ListValidator | None: ...

    def list(self, key: str, optional: bool = False) -> ListValidator | None:
        """Access the map requiring a list at the specified key.

        If optional is True and the map does not contain the specified key, None
        will be returned.

        Args:
            key: Key to index into the map with
            optional: If True, will return None if the map does not contain key

        Returns:
            ListValidator or None if optional is True.

        Raises:
            FieldNotFound: when optional is False and the map does not contain
                the specified key
            FieldTypeError: when the value at the specified key is not a list
        """
        if key not in self.map:
            if optional:
                return None
            raise FieldNotFoundError(
                f'Required field "{key}" is missing; expected list'
            )
        return ListValidator(key, self.get(list, key))
+
+
class ListValidator:
    """Validates the element types of a list pulled out of a MapValidator."""

    def __init__(self, name: str, val: list[object]) -> None:
        self.name = name
        self.val = val

    def all(self, type: type[T]) -> list[T]:
        """Access the list requiring all elements to be the specified type.

        Args:
            type: Expected type of all elements

        Raises:
            FieldTypeError: when an element is not the expected type
        """
        if is_list_of(self.val, type):
            return self.val
        raise FieldTypeError(
            f'Expected "{self.name}" to be list[{type.__name__}], '
            f"got {describe_type(self.val)}"
        )
+
+
def describe_type(o: object) -> str:
    """Describe the complete type of the object.

    Different from type() by recursing when a mapping or collection is found.
    """
    if isinstance(o, Mapping):
        key_desc = " | ".join({describe_type(k) for k in o.keys()})
        val_desc = " | ".join({describe_type(v) for v in o.values()})
        return f"dict[{key_desc}, {val_desc}]"
    # Strings are collections of characters; report them as plain "str".
    if isinstance(o, Collection) and not isinstance(o, str):
        element_desc = " | ".join({describe_type(e) for e in o})
        return f"list[{element_desc}]"
    return type(o).__name__
+
+
def is_list_of(val: list[object], type: type[T]) -> TypeGuard[list[T]]:
    """TypeGuard: True when every element of val is an instance of type."""
    for element in val:
        if not isinstance(element, type):
            return False
    return True
diff --git a/pyproject.toml b/pyproject.toml
index c0fa915..8e6482a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,66 +8,149 @@
 name = "antlion"
 description = "Host-driven, hardware-agnostic Fuchsia connectivity tests"
 requires-python = ">=3.8"
-license = {text = "Apache-2.0"}
+license = { text = "Apache-2.0" }
 dynamic = ["version"]
 readme = "README.md"
-dependencies = [
-    "mobly==1.12.0",
-    "pyyaml>=5.1",
-    "tenacity~=8.0",
-]
+dependencies = ["mobly==1.12.2", "pyyaml>=5.1", "tenacity~=8.0"]
 
 [project.optional-dependencies]
 # Required to support development tools
 dev = [
-    "shiv",    # packaging python
-    "toml",    # required for pyproject.toml
-    "vulture", # finds unused code
-    "yapf",    # code formatting
+    "autoflake~=2.1", # remove unused code
+    "black~=23.3",    # code formatting
+    "isort~=5.12",    # import sorting
+    "mock~=5.0",      # required for unit tests
+    "mypy~=1.8",      # static type checking
+    "shiv~=1.0",      # packaging python
+    "toml==0.10.2",   # required for pyproject.toml
+    "vulture~=2.11",   # finds unused code
+
+    # Library stubs for type checking
+    "types-PyYAML~=6.0",
+    "types-mock~=5.0",
+    "types-psutil~=5.9",
 ]
 digital_loggers_pdu = ["dlipower"]
-bluetooth = ["soundfile"]
 html_graphing = ["bokeh"]
 flash = ["usbinfo"]
 mdns = ["psutil", "zeroconf"]
-android = [
-    "Monsoon",
-    "numpy",
-    "paramiko[ed25519]",
-    "pylibftdi",
-    "pyserial",
-    "requests",
-    "scapy",
-    "scipy",
-]
-
-[project.scripts]
-antlion = "antlion.bin.act:main"
+android = ["numpy", "scapy"]
 
 [tool.setuptools.packages.find]
-where = ["src"]
+where = ["packages"]
 
 [tool.autoflake]
 imports = [
-    "Monsoon",
     "antlion",
     "dataclasses",
     "dlipower",
     "mobly",
     "mock",
     "numpy",
-    "paramiko",
-    "protobuf",
-    "pylibftdi",
-    "requests",
     "scapy",
-    "scipy",
     "tenacity",
     "usbinfo",
     "zeroconf",
 ]
 
-[tools.vulture]
-paths = ["src"]
+[tool.isort]
+profile = "black"
+known_local_folder = ["antlion"]
+
+[tool.mypy]
+mypy_path = "stubs, $FUCHSIA_DIR/src/testing/end_to_end/honeydew, $FUCHSIA_DIR/src/developer/ffx/lib/fuchsia-controller/python"
+python_version = "3.11"
+
+#
+# Disallow dynamic typing
+#
+
+#disallow_any_unimported = true
+#disallow_any_expr = true
+#disallow_any_decorated = true
+#disallow_any_explicit = true
+#disallow_any_generics = true
+disallow_subclassing_any = true
+
+#
+# Untyped definitions and calls
+#
+
+#disallow_untyped_calls = true
+#disallow_untyped_defs = true
+#disallow_incomplete_defs = true
+check_untyped_defs = true
+disallow_untyped_decorators = true
+
+#
+# Configuring warnings
+#
+
+warn_redundant_casts = true
+#warn_unused_ignores = true
+warn_no_return = true
+#warn_return_any = true
+#warn_unreachable = true
+
+#
+# Miscellaneous strictness flags
+#
+
+strict_equality = true
+
+exclude = [
+    # TODO(http://b/285950835): Fix typing of relevant Fuchsia-related utilities and
+    # tests
+    "packages/antlion/unit_tests/",
+    "tests/netstack/NetstackIfaceTest.py",
+    "tests/wlan/functional/DownloadStressTest.py",
+    "tests/wlan/functional/WlanWirelessNetworkManagementTest.py",
+    "tests/wlan/performance/WlanWmmTest.py",
+
+    # TODO(http://b/274619290): Remove the following files when the migration from ACTS
+    # to Mobly is complete.
+    "packages/antlion/base_test.py",
+    "packages/antlion/context.py",
+    "packages/antlion/libs/yaml_writer.py",
+
+    # TODO(http://b/285950976): Fix typing of non-Fuchsia related controllers and test
+    # utilities, or remove if no longer relevant.
+    "packages/antlion/controllers/adb.py",
+    "packages/antlion/controllers/android_device.py",
+    "packages/antlion/controllers/iperf_server.py",
+    "packages/antlion/controllers/packet_sender.py",
+    "packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py",
+    "packages/antlion/controllers/sl4a_lib/event_dispatcher.py",
+    "packages/antlion/controllers/sl4a_lib/rpc_client.py",
+    "packages/antlion/controllers/sl4a_lib/sl4a_manager.py",
+    "packages/antlion/controllers/sniffer_lib/local/tcpdump.py",
+    "packages/antlion/controllers/sniffer_lib/local/tshark.py",
+    "packages/antlion/libs/logging/log_stream.py",
+    "packages/antlion/libs/ota/ota_runners/ota_runner_factory.py",
+    "packages/antlion/libs/ota/ota_tools/ota_tool_factory.py",
+    "packages/antlion/libs/proc/job.py",
+    "packages/antlion/libs/proc/process.py",
+    "packages/antlion/utils.py",
+    "setup.py",
+
+    "stubs/mobly/",
+]
+
+[[tool.mypy.overrides]]
+module = [
+    "png",
+    "fidl.*",
+    "fuchsia_controller_internal",
+]
+ignore_missing_imports = true
+
+[[tool.mypy.overrides]]
+module = [
+    "fidl.*",
+]
+ignore_errors = true
+
+[tool.vulture]
+paths = ["packages", "tests"]
 sort_by_size = true
 min_confidence = 80
diff --git a/runner/BUILD.gn b/runner/BUILD.gn
new file mode 100644
index 0000000..2dfc97f
--- /dev/null
+++ b/runner/BUILD.gn
@@ -0,0 +1,50 @@
+import("//build/rust/rustc_binary.gni")
+import("//build/rust/rustc_test.gni")
+
+rustc_binary("runner") {
+  output_name = "antlion-runner"
+  edition = "2021"
+  with_unit_tests = true
+
+  deps = [
+    "//src/developer/ffx/lib/netext:lib($host_toolchain)",
+    "//src/lib/mdns/rust:mdns",
+    "//src/lib/network/packet",
+    "//third_party/rust_crates:anyhow",
+    "//third_party/rust_crates:argh",
+    "//third_party/rust_crates:home",
+    "//third_party/rust_crates:itertools",
+    "//third_party/rust_crates:lazy_static",
+    "//third_party/rust_crates:libc",
+    "//third_party/rust_crates:serde",
+    "//third_party/rust_crates:serde_json",
+    "//third_party/rust_crates:serde_yaml",
+    "//third_party/rust_crates:socket2",
+    "//third_party/rust_crates:thiserror",
+  ]
+
+  test_deps = [
+    "//third_party/rust_crates:assert_matches",
+    "//third_party/rust_crates:indoc",
+    "//third_party/rust_crates:pretty_assertions",
+    "//third_party/rust_crates:tempfile",
+  ]
+
+  sources = [
+    "src/config.rs",
+    "src/driver/infra.rs",
+    "src/driver/local.rs",
+    "src/driver/mod.rs",
+    "src/env.rs",
+    "src/finder.rs",
+    "src/main.rs",
+    "src/net.rs",
+    "src/runner.rs",
+    "src/yaml.rs",
+  ]
+}
+
+group("tests") {
+  testonly = true
+  deps = [ ":runner_test($host_toolchain)" ]
+}
diff --git a/runner/README.md b/runner/README.md
new file mode 100644
index 0000000..45c926a
--- /dev/null
+++ b/runner/README.md
@@ -0,0 +1,42 @@
+# antlion-runner
+
+A program to run antlion locally and in infrastructure. Includes a config
+generator with mDNS discovery and sensible defaults.
+
+## Using locally with an emulator
+
+Running antlion locally with a Fuchsia emulator allows developers to perform a
+sanity checks on their changes. Running this way is very quick (~5 seconds) and
+can spot simple mistakes before code review!
+
+1. Build Fuchsia with antlion support
+   ```sh
+   jiri update -gc # if you haven't updated in a while
+   fx set workstation_eng_paused.qemu-x64 \
+      --with-host //third_party/antlion:e2e_tests \
+      --with-host //third_party/antlion:tests \
+      --with //src/testing/sl4f
+   fx build # if you haven't built in a while
+   ```
+2. Start the package server. Keep this running in the background.
+   ```sh
+   fx serve
+   ```
+3. In a separate terminal, start the emulator with access to external networks.
+   ```sh
+   ffx emu stop && ffx emu start -H --net tap && ffx log
+   ```
+4. In a separate terminal, run a test
+   ```sh
+   fx test --e2e --output //third_party/antlion:sl4f_sanity_test
+   ```
+
+> Note: Local testing with auxiliary devices is not supported by antlion runner.
+Use antlion directly for these cases; see the antlion [README](../README.md).
+
+## Testing
+
+```sh
+fx set core.qemu-x64 --with //vendor/google/build/python/antlion/runner:tests
+fx test --output //vendor/google/build/python/antlion/runner:runner_test
+```
diff --git a/runner/src/config.rs b/runner/src/config.rs
new file mode 100644
index 0000000..b42907b
--- /dev/null
+++ b/runner/src/config.rs
@@ -0,0 +1,165 @@
+// Copyright 2022 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::net::IpAddr;
+use crate::yaml;
+
+use std::path::PathBuf;
+
+use serde::{Deserialize, Serialize};
+use serde_yaml::Value;
+
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "PascalCase")]
/// Config used by antlion for declaring testbeds and test parameters.
pub(crate) struct Config {
    /// Testbeds declared by this config; serialized as "TestBeds".
    #[serde(rename = "TestBeds")]
    pub testbeds: Vec<Testbed>,
    /// Parameters forwarded to Mobly.
    pub mobly_params: MoblyParams,
}
+
impl Config {
    /// Merge the given test parameters into all testbeds.
    pub fn merge_test_params(&mut self, test_params: Value) {
        for testbed in self.testbeds.iter_mut() {
            // Combine with any existing params (merge semantics are defined
            // by yaml::merge); testbeds without params get a fresh copy.
            match testbed.test_params.as_mut() {
                Some(existing) => yaml::merge(existing, test_params.clone()),
                None => testbed.test_params = Some(test_params.clone()),
            }
        }
    }
}
+
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "PascalCase")]
/// Parameters consumed by Mobly.
pub(crate) struct MoblyParams {
    /// Directory Mobly writes its logs to.
    pub log_path: PathBuf,
}
+
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "PascalCase")]
/// A group of interconnected devices to be used together during an antlion test.
pub(crate) struct Testbed {
    /// Unique name identifying this testbed.
    pub name: String,
    /// Devices available to tests running against this testbed.
    pub controllers: Controllers,
    /// Optional free-form parameters passed through to tests.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub test_params: Option<Value>,
}
+
#[derive(Clone, Debug, Default, Serialize)]
/// Device controllers declared by a testbed, grouped by antlion controller
/// name (the serialized field names match antlion's controller modules).
pub(crate) struct Controllers {
    #[serde(rename = "FuchsiaDevice", skip_serializing_if = "Vec::is_empty")]
    pub fuchsia_devices: Vec<Fuchsia>,
    #[serde(rename = "AccessPoint", skip_serializing_if = "Vec::is_empty")]
    pub access_points: Vec<AccessPoint>,
    #[serde(rename = "Attenuator", skip_serializing_if = "Vec::is_empty")]
    pub attenuators: Vec<Attenuator>,
    #[serde(rename = "PduDevice", skip_serializing_if = "Vec::is_empty")]
    pub pdus: Vec<Pdu>,
    #[serde(rename = "IPerfServer", skip_serializing_if = "Vec::is_empty")]
    pub iperf_servers: Vec<IPerfServer>,
}
+
#[derive(Clone, Debug, Serialize)]
/// A Fuchsia device for use with antlion as defined by [fuchsia_device.py].
///
/// [fuchsia_device.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/fuchsia_device.py
pub(crate) struct Fuchsia {
    pub mdns_name: String,
    pub ip: IpAddr,
    pub take_bug_report_on_fail: bool,
    pub ssh_binary_path: PathBuf,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ssh_config: Option<PathBuf>,
    pub ffx_binary_path: PathBuf,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ffx_subtools_search_path: Option<PathBuf>,
    pub ssh_priv_key: PathBuf,
    /// PDU controlling this device's power, if any.
    #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")]
    pub pdu_device: Option<PduRef>,
    pub hard_reboot_on_fail: bool,
    // Presumably toggles the Honeydew-based device backend — confirm against
    // fuchsia_device.py.
    pub enable_honeydew: bool,
}
+
#[derive(Clone, Debug, Serialize, Deserialize)]
/// Reference to a PDU device. Used to specify which port the attached device
/// maps to on the PDU.
pub(crate) struct PduRef {
    /// PDU model/driver name; defaults to "synaccess.np02b" when absent.
    #[serde(default = "default_pdu_device")]
    pub device: String,
    /// PDU address; serialized to antlion as "host".
    #[serde(rename(serialize = "host"))]
    pub ip: IpAddr,
    pub port: u8,
}
+
/// Serde default for [PduRef::device].
fn default_pdu_device() -> String {
    "synaccess.np02b".to_string()
}
+
#[derive(Clone, Debug, Serialize)]
/// Declares an access point for use with antlion as defined by [access_point.py].
///
/// [access_point.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/access_point.py
pub(crate) struct AccessPoint {
    pub wan_interface: String,
    pub ssh_config: SshConfig,
    /// PDU controlling this AP's power, if any.
    #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")]
    pub pdu_device: Option<PduRef>,
    /// Attenuators wired to this AP's radios, if any.
    #[serde(rename = "Attenuator", skip_serializing_if = "Option::is_none")]
    pub attenuators: Option<Vec<AttenuatorRef>>,
}
+
#[derive(Clone, Debug, Serialize)]
/// SSH connection parameters passed to antlion's SSH-based controllers.
pub(crate) struct SshConfig {
    pub ssh_binary_path: PathBuf,
    pub host: IpAddr,
    pub user: String,
    /// Private key used to authenticate the SSH session.
    pub identity_file: PathBuf,
}
+
#[derive(Clone, Debug, Serialize)]
/// Reference to an attenuator device. Used to specify which ports the attached
/// devices' channels maps to on the attenuator.
pub(crate) struct AttenuatorRef {
    #[serde(rename = "Address")]
    pub address: IpAddr,
    /// Attenuator ports wired to the 2.4 GHz radio.
    #[serde(rename = "attenuator_ports_wifi_2g")]
    pub ports_2g: Vec<u8>,
    /// Attenuator ports wired to the 5 GHz radio.
    #[serde(rename = "attenuator_ports_wifi_5g")]
    pub ports_5g: Vec<u8>,
}
+
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
#[serde(rename_all = "PascalCase")]
/// Declares an attenuator for use with antlion as defined by [attenuator.py].
///
/// [attenuator.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/attenuator.py
pub(crate) struct Attenuator {
    pub model: String,
    pub instrument_count: u8,
    pub address: IpAddr,
    pub protocol: String,
    pub port: u16,
}
+
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
/// Declares a power distribution unit for use with antlion as defined by [pdu.py].
///
/// [pdu.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/pdu.py
pub(crate) struct Pdu {
    /// PDU model/driver name (e.g. "synaccess.np02b").
    pub device: String,
    pub host: IpAddr,
}
+
#[derive(Clone, Debug, Serialize)]
/// Declares an iPerf3 server for use with antlion as defined by [iperf_server.py].
///
/// [iperf_server.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/iperf_server.py
pub(crate) struct IPerfServer {
    pub ssh_config: SshConfig,
    pub port: u16,
    pub test_interface: String,
    // Presumably terminates iperf3 via `killall` rather than by PID —
    // confirm against iperf_server.py.
    pub use_killall: bool,
}
diff --git a/runner/src/driver/infra.rs b/runner/src/driver/infra.rs
new file mode 100644
index 0000000..6f1440f
--- /dev/null
+++ b/runner/src/driver/infra.rs
@@ -0,0 +1,925 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::config::PduRef;
+use crate::config::{self, Config};
+use crate::driver::Driver;
+use crate::env::Environment;
+use crate::net::IpAddr;
+use crate::yaml;
+
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use anyhow::{anyhow, Context, Result};
+use itertools::Itertools;
+use serde::Deserialize;
+use serde_yaml::Value;
+use thiserror::Error;
+
+/// Name of the generated testbed; also the directory name antlion writes its
+/// results under (see [InfraDriver::teardown]).
+const TESTBED_NAME: &'static str = "antlion-runner";
+/// Environment variable holding the directory test outputs must be written to.
+const ENV_OUT_DIR: &'static str = "FUCHSIA_TEST_OUTDIR";
+/// Environment variable holding the path to the botanist testbed config file.
+const ENV_TESTBED_CONFIG: &'static str = "FUCHSIA_TESTBED_CONFIG";
+/// Mobly test summary file name within the antlion output directory.
+const TEST_SUMMARY_FILE: &'static str = "test_summary.yaml";
+
+#[derive(Debug)]
+/// Driver for running antlion on emulated and hardware testbeds hosted by
+/// Fuchsia infrastructure.
+pub(crate) struct InfraDriver {
+    /// Directory test outputs are written to, from the FUCHSIA_TEST_OUTDIR
+    /// environment variable.
+    output_dir: PathBuf,
+    /// antlion config generated from the botanist testbed config.
+    config: Config,
+}
+
+#[derive(Error, Debug)]
+/// Errors produced while detecting the infra environment or translating its
+/// testbed configuration.
+pub(crate) enum InfraDriverError {
+    /// An environment variable identifying the infra environment is absent;
+    /// carries the variable's name.
+    #[error("infra environment not detected, \"{0}\" environment variable not present")]
+    NotDetected(String),
+    /// The testbed configuration is self-contradictory.
+    #[error(transparent)]
+    Config(#[from] ConfigError),
+    /// Any other failure (I/O, parsing, ...).
+    #[error(transparent)]
+    Other(#[from] anyhow::Error),
+}
+
+#[derive(Error, Debug)]
+/// Contradictions found within an infra testbed configuration.
+pub(crate) enum ConfigError {
+    /// The same IP address is declared by conflicting devices.
+    #[error("ip {ip} in use by several devices")]
+    DuplicateIp { ip: IpAddr },
+    /// The same PDU port is declared by several devices.
+    #[error("ip {ip} port {port} in use by several devices")]
+    DuplicatePort { ip: IpAddr, port: u8 },
+}
+
+impl InfraDriver {
+    /// Detect an InfraDriver. Returns [InfraDriverError::NotDetected] if the
+    /// required environmental variables are not found.
+    pub fn new<E: Environment>(
+        env: E,
+        ssh_binary: PathBuf,
+        ffx_binary: PathBuf,
+        ffx_subtools_search_path: Option<PathBuf>,
+        enable_honeydew: bool,
+    ) -> Result<Self, InfraDriverError> {
+        let config_path = match env.var(ENV_TESTBED_CONFIG) {
+            Ok(p) => PathBuf::from(p),
+            Err(std::env::VarError::NotPresent) => {
+                return Err(InfraDriverError::NotDetected(ENV_TESTBED_CONFIG.to_string()))
+            }
+            Err(e) => {
+                return Err(InfraDriverError::Other(anyhow!(
+                    "Failed to read \"{ENV_TESTBED_CONFIG}\" {e}"
+                )))
+            }
+        };
+        let config = fs::read_to_string(&config_path)
+            .with_context(|| format!("Failed to read \"{}\"", config_path.display()))?;
+        let targets: Vec<InfraTarget> = serde_json::from_str(&config)
+            .with_context(|| format!("Failed to parse into InfraTarget: \"{config}\""))?;
+        if targets.is_empty() {
+            return Err(InfraDriverError::Other(anyhow!(
+                "Expected at least one target declared in \"{}\"",
+                config_path.display()
+            )));
+        }
+
+        let output_path = match env.var(ENV_OUT_DIR) {
+            Ok(p) => p,
+            Err(std::env::VarError::NotPresent) => {
+                return Err(InfraDriverError::NotDetected(ENV_OUT_DIR.to_string()))
+            }
+            Err(e) => {
+                return Err(InfraDriverError::Other(anyhow!(
+                    "Failed to read \"{ENV_OUT_DIR}\" {e}"
+                )))
+            }
+        };
+        let output_dir = PathBuf::from(output_path);
+        if !fs::metadata(&output_dir).context("Failed to stat the output directory")?.is_dir() {
+            return Err(InfraDriverError::Other(anyhow!(
+                "Expected a directory but found a file at \"{}\"",
+                output_dir.display()
+            )));
+        }
+
+        Ok(InfraDriver {
+            output_dir: output_dir.clone(),
+            config: InfraDriver::parse_targets(
+                targets,
+                ssh_binary,
+                ffx_binary,
+                ffx_subtools_search_path,
+                output_dir,
+                enable_honeydew,
+            )?,
+        })
+    }
+
+    /// Translate botanist [InfraTarget]s into an antlion [Config], verifying
+    /// that no IP address is claimed by more than one device and no PDU port
+    /// is claimed twice.
+    fn parse_targets(
+        targets: Vec<InfraTarget>,
+        ssh_binary: PathBuf,
+        ffx_binary: PathBuf,
+        ffx_subtools_search_path: Option<PathBuf>,
+        output_dir: PathBuf,
+        enable_honeydew: bool,
+    ) -> Result<Config, InfraDriverError> {
+        let mut fuchsia_devices: Vec<config::Fuchsia> = vec![];
+        let mut access_points: Vec<config::AccessPoint> = vec![];
+        let mut attenuators: HashMap<IpAddr, config::Attenuator> = HashMap::new();
+        let mut pdus: HashMap<IpAddr, config::Pdu> = HashMap::new();
+        let mut iperf_servers: Vec<config::IPerfServer> = vec![];
+        let mut test_params: Option<Value> = None;
+
+        let mut used_ips: HashSet<IpAddr> = HashSet::new();
+        let mut used_ports: HashMap<IpAddr, HashSet<u8>> = HashMap::new();
+
+        // Claim an IP address, erroring if another device already holds it.
+        let mut register_ip = |ip: IpAddr| -> Result<(), InfraDriverError> {
+            if !used_ips.insert(ip.clone()) {
+                return Err(ConfigError::DuplicateIp { ip }.into());
+            }
+            Ok(())
+        };
+
+        // Claim a PDU port, erroring if another device already holds it.
+        let mut register_port = |ip: IpAddr, port: u8| -> Result<(), InfraDriverError> {
+            match used_ports.get_mut(&ip) {
+                Some(ports) => {
+                    if !ports.insert(port) {
+                        return Err(ConfigError::DuplicatePort { ip, port }.into());
+                    }
+                }
+                None => {
+                    if used_ports.insert(ip, HashSet::from([port])).is_some() {
+                        return Err(InfraDriverError::Other(anyhow!(
+                            "Used ports set was unexpectedly modified by concurrent use",
+                        )));
+                    }
+                }
+            };
+            Ok(())
+        };
+
+        // Record a PDU; several devices may re-declare the same PDU as long
+        // as the declarations are identical.
+        let mut register_pdu = |p: Option<PduRef>| -> Result<(), InfraDriverError> {
+            if let Some(PduRef { device, ip, port }) = p {
+                register_port(ip.clone(), port)?;
+                let new = config::Pdu { device: device.clone(), host: ip.clone() };
+                if let Some(old) = pdus.insert(ip.clone(), new.clone()) {
+                    if old != new {
+                        return Err(ConfigError::DuplicateIp { ip }.into());
+                    }
+                }
+            }
+            Ok(())
+        };
+
+        // Record an attenuator; identical re-declarations are allowed.
+        let mut register_attenuator = |a: Option<AttenuatorRef>| -> Result<(), InfraDriverError> {
+            if let Some(a) = a {
+                let new = config::Attenuator {
+                    model: "minicircuits".to_string(),
+                    instrument_count: 4,
+                    address: a.ip.clone(),
+                    protocol: "http".to_string(),
+                    port: 80,
+                };
+                if let Some(old) = attenuators.insert(a.ip.clone(), new.clone()) {
+                    if old != new {
+                        return Err(ConfigError::DuplicateIp { ip: a.ip }.into());
+                    }
+                }
+            }
+            Ok(())
+        };
+
+        // Merge per-target test parameters into a single testbed-wide set.
+        let mut merge_test_params = |p: Option<Value>| {
+            match (test_params.as_mut(), p) {
+                (None, Some(new)) => test_params = Some(new),
+                (Some(existing), Some(new)) => yaml::merge(existing, new),
+                (_, None) => {}
+            };
+        };
+
+        for target in targets {
+            match target {
+                InfraTarget::FuchsiaDevice { nodename, ipv4, ipv6, ssh_key, pdu, test_params } => {
+                    // Prefer IPv4 when both are given; at least one must be set.
+                    let ip: IpAddr = if !ipv4.is_empty() {
+                        ipv4.parse().context("Invalid IPv4 address")
+                    } else if !ipv6.is_empty() {
+                        ipv6.parse().context("Invalid IPv6 address")
+                    } else {
+                        Err(anyhow!("IP address not specified"))
+                    }?;
+
+                    fuchsia_devices.push(config::Fuchsia {
+                        mdns_name: nodename.clone(),
+                        ip: ip.clone(),
+                        take_bug_report_on_fail: true,
+                        ssh_binary_path: ssh_binary.clone(),
+                        // TODO(http://b/244747218): Remove when ssh_config is refactored away
+                        ssh_config: None,
+                        ffx_binary_path: ffx_binary.clone(),
+                        ffx_subtools_search_path: ffx_subtools_search_path.clone(),
+                        ssh_priv_key: ssh_key.clone(),
+                        pdu_device: pdu.clone(),
+                        hard_reboot_on_fail: true,
+                        enable_honeydew,
+                    });
+
+                    register_ip(ip)?;
+                    register_pdu(pdu)?;
+                    merge_test_params(test_params);
+                }
+                InfraTarget::AccessPoint { ip, attenuator, pdu, ssh_key } => {
+                    access_points.push(config::AccessPoint {
+                        wan_interface: "eth0".to_string(),
+                        ssh_config: config::SshConfig {
+                            ssh_binary_path: ssh_binary.clone(),
+                            host: ip.clone(),
+                            user: "root".to_string(),
+                            identity_file: ssh_key.clone(),
+                        },
+                        pdu_device: pdu.clone(),
+                        attenuators: attenuator.as_ref().map(|a| {
+                            vec![config::AttenuatorRef {
+                                address: a.ip.clone(),
+                                ports_2g: vec![1, 2, 3],
+                                ports_5g: vec![1, 2, 3],
+                            }]
+                        }),
+                    });
+
+                    register_ip(ip)?;
+                    register_pdu(pdu)?;
+                    register_attenuator(attenuator)?;
+                }
+                InfraTarget::IPerfServer { ip, user, test_interface, pdu, ssh_key } => {
+                    iperf_servers.push(config::IPerfServer {
+                        ssh_config: config::SshConfig {
+                            ssh_binary_path: ssh_binary.clone(),
+                            host: ip.clone(),
+                            user: user.to_string(),
+                            identity_file: ssh_key.clone(),
+                        },
+                        port: 5201,
+                        test_interface: test_interface.clone(),
+                        use_killall: true,
+                    });
+
+                    register_ip(ip.clone())?;
+                    register_pdu(pdu)?;
+                }
+            };
+        }
+
+        Ok(Config {
+            testbeds: vec![config::Testbed {
+                name: TESTBED_NAME.to_string(),
+                controllers: config::Controllers {
+                    fuchsia_devices,
+                    access_points,
+                    attenuators: attenuators
+                        .into_values()
+                        .sorted_by_key(|a| a.address.clone())
+                        .collect(),
+                    pdus: pdus.into_values().sorted_by_key(|p| p.host.clone()).collect(),
+                    iperf_servers,
+                },
+                test_params,
+            }],
+            mobly_params: config::MoblyParams { log_path: output_dir },
+        })
+    }
+}
+
+impl Driver for InfraDriver {
+    /// Directory antlion results and the generated config are written to.
+    fn output_path(&self) -> &Path {
+        self.output_dir.as_path()
+    }
+    /// antlion config generated from the infra testbed declaration.
+    fn config(&self) -> Config {
+        self.config.clone()
+    }
+    /// Print the Mobly test summary (when one exists) and scrub symlinks from
+    /// the output directory.
+    fn teardown(&self) -> Result<()> {
+        let results_path =
+            self.output_dir.join(TESTBED_NAME).join("latest").join(TEST_SUMMARY_FILE);
+        match fs::File::open(&results_path) {
+            Ok(mut results) => {
+                println!("\nTest results from {}\n", results_path.display());
+                println!("[=====MOBLY RESULTS=====]");
+                std::io::copy(&mut results, &mut std::io::stdout())
+                    .context("Failed to copy results to stdout")?;
+            }
+            // A missing summary is reported but not fatal.
+            Err(e) => eprintln!("Failed to open \"{}\": {}", results_path.display(), e),
+        };
+
+        // Remove any symlinks from the output directory; this causes errors
+        // while uploading to CAS.
+        //
+        // TODO: Remove when the fix is released and supported on Swarming bots
+        // https://github.com/bazelbuild/remote-apis-sdks/pull/229.
+        remove_symlinks(self.output_dir.clone())?;
+
+        Ok(())
+    }
+}
+
+/// Recursively remove all symlinks found at or below `path`, leaving regular
+/// files and directories in place.
+fn remove_symlinks<P: AsRef<Path>>(path: P) -> Result<()> {
+    // symlink_metadata does not follow symlinks, so is_symlink is accurate
+    // for the entry itself.
+    let meta = fs::symlink_metadata(path.as_ref())?;
+    if meta.is_symlink() {
+        fs::remove_file(path)?;
+    } else if meta.is_dir() {
+        for entry in fs::read_dir(path)? {
+            remove_symlinks(entry?.path())?;
+        }
+    }
+    Ok(())
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+/// Schema used to communicate target information from the test environment set
+/// up by botanist.
+///
+/// See https://cs.opensource.google/fuchsia/fuchsia/+/main:tools/botanist/README.md
+enum InfraTarget {
+    /// Fuchsia device under test.
+    FuchsiaDevice {
+        nodename: String,
+        /// IPv4 address; may be an empty string when only IPv6 is available.
+        ipv4: String,
+        /// IPv6 address; may be an empty string when only IPv4 is available.
+        ipv6: String,
+        ssh_key: PathBuf,
+        /// Power distribution unit controlling this device, if any.
+        pdu: Option<PduRef>,
+        /// Extra test parameters merged into the testbed's TestParams.
+        test_params: Option<Value>,
+    },
+    /// Wi-Fi access point reachable over SSH.
+    AccessPoint {
+        ip: IpAddr,
+        ssh_key: PathBuf,
+        /// Attenuator attached to this access point, if any.
+        attenuator: Option<AttenuatorRef>,
+        pdu: Option<PduRef>,
+    },
+    /// Host running an iPerf3 server.
+    IPerfServer {
+        ip: IpAddr,
+        ssh_key: PathBuf,
+        /// SSH user; defaults to "pi" when unspecified.
+        #[serde(default = "default_iperf_user")]
+        user: String,
+        test_interface: String,
+        pdu: Option<PduRef>,
+    },
+}
+
+/// Default SSH user for an [InfraTarget::IPerfServer] when the testbed config
+/// does not specify one.
+fn default_iperf_user() -> String {
+    String::from("pi")
+}
+
+#[derive(Clone, Debug, Deserialize)]
+/// Reference to an attenuator within the botanist testbed config; only its IP
+/// address is communicated.
+struct AttenuatorRef {
+    /// IP address of the attenuator.
+    ip: IpAddr,
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    use crate::run;
+    use crate::runner::{ExitStatus, Runner};
+
+    use std::ffi::OsStr;
+
+    use assert_matches::assert_matches;
+    use indoc::formatdoc;
+    use pretty_assertions::assert_eq;
+    use serde_json::json;
+    use tempfile::{NamedTempFile, TempDir};
+
+    const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc";
+    const FUCHSIA_ADDR: &'static str = "fe80::1%2";
+
+    #[derive(Default)]
+    /// Runner double that records the config path it is given and fabricates
+    /// the antlion output tree, including an empty test summary.
+    struct MockRunner {
+        out_dir: PathBuf,
+        config: std::cell::Cell<PathBuf>,
+    }
+    impl MockRunner {
+        /// Construct a mock runner writing its fake output under `out_dir`.
+        fn new(out_dir: PathBuf) -> Self {
+            Self { out_dir, ..Default::default() }
+        }
+    }
+    impl Runner for MockRunner {
+        /// Record the config path and create an empty Mobly test summary in
+        /// the directory layout [InfraDriver::teardown] expects.
+        fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+            self.config.set(config);
+
+            let antlion_out = self.out_dir.join(TESTBED_NAME).join("latest");
+            fs::create_dir_all(&antlion_out)
+                .context("Failed to create antlion output directory")?;
+            fs::write(antlion_out.join(TEST_SUMMARY_FILE), "")
+                .context("Failed to write test_summary.yaml")?;
+            Ok(ExitStatus::Ok)
+        }
+    }
+
+    /// Environment double exposing only the testbed-config and out-dir
+    /// variables.
+    struct MockEnvironment {
+        config: Option<PathBuf>,
+        out_dir: Option<PathBuf>,
+    }
+    impl Environment for MockEnvironment {
+        fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, std::env::VarError> {
+            if key.as_ref() == ENV_TESTBED_CONFIG {
+                self.config
+                    .clone()
+                    .ok_or(std::env::VarError::NotPresent)
+                    .map(|p| p.into_os_string().into_string().unwrap())
+            } else if key.as_ref() == ENV_OUT_DIR {
+                self.out_dir
+                    .clone()
+                    .ok_or(std::env::VarError::NotPresent)
+                    .map(|p| p.into_os_string().into_string().unwrap())
+            } else {
+                Err(std::env::VarError::NotPresent)
+            }
+        }
+    }
+
+    /// Neither environment variable set: detection must fail.
+    #[test]
+    fn infra_not_detected() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let env = MockEnvironment { config: None, out_dir: None };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None, false);
+        assert_matches!(got, Err(InfraDriverError::NotDetected(_)));
+    }
+
+    /// Only the out-dir variable set: detection fails naming the config var.
+    #[test]
+    fn infra_not_detected_config() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+        let env = MockEnvironment { config: None, out_dir: Some(out_dir.path().to_path_buf()) };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None, false);
+        assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_TESTBED_CONFIG);
+    }
+
+    /// Only the config variable set: detection fails naming the out-dir var.
+    #[test]
+    fn infra_not_detected_out_dir() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let env =
+            MockEnvironment { config: Some(testbed_config.path().to_path_buf()), out_dir: None };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None, false);
+        assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_OUT_DIR);
+    }
+
+    /// A config that is not a list of targets must be rejected.
+    #[test]
+    fn infra_invalid_config() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(testbed_config.as_file(), &json!({ "foo": "bar" })).unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None, false);
+        assert_matches!(got, Err(_));
+    }
+
+    /// Happy path: a single Fuchsia device produces the expected golden YAML.
+    #[test]
+    fn infra() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), Some(ffx_subtools.path().to_path_buf()), false)
+                .unwrap();
+        run(runner, driver, None).unwrap();
+
+        let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let ffx_subtools_path = ffx_subtools.path().display();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+              enable_honeydew: false
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    /// Params passed on the command line merge with (and override) per-target
+    /// test params from the testbed config.
+    #[test]
+    fn infra_with_test_params() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+                "test_params": {
+                    "sl4f_sanity_test_params": {
+                        "can_overwrite": false,
+                        "from_original": true,
+                    }
+                }
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), Some(ffx_subtools.path().to_path_buf()), false)
+                .unwrap();
+        let params = "
+            sl4f_sanity_test_params:
+                merged_with: true
+                can_overwrite: true
+        ";
+        let params = serde_yaml::from_str(params).unwrap();
+        run(runner, driver, Some(params)).unwrap();
+
+        let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let ffx_subtools_path = ffx_subtools.path().display();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+              enable_honeydew: false
+          TestParams:
+            sl4f_sanity_test_params:
+              can_overwrite: true
+              from_original: true
+              merged_with: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    /// Full testbed: device + AP + attenuator + shared PDU + iPerf server,
+    /// checked against golden YAML.
+    #[test]
+    fn infra_with_auxiliary_devices() {
+        const FUCHSIA_PDU_IP: &'static str = "192.168.42.14";
+        const FUCHSIA_PDU_PORT: u8 = 1;
+        const AP_IP: &'static str = "192.168.42.11";
+        const AP_AND_IPERF_PDU_IP: &'static str = "192.168.42.13";
+        const AP_PDU_PORT: u8 = 1;
+        const ATTENUATOR_IP: &'static str = "192.168.42.15";
+        const IPERF_IP: &'static str = "192.168.42.12";
+        const IPERF_USER: &'static str = "alice";
+        const IPERF_PDU_PORT: u8 = 2;
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": FUCHSIA_NAME,
+                "ipv4": "",
+                "ipv6": FUCHSIA_ADDR,
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": FUCHSIA_PDU_IP,
+                    "port": FUCHSIA_PDU_PORT,
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": AP_IP,
+                "ssh_key": ssh_key.path(),
+                "attenuator": {
+                    "ip": ATTENUATOR_IP,
+                },
+                "pdu": {
+                    "ip": AP_AND_IPERF_PDU_IP,
+                    "port": AP_PDU_PORT,
+                    "device": "fancy-pdu",
+                },
+            }, {
+                "type": "IPerfServer",
+                "ip": IPERF_IP,
+                "ssh_key": ssh_key.path(),
+                "user": IPERF_USER,
+                "test_interface": "eth0",
+                "pdu": {
+                    "ip": AP_AND_IPERF_PDU_IP,
+                    "port": IPERF_PDU_PORT,
+                    "device": "fancy-pdu",
+                },
+            }]),
+        )
+        .unwrap();
+
+        let runner = MockRunner::new(out_dir.path().to_path_buf());
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let driver =
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), Some(ffx_subtools.path().to_path_buf()), false)
+                .unwrap();
+        run(runner, driver, None).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let ffx_subtools_path = ffx_subtools.path().display();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
+              ssh_priv_key: {ssh_key_path}
+              PduDevice:
+                device: synaccess.np02b
+                host: {FUCHSIA_PDU_IP}
+                port: {FUCHSIA_PDU_PORT}
+              hard_reboot_on_fail: true
+              enable_honeydew: false
+            AccessPoint:
+            - wan_interface: eth0
+              ssh_config:
+                ssh_binary_path: {ssh_path}
+                host: {AP_IP}
+                user: root
+                identity_file: {ssh_key_path}
+              PduDevice:
+                device: fancy-pdu
+                host: {AP_AND_IPERF_PDU_IP}
+                port: {AP_PDU_PORT}
+              Attenuator:
+              - Address: {ATTENUATOR_IP}
+                attenuator_ports_wifi_2g:
+                - 1
+                - 2
+                - 3
+                attenuator_ports_wifi_5g:
+                - 1
+                - 2
+                - 3
+            Attenuator:
+            - Model: minicircuits
+              InstrumentCount: 4
+              Address: {ATTENUATOR_IP}
+              Protocol: http
+              Port: 80
+            PduDevice:
+            - device: fancy-pdu
+              host: {AP_AND_IPERF_PDU_IP}
+            - device: synaccess.np02b
+              host: {FUCHSIA_PDU_IP}
+            IPerfServer:
+            - ssh_config:
+                ssh_binary_path: {ssh_path}
+                host: {IPERF_IP}
+                user: {IPERF_USER}
+                identity_file: {ssh_key_path}
+              port: 5201
+              test_interface: eth0
+              use_killall: true
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    /// Two devices claiming the same PDU port must be rejected.
+    #[test]
+    fn infra_duplicate_port_pdu() {
+        let pdu_ip: IpAddr = "192.168.42.13".parse().unwrap();
+        let pdu_port = 1;
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": "",
+                "ipv6": "fe80::1%2",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": pdu_ip,
+                    "port": pdu_port,
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": "192.168.42.11",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": pdu_ip,
+                    "port": pdu_port,
+                },
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None, false);
+        assert_matches!(got,
+            Err(InfraDriverError::Config(ConfigError::DuplicatePort { ip, port }))
+                if ip == pdu_ip && port == pdu_port
+        );
+    }
+
+    /// Conflicting PDU declarations sharing one IP must be rejected.
+    #[test]
+    fn infra_duplicate_ip_pdu() {
+        let duplicate_ip: IpAddr = "192.168.42.13".parse().unwrap();
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": "",
+                "ipv6": "fe80::1%2",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": duplicate_ip,
+                    "port": 1,
+                    "device": "A",
+                },
+            }, {
+                "type": "AccessPoint",
+                "ip": "192.168.42.11",
+                "ssh_key": ssh_key.path(),
+                "pdu": {
+                    "ip": duplicate_ip,
+                    "port": 2,
+                    "device": "B",
+                },
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        assert_matches!(
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None, false),
+            Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
+                if ip == duplicate_ip
+        );
+    }
+
+    /// Two devices sharing the same IP address must be rejected.
+    #[test]
+    fn infra_duplicate_ip_devices() {
+        let duplicate_ip: IpAddr = "192.168.42.11".parse().unwrap();
+
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let testbed_config = NamedTempFile::new().unwrap();
+        serde_json::to_writer_pretty(
+            testbed_config.as_file(),
+            &json!([{
+                "type": "FuchsiaDevice",
+                "nodename": "foo",
+                "ipv4": duplicate_ip,
+                "ipv6": "",
+                "ssh_key": ssh_key.path(),
+            }, {
+                "type": "AccessPoint",
+                "ip": duplicate_ip,
+                "ssh_key": ssh_key.path(),
+            }]),
+        )
+        .unwrap();
+
+        let env = MockEnvironment {
+            config: Some(testbed_config.path().to_path_buf()),
+            out_dir: Some(out_dir.path().to_path_buf()),
+        };
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None, false);
+        assert_matches!(got,
+            Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
+                if ip == duplicate_ip
+        );
+    }
+
+    /// remove_symlinks deletes a symlink but keeps the file it pointed at.
+    #[test]
+    fn remove_symlinks_works() {
+        const SYMLINK_FILE: &'static str = "latest";
+
+        let out_dir = TempDir::new().unwrap();
+        let test_file = NamedTempFile::new_in(&out_dir).unwrap();
+        let symlink_path = out_dir.path().join(SYMLINK_FILE);
+
+        #[cfg(unix)]
+        std::os::unix::fs::symlink(&test_file, &symlink_path).unwrap();
+        #[cfg(windows)]
+        std::os::windows::fs::symlink_file(&test_file, &symlink_path).unwrap();
+
+        assert_matches!(remove_symlinks(out_dir.path()), Ok(()));
+        assert_matches!(fs::symlink_metadata(symlink_path), Err(e) if e.kind() == std::io::ErrorKind::NotFound);
+        assert_matches!(fs::symlink_metadata(test_file), Ok(meta) if meta.is_file());
+    }
+}
diff --git a/runner/src/driver/local.rs b/runner/src/driver/local.rs
new file mode 100644
index 0000000..a7da2e6
--- /dev/null
+++ b/runner/src/driver/local.rs
@@ -0,0 +1,308 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::config;
+use crate::driver::Driver;
+use crate::finder::{Answer, Finder};
+use crate::net::IpAddr;
+
+use std::path::{Path, PathBuf};
+
+use anyhow::{ensure, Context, Result};
+use home::home_dir;
+
+const TESTBED_NAME: &'static str = "antlion-runner";
+
+/// Driver for running antlion locally on an emulated or hardware testbed with
+/// optional mDNS discovery when a DHCP server is not available. This is useful
+/// for testing changes locally in a development environment.
+pub(crate) struct LocalDriver {
+    target: LocalTarget,
+    output_dir: PathBuf,
+    ssh_binary: PathBuf,
+    ffx_binary: PathBuf,
+    ffx_subtools_search_path: Option<PathBuf>,
+    enable_honeydew: bool,
+}
+
+impl LocalDriver {
+    pub fn new<F>(
+        device: Option<String>,
+        ssh_binary: PathBuf,
+        ssh_key: Option<PathBuf>,
+        ffx_binary: PathBuf,
+        ffx_subtools_search_path: Option<PathBuf>,
+        out_dir: Option<PathBuf>,
+        enable_honeydew: bool,
+    ) -> Result<Self>
+    where
+        F: Finder,
+    {
+        let output_dir = match out_dir {
+            Some(p) => Ok(p),
+            None => std::env::current_dir().context("Failed to get current working directory"),
+        }?;
+        Ok(Self {
+            target: LocalTarget::new::<F>(device, ssh_key)?,
+            output_dir,
+            ssh_binary,
+            ffx_binary,
+            ffx_subtools_search_path,
+            enable_honeydew,
+        })
+    }
+}
+
+impl Driver for LocalDriver {
+    fn output_path(&self) -> &Path {
+        self.output_dir.as_path()
+    }
+    fn config(&self) -> config::Config {
+        config::Config {
+            testbeds: vec![config::Testbed {
+                name: TESTBED_NAME.to_string(),
+                controllers: config::Controllers {
+                    fuchsia_devices: vec![config::Fuchsia {
+                        mdns_name: self.target.name.clone(),
+                        ip: self.target.ip.clone(),
+                        take_bug_report_on_fail: true,
+                        ssh_binary_path: self.ssh_binary.clone(),
+                        // TODO(http://b/244747218): Remove when ssh_config is refactored away
+                        ssh_config: None,
+                        ffx_binary_path: self.ffx_binary.clone(),
+                        ffx_subtools_search_path: self.ffx_subtools_search_path.clone(),
+                        ssh_priv_key: self.target.ssh_key.clone(),
+                        pdu_device: None,
+                        hard_reboot_on_fail: true,
+                        enable_honeydew: self.enable_honeydew,
+                    }],
+                    ..Default::default()
+                },
+                test_params: None,
+            }],
+            mobly_params: config::MoblyParams { log_path: self.output_dir.clone() },
+        }
+    }
+    fn teardown(&self) -> Result<()> {
+        println!(
+            "\nView full antlion logs at {}",
+            self.output_dir.join(TESTBED_NAME).join("latest").display()
+        );
+        Ok(())
+    }
+}
+
+/// LocalTarget performs best-effort discovery of target information from
+/// standard Fuchsia environmental variables.
+struct LocalTarget {
+    name: String,
+    ip: IpAddr,
+    ssh_key: PathBuf,
+}
+
+impl LocalTarget {
+    fn new<F>(device: Option<String>, ssh_key: Option<PathBuf>) -> Result<Self>
+    where
+        F: Finder,
+    {
+        let device_name = device.or_else(|| match std::env::var("FUCHSIA_DIR") {
+            Ok(dir) => match std::fs::read_to_string(format!("{dir}/out/default.device")) {
+                Ok(name) => Some(name.trim().to_string()),
+                Err(_) => {
+                    println!("A default device using \"fx set-device\" has not been set");
+                    println!("Using the first Fuchsia device discovered via mDNS");
+                    None
+                }
+            },
+            Err(_) => {
+                println!("Neither --device nor FUCHSIA_DIR has been set");
+                println!("Using the first Fuchsia device discovered via mDNS");
+                None
+            }
+        });
+
+        let Answer { name, ip } = F::find_device(device_name)?;
+
+        // TODO: Move this validation out to Args
+        let ssh_key = ssh_key
+            .or_else(|| home_dir().map(|p| p.join(".ssh/fuchsia_ed25519").to_path_buf()))
+            .context("Failed to detect the private Fuchsia SSH key")?;
+
+        ensure!(
+            ssh_key.try_exists().with_context(|| format!(
+                "Failed to check existence of SSH key \"{}\"",
+                ssh_key.display()
+            ))?,
+            "Cannot find SSH key \"{}\"",
+            ssh_key.display()
+        );
+
+        Ok(LocalTarget { name, ip, ssh_key })
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    use crate::run;
+    use crate::runner::{ExitStatus, Runner};
+
+    use indoc::formatdoc;
+    use pretty_assertions::assert_eq;
+    use tempfile::{NamedTempFile, TempDir};
+
+    const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc";
+    const FUCHSIA_ADDR: &'static str = "fe80::1%eth0";
+    const FUCHSIA_IP: &'static str = "fe80::1";
+    const SCOPE_ID: &'static str = "eth0";
+
+    struct MockFinder;
+    impl Finder for MockFinder {
+        fn find_device(_: Option<String>) -> Result<Answer> {
+            Ok(Answer {
+                name: FUCHSIA_NAME.to_string(),
+                ip: IpAddr::V6(FUCHSIA_IP.parse().unwrap(), Some(SCOPE_ID.to_string())),
+            })
+        }
+    }
+
+    #[derive(Default)]
+    struct MockRunner {
+        config: std::cell::Cell<PathBuf>,
+    }
+    impl Runner for MockRunner {
+        fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+            self.config.set(config);
+            Ok(ExitStatus::Ok)
+        }
+    }
+
+    #[test]
+    fn local_invalid_ssh_key() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        assert!(LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(PathBuf::new()),
+            ffx.path().to_path_buf(),
+            None,
+            Some(out_dir.path().to_path_buf()),
+            false,
+        )
+        .is_err());
+    }
+
+    #[test]
+    fn local() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let runner = MockRunner::default();
+        let driver = LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(ssh_key.path().to_path_buf()),
+            ffx.path().to_path_buf(),
+            Some(ffx_subtools.path().to_path_buf()),
+            Some(out_dir.path().to_path_buf()),
+            false,
+        )
+        .unwrap();
+
+        run(runner, driver, None).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display();
+        let ssh_key_path = ssh_key.path().display();
+        let ffx_path = ffx.path().display();
+        let ffx_subtools_path = ffx_subtools.path().display();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+              enable_honeydew: false
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn local_with_test_params() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let runner = MockRunner::default();
+        let driver = LocalDriver::new::<MockFinder>(
+            None,
+            ssh.path().to_path_buf(),
+            Some(ssh_key.path().to_path_buf()),
+            ffx.path().to_path_buf(),
+            Some(ffx_subtools.path().to_path_buf()),
+            Some(out_dir.path().to_path_buf()),
+            false,
+        )
+        .unwrap();
+
+        let params_yaml = "
+        sl4f_sanity_test_params:
+            foo: bar
+        ";
+        let params = serde_yaml::from_str(params_yaml).unwrap();
+
+        run(runner, driver, Some(params)).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display().to_string();
+        let ssh_key_path = ssh_key.path().display().to_string();
+        let ffx_path = ffx.path().display().to_string();
+        let ffx_subtools_path = ffx_subtools.path().display();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: true
+              enable_honeydew: false
+          TestParams:
+            sl4f_sanity_test_params:
+              foo: bar
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+}
diff --git a/runner/src/driver/mod.rs b/runner/src/driver/mod.rs
new file mode 100644
index 0000000..35de41f
--- /dev/null
+++ b/runner/src/driver/mod.rs
@@ -0,0 +1,24 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+pub(crate) mod infra;
+pub(crate) mod local;
+
+use crate::config::Config;
+
+use std::path::Path;
+
+use anyhow::Result;
+
+/// Driver provides insight into the information surrounding running an antlion
+/// test.
+pub(crate) trait Driver {
+    /// Path to output directory for test artifacts.
+    fn output_path(&self) -> &Path;
+    /// Antlion config for use during test.
+    fn config(&self) -> Config;
+    /// Additional logic to run after all tests run, regardless of tests passing
+    /// or failing.
+    fn teardown(&self) -> Result<()>;
+}
diff --git a/runner/src/env.rs b/runner/src/env.rs
new file mode 100644
index 0000000..ede8b74
--- /dev/null
+++ b/runner/src/env.rs
@@ -0,0 +1,25 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use std::ffi::OsStr;
+use std::env::VarError;
+
+/// Inspection of the process's environment.
+pub(crate) trait Environment {
+    /// Fetches the environment variable `key` from the current process.
+    ///
+    /// See [std::env::var] for details.
+    ///
+    /// [std::env::var]: https://doc.rust-lang.org/std/env/fn.var.html
+    fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError>;
+}
+
+/// Query the local process's environment.
+pub(crate) struct LocalEnvironment;
+
+impl Environment for LocalEnvironment {
+    fn var<K: AsRef<OsStr>>(&self, key: K) -> Result<String, VarError> {
+        std::env::var(key)
+    }
+}
diff --git a/runner/src/finder.rs b/runner/src/finder.rs
new file mode 100644
index 0000000..be11251
--- /dev/null
+++ b/runner/src/finder.rs
@@ -0,0 +1,210 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use crate::net::IpAddr;
+
+use std::io;
+use std::net::{Ipv6Addr, SocketAddr, SocketAddrV6, UdpSocket};
+use std::str;
+use std::time::{Duration, Instant};
+
+use anyhow::{bail, Context, Result};
+use mdns::protocol as dns;
+use netext::{get_mcast_interfaces, IsLocalAddr, McastInterface};
+use packet::{InnerPacketBuilder, ParseBuffer};
+use socket2::{Domain, Protocol, Socket, Type};
+
+const FUCHSIA_DOMAIN: &str = "_fuchsia._udp.local";
+const MDNS_MCAST_V6: Ipv6Addr = Ipv6Addr::new(0xff02, 0, 0, 0, 0, 0, 0, 0x00fb);
+const MDNS_PORT: u16 = 5353;
+const MDNS_TIMEOUT: Duration = Duration::from_secs(10);
+
+lazy_static::lazy_static! {
+    static ref MDNS_QUERY: &'static [u8] = construct_query_buf(FUCHSIA_DOMAIN);
+}
+
+/// Find Fuchsia devices.
+pub(crate) trait Finder {
+    /// Find a Fuchsia device, preferring `device_name` if specified.
+    fn find_device(device_name: Option<String>) -> Result<Answer>;
+}
+
+/// Answer from a Finder.
+pub(crate) struct Answer {
+    /// Name of the Fuchsia device.
+    pub name: String,
+    /// IP address of the Fuchsia device.
+    pub ip: IpAddr,
+}
+
+pub(crate) struct MulticastDns {}
+
+impl Finder for MulticastDns {
+    /// Find a Fuchsia device using mDNS. If `device_name` is not specified, the
+    /// first device will be used.
+    fn find_device(device_name: Option<String>) -> Result<Answer> {
+        let interfaces =
+            get_mcast_interfaces().context("Failed to list multicast-enabled interfaces")?;
+        let interface_names =
+            interfaces.iter().map(|i| i.name.clone()).collect::<Vec<String>>().join(", ");
+        if let Some(ref d) = device_name {
+            println!("Performing mDNS discovery for {d} on interfaces: {interface_names}");
+        } else {
+            println!("Performing mDNS discovery on interfaces: {interface_names}");
+        }
+
+        let socket = create_socket(interfaces.iter()).context("Failed to create mDNS socket")?;
+
+        // TODO(http://b/264936590): Remove the race condition where the Fuchsia
+        // device can send its answer before this socket starts listening. Add an
+        // async runtime and concurrently listen for answers while sending queries.
+        send_queries(&socket, interfaces.iter()).context("Failed to send mDNS queries")?;
+        let answer = listen_for_answers(socket, device_name)?;
+
+        println!("Device {} found at {}", answer.name, answer.ip);
+        Ok(answer)
+    }
+}
+
+fn construct_query_buf(service: &str) -> &'static [u8] {
+    let question = dns::QuestionBuilder::new(
+        dns::DomainBuilder::from_str(service).unwrap(),
+        dns::Type::Ptr,
+        dns::Class::In,
+        true,
+    );
+
+    let mut message = dns::MessageBuilder::new(0, true);
+    message.add_question(question);
+
+    let mut buf = vec![0; message.bytes_len()];
+    message.serialize(buf.as_mut_slice());
+    Box::leak(buf.into_boxed_slice())
+}
+
+/// Create a socket for both sending and listening on all multicast-capable
+/// interfaces.
+fn create_socket<'a>(interfaces: impl Iterator<Item = &'a McastInterface>) -> Result<Socket> {
+    let socket = Socket::new(Domain::IPV6, Type::DGRAM, Some(Protocol::UDP))?;
+    let read_timeout = Duration::from_millis(100);
+    socket
+        .set_read_timeout(Some(read_timeout))
+        .with_context(|| format!("Failed to set SO_RCVTIMEO to {}ms", read_timeout.as_millis()))?;
+    socket.set_only_v6(true).context("Failed to set IPV6_V6ONLY")?;
+    socket.set_reuse_address(true).context("Failed to set SO_REUSEADDR")?;
+    socket.set_reuse_port(true).context("Failed to set SO_REUSEPORT")?;
+
+    for interface in interfaces {
+        // Listen on all multicast-enabled interfaces
+        match interface.id() {
+            Ok(id) => match socket.join_multicast_v6(&MDNS_MCAST_V6, id) {
+                Ok(()) => {}
+                Err(e) => eprintln!("Failed to join mDNS multicast group on interface {id}: {e}"),
+            },
+            Err(e) => eprintln!("Failed to listen on interface {}: {}", interface.name, e),
+        }
+    }
+
+    socket
+        .bind(&SocketAddrV6::new(Ipv6Addr::UNSPECIFIED, 0, 0, 0).into())
+        .with_context(|| format!("Failed to bind to unspecified IPv6"))?;
+
+    Ok(socket)
+}
+
+fn send_queries<'a>(
+    socket: &Socket,
+    interfaces: impl Iterator<Item = &'a McastInterface>,
+) -> Result<()> {
+    let to_addr = SocketAddrV6::new(MDNS_MCAST_V6, MDNS_PORT, 0, 0).into();
+
+    for interface in interfaces {
+        let id = interface
+            .id()
+            .with_context(|| format!("Failed to get interface ID for {}", interface.name))?;
+        socket
+            .set_multicast_if_v6(id)
+            .with_context(|| format!("Failed to set multicast interface for {}", interface.name))?;
+        for addr in &interface.addrs {
+            if let SocketAddr::V6(addr_v6) = addr {
+                if !addr.ip().is_local_addr() || addr.ip().is_loopback() {
+                    continue;
+                }
+                if let Err(e) = socket.send_to(&MDNS_QUERY, &to_addr) {
+                    eprintln!(
+                        "Failed to send mDNS query out {} via {}: {e}",
+                        interface.name,
+                        addr_v6.ip()
+                    );
+                    continue;
+                }
+            }
+        }
+    }
+    Ok(())
+}
+
+fn listen_for_answers(socket: Socket, device_name: Option<String>) -> Result<Answer> {
+    let s: UdpSocket = socket.into();
+    let mut buf = [0; 1500];
+
+    let end = Instant::now() + MDNS_TIMEOUT;
+    while Instant::now() < end {
+        match s.recv_from(&mut buf) {
+            Ok((packet_bytes, src_sock_addr)) => {
+                if !src_sock_addr.ip().is_local_addr() {
+                    continue;
+                }
+
+                let mut packet_buf = &mut buf[..packet_bytes];
+                match packet_buf.parse::<dns::Message<_>>() {
+                    Ok(message) => {
+                        if !message.answers.iter().any(|a| a.domain == FUCHSIA_DOMAIN) {
+                            continue;
+                        }
+                        for answer in message.additional {
+                            if let Some(std::net::IpAddr::V6(addr)) = answer.rdata.ip_addr() {
+                                if let SocketAddr::V6(src_v6) = src_sock_addr {
+                                    let name = answer
+                                        .domain
+                                        .to_string()
+                                        .trim_end_matches(".local")
+                                        .to_string();
+                                    let scope_id = scope_id_to_name_checked(src_v6.scope_id())?;
+
+                                    if let Some(ref device) = device_name {
+                                        if &name != device {
+                                            println!("Found irrelevant device {name} at {addr}%{scope_id}");
+                                            continue;
+                                        }
+                                    }
+
+                                    return Ok(Answer {
+                                        name,
+                                        ip: IpAddr::V6(addr, Some(scope_id)),
+                                    });
+                                }
+                            }
+                        }
+                    }
+                    Err(err) => eprintln!("Failed to parse mDNS packet: {err:?}"),
+                }
+            }
+            Err(err) if err.kind() == io::ErrorKind::WouldBlock => {}
+            Err(err) => return Err(err.into()),
+        }
+    }
+
+    bail!("device {device_name:?} not found")
+}
+
+fn scope_id_to_name_checked(scope_id: u32) -> Result<String> {
+    let mut buf = vec![0; libc::IF_NAMESIZE];
+    let res = unsafe { libc::if_indextoname(scope_id, buf.as_mut_ptr() as *mut libc::c_char) };
+    if res.is_null() {
+        bail!("{scope_id} is not a valid network interface ID")
+    } else {
+        Ok(String::from_utf8_lossy(&buf.split(|&c| c == 0u8).next().unwrap_or(&[0u8])).to_string())
+    }
+}
diff --git a/runner/src/main.rs b/runner/src/main.rs
new file mode 100644
index 0000000..fb04c49
--- /dev/null
+++ b/runner/src/main.rs
@@ -0,0 +1,171 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+mod config;
+mod driver;
+mod env;
+mod finder;
+mod net;
+mod runner;
+mod yaml;
+
+use crate::driver::infra::{InfraDriver, InfraDriverError};
+use crate::runner::ExitStatus;
+
+use std::fs::File;
+use std::path::PathBuf;
+use std::{fs, process::ExitCode};
+
+use anyhow::{Context, Result};
+use argh::FromArgs;
+use serde_yaml::Value;
+
+#[derive(FromArgs)]
+/// antlion runner with config generation
+struct Args {
+    /// name of the Fuchsia device to use for testing; defaults to using mDNS
+    /// discovery
+    #[argh(option)]
+    device: Option<String>,
+
+    /// path to the SSH binary used to communicate with all devices
+    #[argh(option, from_str_fn(parse_file))]
+    ssh_binary: PathBuf,
+
+    /// path to the SSH private key used to communicate with Fuchsia; defaults
+    /// to ~/.ssh/fuchsia_ed25519
+    #[argh(option, from_str_fn(parse_file))]
+    ssh_key: Option<PathBuf>,
+
+    /// path to the FFX binary used to communicate with Fuchsia
+    #[argh(option, from_str_fn(parse_file))]
+    ffx_binary: PathBuf,
+
+    /// path to a directory of FFX subtools used to communicate with Fuchsia
+    #[argh(option, from_str_fn(parse_directory))]
+    ffx_subtools_search_path: Option<PathBuf>,
+
+    /// path to the python interpreter binary (e.g. /bin/python3.9)
+    #[argh(option)]
+    python_bin: String,
+
+    /// path to the antlion zipapp, ending in .pyz
+    #[argh(option, from_str_fn(parse_file))]
+    antlion_pyz: PathBuf,
+
+    /// path to a directory for outputting artifacts; defaults to the current
+    /// working directory or FUCHSIA_TEST_OUTDIR
+    #[argh(option, from_str_fn(parse_directory))]
+    out_dir: Option<PathBuf>,
+
+    /// path to additional YAML config for this test; placed in the
+    /// "test_params" key in the antlion config
+    #[argh(option, from_str_fn(parse_file))]
+    test_params: Option<PathBuf>,
+
+    /// list of test cases to run; defaults to all test cases
+    #[argh(positional)]
+    test_cases: Vec<String>,
+
+    /// flag to enable using honeydew fuchsia device; defaults to false
+    #[argh(switch)]
+    enable_honeydew: bool,
+}
+
+fn parse_file(s: &str) -> Result<PathBuf, String> {
+    let path = PathBuf::from(s);
+    let _ = File::open(&path).map_err(|e| format!("Failed to open \"{s}\": {e}"))?;
+    Ok(path)
+}
+
+fn parse_directory(s: &str) -> Result<PathBuf, String> {
+    let path = PathBuf::from(s);
+    let meta =
+        std::fs::metadata(&path).map_err(|e| format!("Failed to read metadata of \"{s}\": {e}"))?;
+    if meta.is_file() {
+        return Err(format!("Expected a directory but found a file at \"{s}\""));
+    }
+    Ok(path)
+}
+
+fn run<R, D>(runner: R, driver: D, test_params: Option<Value>) -> Result<ExitCode>
+where
+    R: runner::Runner,
+    D: driver::Driver,
+{
+    let mut config = driver.config();
+    if let Some(params) = test_params {
+        config.merge_test_params(params);
+    }
+
+    let yaml =
+        serde_yaml::to_string(&config).context("Failed to convert antlion config to YAML")?;
+
+    let output_path = driver.output_path().to_path_buf();
+    let config_path = output_path.join("config.yaml");
+    println!("Writing {}", config_path.display());
+    println!("\n{yaml}\n");
+    fs::write(&config_path, yaml).context("Failed to write config to file")?;
+
+    let exit_code = runner.run(config_path).context("Failed to run antlion")?;
+    match exit_code {
+        ExitStatus::Ok => println!("Antlion successfully exited"),
+        ExitStatus::Err(code) => eprintln!("Antlion failed with status code {}", code),
+        ExitStatus::Interrupt(Some(code)) => eprintln!("Antlion interrupted by signal {}", code),
+        ExitStatus::Interrupt(None) => eprintln!("Antlion interrupted by signal"),
+    };
+    driver.teardown().context("Failed to teardown environment")?;
+    Ok(exit_code.into())
+}
+
+fn main() -> Result<ExitCode> {
+    let args: Args = argh::from_env();
+    let env = env::LocalEnvironment;
+    let runner = runner::ProcessRunner {
+        python_bin: args.python_bin,
+        antlion_pyz: args.antlion_pyz,
+        test_cases: args.test_cases,
+    };
+
+    let test_params = match args.test_params {
+        Some(path) => {
+            let text = fs::read_to_string(&path)
+                .with_context(|| format!("Failed to read file \"{}\"", path.display()))?;
+            let yaml = serde_yaml::from_str(&text)
+                .with_context(|| format!("Failed to parse \"{text}\" as YAML"))?;
+            Some(yaml)
+        }
+        None => None,
+    };
+
+    match InfraDriver::new(
+        env,
+        args.ssh_binary.clone(),
+        args.ffx_binary.clone(),
+        args.ffx_subtools_search_path.clone(),
+        args.enable_honeydew.clone(),
+    ) {
+        Ok(env) => return run(runner, env, test_params),
+        Err(InfraDriverError::NotDetected(_)) => {}
+        Err(InfraDriverError::Config(e)) => {
+            return Err(anyhow::Error::from(e).context("Config validation"))
+        }
+        Err(InfraDriverError::Other(e)) => {
+            return Err(anyhow::Error::from(e).context("Unexpected infra driver error"))
+        }
+    };
+
+    let env = driver::local::LocalDriver::new::<finder::MulticastDns>(
+        args.device.clone(),
+        args.ssh_binary.clone(),
+        args.ssh_key.clone(),
+        args.ffx_binary.clone(),
+        args.ffx_subtools_search_path.clone(),
+        args.out_dir.clone(),
+        args.enable_honeydew.clone(),
+    )
+    .context("Failed to detect local environment")?;
+
+    run(runner, env, test_params)
+}
diff --git a/runner/src/net.rs b/runner/src/net.rs
new file mode 100644
index 0000000..35dc07a
--- /dev/null
+++ b/runner/src/net.rs
@@ -0,0 +1,219 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use std::fmt::{Debug, Display};
+use std::marker::PhantomData;
+use std::net::{Ipv4Addr, Ipv6Addr};
+
+use netext::IsLocalAddr;
+use serde::{Deserialize, Serialize};
+use thiserror::Error;
+
+/// IP address with support for IPv6 scope identifiers as defined in RFC 4007.
+#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
+pub enum IpAddr {
+    /// An IPv4 address.
+    V4(Ipv4Addr),
+    /// An IPv6 address with optional scope identifier.
+    V6(Ipv6Addr, Option<String>),
+}
+
+impl Into<std::net::IpAddr> for IpAddr {
+    fn into(self) -> std::net::IpAddr {
+        match self {
+            IpAddr::V4(ip) => std::net::IpAddr::from(ip),
+            IpAddr::V6(ip, _) => std::net::IpAddr::from(ip),
+        }
+    }
+}
+
+impl From<Ipv6Addr> for IpAddr {
+    fn from(value: Ipv6Addr) -> Self {
+        IpAddr::V6(value, None)
+    }
+}
+
+impl From<Ipv4Addr> for IpAddr {
+    fn from(value: Ipv4Addr) -> Self {
+        IpAddr::V4(value)
+    }
+}
+
+impl From<std::net::IpAddr> for IpAddr {
+    fn from(value: std::net::IpAddr) -> Self {
+        match value {
+            std::net::IpAddr::V4(ip) => IpAddr::from(ip),
+            std::net::IpAddr::V6(ip) => IpAddr::from(ip),
+        }
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Error)]
+/// An error which can be returned when parsing an IP address with optional IPv6
+/// scope ID. See [`std::net::AddrParseError`].
+pub enum AddrParseError {
+    #[error(transparent)]
+    IpInvalid(#[from] std::net::AddrParseError),
+    #[error("no interface found with name \"{0}\"")]
+    InterfaceNotFound(String),
+    #[error("only IPv6 link-local may include a scope ID")]
+    /// Scope IDs are only supported for IPv6 link-local addresses as per RFC
+    /// 6874 Section 4.
+    ScopeNotSupported,
+}
+
+impl std::str::FromStr for IpAddr {
+    type Err = AddrParseError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let mut parts = s.splitn(2, '%');
+        let addr = parts.next().unwrap(); // first element is guaranteed
+        let ip = std::net::IpAddr::from_str(addr)?;
+        let scope = parts.next();
+        match (ip, scope) {
+            (std::net::IpAddr::V4(ip), None) => Ok(IpAddr::from(ip)),
+            (std::net::IpAddr::V4(_), Some(_)) => Err(AddrParseError::ScopeNotSupported),
+            (std::net::IpAddr::V6(ip), None) => Ok(IpAddr::V6(ip, None)),
+            (std::net::IpAddr::V6(ip), Some(scope)) => {
+                if !ip.is_link_local_addr() {
+                    return Err(AddrParseError::ScopeNotSupported);
+                }
+                if scope.len() == 0 {
+                    return Err(AddrParseError::InterfaceNotFound(scope.to_string()))
+                }
+                Ok(IpAddr::V6(ip, Some(scope.to_string())))
+            }
+        }
+    }
+}
+
+impl Display for IpAddr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            IpAddr::V4(ip) => Display::fmt(ip, f),
+            IpAddr::V6(ip, None) => Display::fmt(ip, f),
+            IpAddr::V6(ip, Some(scope)) => {
+                Display::fmt(ip, f)?;
+                write!(f, "%{}", scope)
+            }
+        }
+    }
+}
+
+impl Debug for IpAddr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        Display::fmt(self, f)
+    }
+}
+
+impl Serialize for IpAddr {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        serializer.serialize_str(self.to_string().as_str())
+    }
+}
+
+impl<'de> Deserialize<'de> for IpAddr {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        deserializer.deserialize_str(FromStrVisitor::new())
+    }
+}
+
+struct FromStrVisitor<T> {
+    ty: PhantomData<T>,
+}
+
+impl<T> FromStrVisitor<T> {
+    fn new() -> Self {
+        FromStrVisitor { ty: PhantomData }
+    }
+}
+
+impl<'de, T> serde::de::Visitor<'de> for FromStrVisitor<T>
+where
+    T: std::str::FromStr,
+    T::Err: std::fmt::Display,
+{
+    type Value = T;
+
+    fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        formatter.write_str("IP address")
+    }
+
+    fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+    where
+        E: serde::de::Error,
+    {
+        s.parse().map_err(serde::de::Error::custom)
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::{AddrParseError, IpAddr};
+    use assert_matches::assert_matches;
+
+    #[test]
+    fn parse_ip_invalid() {
+        assert_matches!("".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+        assert_matches!("192.168.1.".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+        assert_matches!("fe80:".parse::<IpAddr>(), Err(AddrParseError::IpInvalid(_)));
+    }
+
+    #[test]
+    fn parse_ipv4() {
+        assert_matches!(
+            "192.168.1.1".parse::<IpAddr>(),
+            Ok(IpAddr::V4(ip))
+                if ip == "192.168.1.1".parse::<std::net::Ipv4Addr>().unwrap()
+        );
+    }
+
+    #[test]
+    fn parse_ipv4_with_scope() {
+        assert_matches!(
+            "192.168.1.1%1".parse::<IpAddr>(),
+            Err(AddrParseError::ScopeNotSupported)
+        );
+    }
+
+    #[test]
+    fn parse_ipv6() {
+        assert_matches!(
+            "fe80::1".parse::<IpAddr>(),
+            Ok(IpAddr::V6(ip, None))
+                if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap()
+        );
+    }
+
+    #[test]
+    fn parse_ipv6_global_with_scope() {
+        assert_matches!("2001::1%1".parse::<IpAddr>(), Err(AddrParseError::ScopeNotSupported));
+    }
+
+    #[test]
+    fn parse_ipv6_link_local_with_scope() {
+        assert_matches!(
+            "fe80::1%1".parse::<IpAddr>(),
+            Ok(IpAddr::V6(ip, Some(scope)))
+                if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap()
+                && scope == "1"
+        );
+    }
+
+    #[test]
+    fn parse_ipv6_link_local_with_scope_interface_not_found() {
+        // An empty scope ID should trigger a failed lookup.
+        assert_matches!(
+            "fe80::1%".parse::<IpAddr>(),
+            Err(AddrParseError::InterfaceNotFound(name))
+                if name == ""
+        );
+    }
+}
diff --git a/runner/src/runner.rs b/runner/src/runner.rs
new file mode 100644
index 0000000..986acf1
--- /dev/null
+++ b/runner/src/runner.rs
@@ -0,0 +1,91 @@
+// Copyright 2023 The Fuchsia Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#[cfg(unix)]
+use std::os::unix::process::ExitStatusExt;
+use std::process::Command;
+use std::{path::PathBuf, process::ExitCode};
+
+use anyhow::{Context, Result};
+use itertools::Itertools;
+
+/// Runner for dispatching antlion.
+pub(crate) trait Runner {
+    /// Run antlion using the provided config file.
+    ///
+    /// Returns the terminal [`ExitStatus`] of the antlion run, or an error if
+    /// antlion could not be launched at all.
+    fn run(&self, config: PathBuf) -> Result<ExitStatus>;
+}
+
+/// Executes antlion as a local process.
+pub(crate) struct ProcessRunner {
+    // Path or name of the Python interpreter used to launch antlion.
+    pub python_bin: String,
+    // Path to the antlion .pyz archive to execute.
+    pub antlion_pyz: PathBuf,
+    // Test cases to run; when empty, antlion runs its full default set.
+    pub test_cases: Vec<String>,
+}
+
+impl Runner for ProcessRunner {
+    /// Spawn antlion as a child process and wait for it to terminate.
+    ///
+    /// Builds an argument list of the form
+    /// `<antlion_pyz> --config <config> [--test_case <name>...]`, echoes the
+    /// full command for debugging, then blocks until the child exits.
+    ///
+    /// Returns the child's [`ExitStatus`], or an error if a path is not valid
+    /// UTF-8 or the process could not be spawned.
+    fn run(&self, config: PathBuf) -> Result<ExitStatus> {
+        // Arguments are passed to the interpreter as strings. Reject
+        // non-UTF-8 paths with a proper error instead of panicking, as the
+        // previous `unwrap()` calls did.
+        let path_to_string = |path: PathBuf| -> Result<String> {
+            path.into_os_string()
+                .into_string()
+                .map_err(|p| anyhow::anyhow!("path is not valid UTF-8: {:?}", p))
+        };
+
+        let mut args = vec![
+            path_to_string(self.antlion_pyz.clone())?,
+            "--config".to_string(),
+            path_to_string(config)?,
+        ];
+
+        // Forward an explicit test-case filter only when one was requested.
+        if !self.test_cases.is_empty() {
+            args.push("--test_case".to_string());
+            args.extend(self.test_cases.iter().cloned());
+        }
+
+        println!(
+            "Launching antlion to run: \"{} {}\"\n",
+            &self.python_bin,
+            args.iter().format(" "),
+        );
+
+        let status = Command::new(&self.python_bin)
+            .args(args)
+            .status()
+            .context("Failed to execute antlion")?;
+
+        Ok(ExitStatus::from(status))
+    }
+}
+
+/// Describes the result of a child process after it has terminated.
+pub(crate) enum ExitStatus {
+    /// Process terminated without error (exit code 0).
+    Ok,
+    /// Process terminated with a non-zero status code; carries the raw code.
+    Err(i32),
+    /// Process was interrupted by a signal; carries the signal number when it
+    /// is known (Unix), `None` otherwise.
+    Interrupt(Option<i32>),
+}
+
+impl From<std::process::ExitStatus> for ExitStatus {
+    /// Classify a raw process exit status.
+    ///
+    /// `code()` returns `None` when the process was terminated by a signal
+    /// (Unix); in that case the signal number is recovered via
+    /// `ExitStatusExt::signal()`.
+    fn from(status: std::process::ExitStatus) -> Self {
+        match status.code() {
+            Some(0) => ExitStatus::Ok,
+            Some(code) => ExitStatus::Err(code),
+            // BUG FIX: the previous guard `cfg!(target_os = "unix")` is
+            // always false -- "unix" is not a valid `target_os` value (those
+            // are "linux", "macos", ...; the family predicate is plain
+            // `unix`) -- so signals were always reported as
+            // `Interrupt(None)`. Using compile-time `#[cfg]` arms also keeps
+            // the Unix-only `signal()` call out of non-Unix builds, matching
+            // the `#[cfg(unix)]` import of `ExitStatusExt` above.
+            #[cfg(unix)]
+            None => ExitStatus::Interrupt(status.signal()),
+            #[cfg(not(unix))]
+            None => ExitStatus::Interrupt(None),
+        }
+    }
+}
+
+// Prefer `From` over `Into` (clippy::from_over_into): implementing `From`
+// provides the matching `Into` automatically, so any existing `.into()` call
+// sites keep working unchanged.
+impl From<ExitStatus> for ExitCode {
+    /// Convert to a process exit code suitable for returning from `main`.
+    fn from(status: ExitStatus) -> Self {
+        match status {
+            ExitStatus::Ok => ExitCode::SUCCESS,
+            // `ExitCode` is built from a `u8`; codes outside 0..=255 collapse
+            // to the generic failure code 1.
+            ExitStatus::Err(code) => ExitCode::from(u8::try_from(code).unwrap_or(1)),
+            // A signal interrupt has no portable exit code; report failure.
+            ExitStatus::Interrupt(_) => ExitCode::FAILURE,
+        }
+    }
+}
diff --git a/runner/src/yaml.rs b/runner/src/yaml.rs
new file mode 100644
index 0000000..ae972bf
--- /dev/null
+++ b/runner/src/yaml.rs
@@ -0,0 +1,95 @@
+use serde_yaml::Value;
+
+/// Merge `b` into `a`, appending arrays and overwriting everything else.
+///
+/// Mappings are merged recursively key by key; sequences are concatenated in
+/// order (duplicates allowed); any other combination replaces `a` with `b`.
+pub fn merge(a: &mut Value, b: Value) {
+    match (a, b) {
+        (Value::Mapping(ref mut a), Value::Mapping(b)) => {
+            for (k, v) in b {
+                // Single `get_mut` lookup instead of `contains_key` followed
+                // by `a[&k]` indexing: one hash probe per key, and no
+                // reliance on the panicking `Index` impl.
+                match a.get_mut(&k) {
+                    Some(existing) => merge(existing, v),
+                    None => {
+                        a.insert(k, v);
+                    }
+                }
+            }
+        }
+        (Value::Sequence(ref mut a), Value::Sequence(ref mut b)) => {
+            // Concatenate: all of `a` first, then all of `b` (drained).
+            a.append(b);
+        }
+        // Scalars, nulls, and mismatched kinds: `b` wins outright.
+        (a, b) => *a = b,
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    // Nested mappings merge recursively: scalar `name` is overwritten by
+    // `b`'s value, while the `who_called` sub-mapping keeps keys from both.
+    #[test]
+    fn test_merge_mapping() {
+        let a = "
+            test_params:
+                name: a
+                who_called:
+                    was_a: true
+        ";
+        let mut a: Value = serde_yaml::from_str(a).unwrap();
+        let b = "
+            test_params:
+                name: b
+                who_called:
+                    was_b: true
+        ";
+        let b: Value = serde_yaml::from_str(b).unwrap();
+        merge(&mut a, b);
+        let want = "
+            test_params:
+                name: b
+                who_called:
+                    was_a: true
+                    was_b: true
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    // Sequences are concatenated, `a`'s elements first.
+    #[test]
+    fn test_merge_append_arrays() {
+        let mut a: Value = serde_yaml::from_str(" - a").unwrap();
+        let b: Value = serde_yaml::from_str(" - b").unwrap();
+        merge(&mut a, b);
+        let want = "
+            - a
+            - b
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    // Concatenation does not deduplicate: merging equal arrays doubles up.
+    #[test]
+    fn test_merge_append_arrays_allow_duplicates() {
+        let mut a: Value = serde_yaml::from_str(" - a").unwrap();
+        let b: Value = serde_yaml::from_str(" - a").unwrap();
+        merge(&mut a, b);
+        let want = "
+            - a
+            - a
+        ";
+        let want: Value = serde_yaml::from_str(want).unwrap();
+        assert_eq!(a, want);
+    }
+
+    // A null destination is overwritten by any incoming value...
+    #[test]
+    fn test_merge_overwrite_from_null() {
+        let mut a: Value = Value::Null;
+        let b: Value = serde_yaml::from_str("true").unwrap();
+        merge(&mut a, b.clone());
+        assert_eq!(a, b);
+    }
+
+    // ...and an incoming null likewise overwrites an existing value.
+    #[test]
+    fn test_merge_overwrite_with_null() {
+        let mut a: Value = serde_yaml::from_str("true").unwrap();
+        let b: Value = Value::Null;
+        merge(&mut a, b.clone());
+        assert_eq!(a, b);
+    }
+}
diff --git a/setup.py b/setup.py
index 30f198d..1f4dcf9 100644
--- a/setup.py
+++ b/setup.py
@@ -14,39 +14,38 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup
 
 install_requires = [
-    "mobly==1.12.0",
+    "mobly==1.12.2",
     "pyyaml>=5.1",
     "tenacity~=8.0",
+    # TODO(b/240443856): Remove these dependencies once antlion runs in
+    # Fuchsia's LUCI infrastructure. These are needed for flashing and using
+    # mDNS discovery, which are unnecessary in the future infrastructure.
+    "usbinfo",
+    "psutil",
+    "zeroconf",
 ]
 
-setup(name='antlion',
-      version='0.2.0',
-      description = "Host-driven, hardware-agnostic Fuchsia connectivity tests",
-      license='Apache-2.0',
-      packages=find_packages(
-          where='src',
-      ),
-      package_dir={"": "src"},
-      include_package_data=True,
-      tests_require=[],
-      install_requires=install_requires,
-      extras_require={
-          'html_graphing': ['bokeh'],
-          'dev': ['shiv', 'toml', 'yapf'],
-          'digital_loggers_pdu': ['dlipower'],
-          'flash': ['usbinfo'],
-          'mdns': ['psutil', 'zeroconf'],
-          'android': [
-              'Monsoon',
-              'numpy',
-              'paramiko[ed25519]',
-              'pylibftdi',
-              'pyserial',
-              'requests',
-              'scapy',
-              'scipy',
-          ],
-      })
+setup(
+    name="antlion",
+    version="0.2.0",
+    description="Host-driven, hardware-agnostic Fuchsia connectivity tests",
+    license="Apache-2.0",
+    packages=find_packages(
+        where="packages",
+    ),
+    package_dir={"": "packages"},
+    include_package_data=True,
+    tests_require=[],
+    install_requires=install_requires,
+    extras_require={
+        "html_graphing": ["bokeh"],
+        "digital_loggers_pdu": ["dlipower"],
+        "android": [
+            "numpy",
+            "scapy",
+        ],
+    },
+)
diff --git a/src/antlion/asserts.py b/src/antlion/asserts.py
deleted file mode 100644
index ce0a7b1..0000000
--- a/src/antlion/asserts.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mobly.asserts import *
-
-
-# Have an instance of unittest.TestCase so we could reuse some logic from
-# python's own unittest.
-# _ProxyTest is required because py2 does not allow instantiating
-# unittest.TestCase directly.
-class _ProxyTest(unittest.TestCase):
-    def runTest(self):
-        pass
-
-
-_pyunit_proxy = _ProxyTest()
-
-
-def assert_almost_equal(first,
-                        second,
-                        places=7,
-                        msg=None,
-                        delta=None,
-                        extras=None):
-    """
-    Assert FIRST to be within +/- DELTA to SECOND, otherwise fail the
-    test.
-    :param first: The first argument, LHS
-    :param second: The second argument, RHS
-    :param places: For floating points, how many decimal places to look into
-    :param msg: Message to display on failure
-    :param delta: The +/- first and second could be apart from each other
-    :param extras: Extra object passed to test failure handler
-    :return:
-    """
-    my_msg = None
-    try:
-        if delta:
-            _pyunit_proxy.assertAlmostEqual(
-                first, second, msg=msg, delta=delta)
-        else:
-            _pyunit_proxy.assertAlmostEqual(
-                first, second, places=places, msg=msg)
-    except Exception as e:
-        my_msg = str(e)
-        if msg:
-            my_msg = "%s %s" % (my_msg, msg)
-    # This is a hack to remove the stacktrace produced by the above exception.
-    if my_msg is not None:
-        fail(my_msg, extras=extras)
diff --git a/src/antlion/base_test.py b/src/antlion/base_test.py
deleted file mode 100755
index 5e39933..0000000
--- a/src/antlion/base_test.py
+++ /dev/null
@@ -1,962 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import fnmatch
-import functools
-import importlib
-import logging
-import os
-import traceback
-from concurrent.futures import ThreadPoolExecutor
-
-from antlion import asserts
-from antlion import error
-from antlion import keys
-from antlion import logger
-from antlion import records
-from antlion import signals
-from antlion import tracelogger
-from antlion import utils
-from antlion.event import event_bus
-from antlion.event import subscription_bundle
-from antlion.event.decorators import subscribe_static
-from antlion.event.event import TestCaseBeginEvent
-from antlion.event.event import TestCaseEndEvent
-from antlion.event.event import TestClassBeginEvent
-from antlion.event.event import TestClassEndEvent
-from antlion.event.subscription_bundle import SubscriptionBundle
-
-from mobly.base_test import BaseTestClass as MoblyBaseTest
-from mobly.records import ExceptionRecord
-
-# Macro strings for test result reporting
-TEST_CASE_TOKEN = "[Test Case]"
-RESULT_LINE_TEMPLATE = TEST_CASE_TOKEN + " %s %s"
-
-
-@subscribe_static(TestCaseBeginEvent)
-def _logcat_log_test_begin(event):
-    """Ensures that logcat is running. Write a logcat line indicating test case
-     begin."""
-    test_instance = event.test_class
-    try:
-        for ad in getattr(test_instance, 'android_devices', []):
-            if not ad.is_adb_logcat_on:
-                ad.start_adb_logcat()
-            # Write test start token to adb log if android device is attached.
-            if not ad.skip_sl4a and ad.droid:
-                ad.droid.logV("%s BEGIN %s" %
-                              (TEST_CASE_TOKEN, event.test_case_name))
-
-    except error.ActsError as e:
-        test_instance.results.error.append(
-            ExceptionRecord(e, 'Logcat for test begin: %s' %
-                            event.test_case_name))
-        test_instance.log.error('BaseTest setup_test error: %s' % e.details)
-    except Exception as e:
-        test_instance.log.warning(
-            'Unable to send BEGIN log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
-
-
-@subscribe_static(TestCaseEndEvent)
-def _logcat_log_test_end(event):
-    """Write a logcat line indicating test case end."""
-    test_instance = event.test_class
-    try:
-        # Write test end token to adb log if android device is attached.
-        for ad in getattr(test_instance, 'android_devices', []):
-            if not ad.skip_sl4a and ad.droid:
-                ad.droid.logV("%s END %s" %
-                              (TEST_CASE_TOKEN, event.test_case_name))
-
-    except error.ActsError as e:
-        test_instance.results.error.append(
-            ExceptionRecord(e,
-                            'Logcat for test end: %s' % event.test_case_name))
-        test_instance.log.error('BaseTest teardown_test error: %s' % e.details)
-    except Exception as e:
-        test_instance.log.warning(
-            'Unable to send END log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
-
-
-@subscribe_static(TestCaseBeginEvent)
-def _syslog_log_test_begin(event):
-    """This adds a BEGIN log message with the test name to the syslog of any
-    Fuchsia device"""
-    test_instance = event.test_class
-    try:
-        for fd in getattr(test_instance, 'fuchsia_devices', []):
-            if hasattr(fd, '_sl4f'):
-                fd.sl4f.logging_lib.logI(
-                    "%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name))
-
-    except Exception as e:
-        test_instance.log.warning(
-            'Unable to send BEGIN log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
-
-
-@subscribe_static(TestCaseEndEvent)
-def _syslog_log_test_end(event):
-    """This adds a END log message with the test name to the syslog of any
-    Fuchsia device"""
-    test_instance = event.test_class
-    try:
-        for fd in getattr(test_instance, 'fuchsia_devices', []):
-            if hasattr(fd, '_sl4f'):
-                fd.sl4f.logging_lib.logI(
-                    "%s END %s" % (TEST_CASE_TOKEN, event.test_case_name))
-
-    except Exception as e:
-        test_instance.log.warning(
-            'Unable to send END log command to all devices.')
-        test_instance.log.warning('Error: %s' % e)
-
-
-event_bus.register_subscription(_logcat_log_test_begin.subscription)
-event_bus.register_subscription(_logcat_log_test_end.subscription)
-event_bus.register_subscription(_syslog_log_test_begin.subscription)
-event_bus.register_subscription(_syslog_log_test_end.subscription)
-
-
-class Error(Exception):
-    """Raised for exceptions that occured in BaseTestClass."""
-
-
-class BaseTestClass(MoblyBaseTest):
-    """Base class for all test classes to inherit from. Inherits some
-    functionality from Mobly's base test class.
-
-    This class gets all the controller objects from test_runner and executes
-    the test cases requested within itself.
-
-    Most attributes of this class are set at runtime based on the configuration
-    provided.
-
-    Attributes:
-        tests: A list of strings, each representing a test case name.
-        TAG: A string used to refer to a test class. Default is the test class
-             name.
-        log: A logger object used for logging.
-        results: A records.TestResult object for aggregating test results from
-                 the execution of test cases.
-        controller_configs: A dict of controller configs provided by the user
-                            via the testbed config.
-        consecutive_failures: Tracks the number of consecutive test case
-                              failures within this class.
-        consecutive_failure_limit: Number of consecutive test failures to allow
-                                   before blocking remaining tests in the same
-                                   test class.
-        size_limit_reached: True if the size of the log directory has reached
-                            its limit.
-        current_test_name: A string that's the name of the test case currently
-                           being executed. If no test is executing, this should
-                           be None.
-    """
-
-    TAG = None
-
-    def __init__(self, configs):
-        """Initializes a BaseTestClass given a TestRunConfig, which provides
-        all of the config information for this test class.
-
-        Args:
-            configs: A config_parser.TestRunConfig object.
-        """
-        super().__init__(configs)
-
-        self.__handle_file_user_params()
-
-        self.class_subscriptions = SubscriptionBundle()
-        self.class_subscriptions.register()
-        self.all_subscriptions = [self.class_subscriptions]
-
-        self.current_test_name = None
-        self.log = tracelogger.TraceLogger(logging.getLogger())
-        # TODO: remove after converging log path definitions with mobly
-        self.log_path = configs.log_path
-
-        self.consecutive_failures = 0
-        self.consecutive_failure_limit = self.user_params.get(
-            'consecutive_failure_limit', -1)
-        self.size_limit_reached = False
-        self.retryable_exceptions = signals.TestFailure
-
-    def _import_builtin_controllers(self):
-        """Import built-in controller modules.
-
-        Go through the testbed configs, find any built-in controller configs
-        and import the corresponding controller module from antlion.controllers
-        package.
-
-        Returns:
-            A list of controller modules.
-        """
-        builtin_controllers = []
-        for ctrl_name in keys.Config.builtin_controller_names.value:
-            if ctrl_name in self.controller_configs:
-                module_name = keys.get_module_name(ctrl_name)
-                module = importlib.import_module("antlion.controllers.%s" %
-                                                 module_name)
-                builtin_controllers.append(module)
-        return builtin_controllers
-
-    def __handle_file_user_params(self):
-        """For backwards compatibility, moves all contents of the "files" dict
-        into the root level of user_params.
-
-        This allows existing tests to run with the new Mobly-style format
-        without needing to make changes.
-        """
-        for key, value in self.user_params.items():
-            if key.endswith('files') and isinstance(value, dict):
-                new_user_params = dict(value)
-                new_user_params.update(self.user_params)
-                self.user_params = new_user_params
-                break
-
-    @staticmethod
-    def get_module_reference_name(a_module):
-        """Returns the module's reference name.
-
-        This is largely for backwards compatibility with log parsing. If the
-        module defines ACTS_CONTROLLER_REFERENCE_NAME, it will return that
-        value, or the module's submodule name.
-
-        Args:
-            a_module: Any module. Ideally, a controller module.
-        Returns:
-            A string corresponding to the module's name.
-        """
-        if hasattr(a_module, 'ACTS_CONTROLLER_REFERENCE_NAME'):
-            return a_module.ACTS_CONTROLLER_REFERENCE_NAME
-        else:
-            return a_module.__name__.split('.')[-1]
-
-    def register_controller(self,
-                            controller_module,
-                            required=True,
-                            builtin=False):
-        """Registers an ACTS controller module for a test class. Invokes Mobly's
-        implementation of register_controller.
-
-        An ACTS controller module is a Python lib that can be used to control
-        a device, service, or equipment. To be ACTS compatible, a controller
-        module needs to have the following members:
-
-            def create(configs):
-                [Required] Creates controller objects from configurations.
-                Args:
-                    configs: A list of serialized data like string/dict. Each
-                             element of the list is a configuration for a
-                             controller object.
-                Returns:
-                    A list of objects.
-
-            def destroy(objects):
-                [Required] Destroys controller objects created by the create
-                function. Each controller object shall be properly cleaned up
-                and all the resources held should be released, e.g. memory
-                allocation, sockets, file handlers etc.
-                Args:
-                    A list of controller objects created by the create function.
-
-            def get_info(objects):
-                [Optional] Gets info from the controller objects used in a test
-                run. The info will be included in test_result_summary.json under
-                the key "ControllerInfo". Such information could include unique
-                ID, version, or anything that could be useful for describing the
-                test bed and debugging.
-                Args:
-                    objects: A list of controller objects created by the create
-                             function.
-                Returns:
-                    A list of json serializable objects, each represents the
-                    info of a controller object. The order of the info object
-                    should follow that of the input objects.
-        Registering a controller module declares a test class's dependency the
-        controller. If the module config exists and the module matches the
-        controller interface, controller objects will be instantiated with
-        corresponding configs. The module should be imported first.
-
-        Args:
-            controller_module: A module that follows the controller module
-                interface.
-            required: A bool. If True, failing to register the specified
-                controller module raises exceptions. If False, returns None upon
-                failures.
-            builtin: Specifies that the module is a builtin controller module in
-                ACTS. If true, adds itself to test attributes.
-        Returns:
-            A list of controller objects instantiated from controller_module, or
-            None.
-
-        Raises:
-            When required is True, ControllerError is raised if no corresponding
-            config can be found.
-            Regardless of the value of "required", ControllerError is raised if
-            the controller module has already been registered or any other error
-            occurred in the registration process.
-        """
-        module_ref_name = self.get_module_reference_name(controller_module)
-        module_config_name = controller_module.MOBLY_CONTROLLER_CONFIG_NAME
-
-        # Get controller objects from Mobly's register_controller
-        controllers = self._controller_manager.register_controller(
-            controller_module, required=required)
-        if not controllers:
-            return None
-
-        # Log controller information
-        # Implementation of "get_info" is optional for a controller module.
-        if hasattr(controller_module, "get_info"):
-            controller_info = controller_module.get_info(controllers)
-            self.log.info("Controller %s: %s", module_config_name,
-                          controller_info)
-
-        if builtin:
-            setattr(self, module_ref_name, controllers)
-        return controllers
-
-    def _setup_class(self):
-        """Proxy function to guarantee the base implementation of setup_class
-        is called.
-        """
-        event_bus.post(TestClassBeginEvent(self))
-        # Import and register the built-in controller modules specified
-        # in testbed config.
-        for module in self._import_builtin_controllers():
-            self.register_controller(module, builtin=True)
-        return self.setup_class()
-
-    def _teardown_class(self):
-        """Proxy function to guarantee the base implementation of teardown_class
-        is called.
-        """
-        super()._teardown_class()
-        event_bus.post(TestClassEndEvent(self, self.results))
-
-    def _setup_test(self, test_name):
-        """Proxy function to guarantee the base implementation of setup_test is
-        called.
-        """
-        self.current_test_name = test_name
-
-        # Skip the test if the consecutive test case failure limit is reached.
-        if self.consecutive_failures == self.consecutive_failure_limit:
-            raise signals.TestError('Consecutive test failure')
-
-        return self.setup_test()
-
-    def setup_test(self):
-        """Setup function that will be called every time before executing each
-        test case in the test class.
-
-        To signal setup failure, return False or raise an exception. If
-        exceptions were raised, the stack trace would appear in log, but the
-        exceptions would not propagate to upper levels.
-
-        Implementation is optional.
-        """
-        return True
-
-    def _teardown_test(self, test_name):
-        """Proxy function to guarantee the base implementation of teardown_test
-        is called.
-        """
-        self.log.debug('Tearing down test %s' % test_name)
-        self.teardown_test()
-
-    def _on_fail(self, record):
-        """Proxy function to guarantee the base implementation of on_fail is
-        called.
-
-        Args:
-            record: The records.TestResultRecord object for the failed test
-                    case.
-        """
-        self.consecutive_failures += 1
-        if record.details:
-            self.log.error(record.details)
-        self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result)
-        self.on_fail(record.test_name, record.begin_time)
-
-    def on_fail(self, test_name, begin_time):
-        """A function that is executed upon a test case failure.
-
-        User implementation is optional.
-
-        Args:
-            test_name: Name of the test that triggered this function.
-            begin_time: Logline format timestamp taken when the test started.
-        """
-
-    def _on_pass(self, record):
-        """Proxy function to guarantee the base implementation of on_pass is
-        called.
-
-        Args:
-            record: The records.TestResultRecord object for the passed test
-                    case.
-        """
-        self.consecutive_failures = 0
-        msg = record.details
-        if msg:
-            self.log.info(msg)
-        self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result)
-        self.on_pass(record.test_name, record.begin_time)
-
-    def on_pass(self, test_name, begin_time):
-        """A function that is executed upon a test case passing.
-
-        Implementation is optional.
-
-        Args:
-            test_name: Name of the test that triggered this function.
-            begin_time: Logline format timestamp taken when the test started.
-        """
-
-    def _on_skip(self, record):
-        """Proxy function to guarantee the base implementation of on_skip is
-        called.
-
-        Args:
-            record: The records.TestResultRecord object for the skipped test
-                    case.
-        """
-        self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result)
-        self.log.info("Reason to skip: %s", record.details)
-        self.on_skip(record.test_name, record.begin_time)
-
-    def on_skip(self, test_name, begin_time):
-        """A function that is executed upon a test case being skipped.
-
-        Implementation is optional.
-
-        Args:
-            test_name: Name of the test that triggered this function.
-            begin_time: Logline format timestamp taken when the test started.
-        """
-
-    def _on_exception(self, record):
-        """Proxy function to guarantee the base implementation of on_exception
-        is called.
-
-        Args:
-            record: The records.TestResultRecord object for the failed test
-                    case.
-        """
-        self.log.exception(record.details)
-        self.on_exception(record.test_name, record.begin_time)
-
-    def on_exception(self, test_name, begin_time):
-        """A function that is executed upon an unhandled exception from a test
-        case.
-
-        Implementation is optional.
-
-        Args:
-            test_name: Name of the test that triggered this function.
-            begin_time: Logline format timestamp taken when the test started.
-        """
-
-    def on_retry(self):
-        """Function to run before retrying a test through get_func_with_retry.
-
-        This function runs when a test is automatically retried. The function
-        can be used to modify internal test parameters, for example, to retry
-        a test with slightly different input variables.
-        """
-
-    def _exec_procedure_func(self, func, tr_record):
-        """Executes a procedure function like on_pass, on_fail etc.
-
-        This function will alternate the 'Result' of the test's record if
-        exceptions happened when executing the procedure function.
-
-        This will let signals.TestAbortAll through so abort_all works in all
-        procedure functions.
-
-        Args:
-            func: The procedure function to be executed.
-            tr_record: The TestResultRecord object associated with the test
-                       case executed.
-        """
-        try:
-            func(tr_record)
-        except signals.TestAbortAll:
-            raise
-        except Exception as e:
-            self.log.exception("Exception happened when executing %s for %s.",
-                               func.__name__, self.current_test_name)
-            tr_record.add_error(func.__name__, e)
-
-    def exec_one_testcase(self, test_name, test_func):
-        """Executes one test case and update test results.
-
-        Executes one test case, create a records.TestResultRecord object with
-        the execution information, and add the record to the test class's test
-        results.
-
-        Args:
-            test_name: Name of the test.
-            test_func: The test function.
-        """
-        class_name = self.__class__.__name__
-        tr_record = records.TestResultRecord(test_name, class_name)
-        tr_record.test_begin()
-        self.begin_time = int(tr_record.begin_time)
-        self.log_begin_time = tr_record.log_begin_time
-        self.test_name = tr_record.test_name
-        event_bus.post(TestCaseBeginEvent(self, self.test_name))
-        self.log.info("%s %s", TEST_CASE_TOKEN, test_name)
-
-        # Enable test retry if specified in the ACTS config
-        retry_tests = self.user_params.get('retry_tests', [])
-        full_test_name = '%s.%s' % (class_name, self.test_name)
-        if any(name in retry_tests for name in [class_name, full_test_name]):
-            test_func = self.get_func_with_retry(test_func)
-
-        verdict = None
-        test_signal = None
-        try:
-            try:
-                ret = self._setup_test(self.test_name)
-                asserts.assert_true(ret is not False,
-                                    "Setup for %s failed." % test_name)
-                verdict = test_func()
-            finally:
-                try:
-                    self._teardown_test(self.test_name)
-                except signals.TestAbortAll:
-                    raise
-                except Exception as e:
-                    self.log.error(traceback.format_exc())
-                    tr_record.add_error("teardown_test", e)
-        except (signals.TestFailure, AssertionError) as e:
-            test_signal = e
-            if self.user_params.get(
-                    keys.Config.key_test_failure_tracebacks.value, False):
-                self.log.exception(e)
-            tr_record.test_fail(e)
-        except signals.TestSkip as e:
-            # Test skipped.
-            test_signal = e
-            tr_record.test_skip(e)
-        except (signals.TestAbortClass, signals.TestAbortAll) as e:
-            # Abort signals, pass along.
-            test_signal = e
-            tr_record.test_fail(e)
-            raise e
-        except signals.TestPass as e:
-            # Explicit test pass.
-            test_signal = e
-            tr_record.test_pass(e)
-        except Exception as e:
-            test_signal = e
-            self.log.error(traceback.format_exc())
-            # Exception happened during test.
-            tr_record.test_error(e)
-        else:
-            if verdict or (verdict is None):
-                # Test passed.
-                tr_record.test_pass()
-                return
-            tr_record.test_fail()
-        finally:
-            tr_record.update_record()
-            try:
-                # Execute post-test procedures
-                result = tr_record.result
-                if result == records.TestResultEnums.TEST_RESULT_PASS:
-                    self._exec_procedure_func(self._on_pass, tr_record)
-                elif result == records.TestResultEnums.TEST_RESULT_FAIL:
-                    self._exec_procedure_func(self._on_fail, tr_record)
-                elif result == records.TestResultEnums.TEST_RESULT_SKIP:
-                    self._exec_procedure_func(self._on_skip, tr_record)
-                elif result == records.TestResultEnums.TEST_RESULT_ERROR:
-                    self._exec_procedure_func(self._on_exception, tr_record)
-                    self._exec_procedure_func(self._on_fail, tr_record)
-            finally:
-                self.results.add_record(tr_record)
-                self.summary_writer.dump(tr_record.to_dict(),
-                                         records.TestSummaryEntryType.RECORD)
-                self.current_test_name = None
-                event_bus.post(
-                    TestCaseEndEvent(self, self.test_name, test_signal))
-
-    def get_func_with_retry(self, func, attempts=2):
-        """Returns a wrapped test method that re-runs after failure. Return test
-        result upon success. If attempt limit reached, collect all failure
-        messages and raise a TestFailure signal.
-
-        Params:
-            func: The test method
-            attempts: Number of attempts to run test
-
-        Returns: result of the test method
-        """
-        exceptions = self.retryable_exceptions
-
-        def wrapper(*args, **kwargs):
-            error_msgs = []
-            extras = {}
-            retry = False
-            for i in range(attempts):
-                try:
-                    if retry:
-                        self.teardown_test()
-                        self.setup_test()
-                        self.on_retry()
-                    return func(*args, **kwargs)
-                except exceptions as e:
-                    retry = True
-                    msg = 'Failure on attempt %d: %s' % (i + 1, e.details)
-                    self.log.warning(msg)
-                    error_msgs.append(msg)
-                    if e.extras:
-                        extras['Attempt %d' % (i + 1)] = e.extras
-            raise signals.TestFailure('\n'.join(error_msgs), extras)
-
-        return wrapper
-
-    def run_generated_testcases(self,
-                                test_func,
-                                settings,
-                                args=None,
-                                kwargs=None,
-                                tag="",
-                                name_func=None,
-                                format_args=False):
-        """Deprecated. Please use pre_run and generate_tests.
-
-        Generated test cases are not written down as functions, but as a list
-        of parameter sets. This way we reduce code repetition and improve
-        test case scalability.
-
-        Args:
-            test_func: The common logic shared by all these generated test
-                       cases. This function should take at least one argument,
-                       which is a parameter set.
-            settings: A list of strings representing parameter sets. These are
-                      usually json strings that get loaded in the test_func.
-            args: Iterable of additional position args to be passed to
-                  test_func.
-            kwargs: Dict of additional keyword args to be passed to test_func
-            tag: Name of this group of generated test cases. Ignored if
-                 name_func is provided and operates properly.
-            name_func: A function that takes a test setting and generates a
-                       proper test name. The test name should be shorter than
-                       utils.MAX_FILENAME_LEN. Names over the limit will be
-                       truncated.
-            format_args: If True, args will be appended as the first argument
-                         in the args list passed to test_func.
-
-        Returns:
-            A list of settings that did not pass.
-        """
-        args = args or ()
-        kwargs = kwargs or {}
-        failed_settings = []
-
-        for setting in settings:
-            test_name = "{} {}".format(tag, setting)
-
-            if name_func:
-                try:
-                    test_name = name_func(setting, *args, **kwargs)
-                except:
-                    self.log.exception(("Failed to get test name from "
-                                        "test_func. Fall back to default %s"),
-                                       test_name)
-
-            self.results.requested.append(test_name)
-
-            if len(test_name) > utils.MAX_FILENAME_LEN:
-                test_name = test_name[:utils.MAX_FILENAME_LEN]
-
-            previous_success_cnt = len(self.results.passed)
-
-            if format_args:
-                self.exec_one_testcase(
-                    test_name,
-                    functools.partial(test_func, *(args + (setting, )),
-                                      **kwargs))
-            else:
-                self.exec_one_testcase(
-                    test_name,
-                    functools.partial(test_func, *((setting, ) + args),
-                                      **kwargs))
-
-            if len(self.results.passed) - previous_success_cnt != 1:
-                failed_settings.append(setting)
-
-        return failed_settings
-
-    def _exec_func(self, func, *args):
-        """Executes a function with exception safeguard.
-
-        This will let signals.TestAbortAll through so abort_all works in all
-        procedure functions.
-
-        Args:
-            func: Function to be executed.
-            args: Arguments to be passed to the function.
-
-        Returns:
-            Whatever the function returns, or False if unhandled exception
-            occured.
-        """
-        try:
-            return func(*args)
-        except signals.TestAbortAll:
-            raise
-        except:
-            self.log.exception("Exception happened when executing %s in %s.",
-                               func.__name__, self.TAG)
-            return False
-
-    def _block_all_test_cases(self, tests, reason='Failed class setup'):
-        """
-        Block all passed in test cases.
-        Args:
-            tests: The tests to block.
-            reason: Message describing the reason that the tests are blocked.
-                Default is 'Failed class setup'
-        """
-        for test_name, test_func in tests:
-            signal = signals.TestError(reason)
-            record = records.TestResultRecord(test_name, self.TAG)
-            record.test_begin()
-            if hasattr(test_func, 'gather'):
-                signal.extras = test_func.gather()
-            record.test_error(signal)
-            self.results.add_record(record)
-            self.summary_writer.dump(record.to_dict(),
-                                     records.TestSummaryEntryType.RECORD)
-            self._on_skip(record)
-
-    def run(self, test_names=None):
-        """Runs test cases within a test class by the order they appear in the
-        execution list.
-
-        One of these test cases lists will be executed, shown here in priority
-        order:
-        1. The test_names list, which is passed from cmd line.
-        2. The self.tests list defined in test class. Invalid names are
-           ignored.
-        3. All function that matches test case naming convention in the test
-           class.
-
-        Args:
-            test_names: A list of string that are test case names/patterns
-             requested in cmd line.
-
-        Returns:
-            The test results object of this class.
-        """
-        # Executes pre-setup procedures, like generating test methods.
-        if not self._pre_run():
-            return self.results
-
-        self.register_test_class_event_subscriptions()
-        self.log.info("==========> %s <==========", self.TAG)
-        # Devise the actual test cases to run in the test class.
-        if self.tests:
-            # Specified by run list in class.
-            valid_tests = list(self.tests)
-        else:
-            # No test case specified by user, gather the run list automatically.
-            valid_tests = self.get_existing_test_names()
-        if test_names:
-            # Match test cases with any of the user-specified patterns
-            matches = []
-            for test_name in test_names:
-                for valid_test in valid_tests:
-                    if (fnmatch.fnmatch(valid_test, test_name)
-                            and valid_test not in matches):
-                        matches.append(valid_test)
-        else:
-            matches = valid_tests
-        self.results.requested = matches
-        self.summary_writer.dump(self.results.requested_test_names_dict(),
-                                 records.TestSummaryEntryType.TEST_NAME_LIST)
-        tests = self._get_test_methods(matches)
-
-        # Setup for the class.
-        setup_fail = False
-        try:
-            if self._setup_class() is False:
-                self.log.error("Failed to setup %s.", self.TAG)
-                self._block_all_test_cases(tests)
-                setup_fail = True
-        except signals.TestAbortClass:
-            self.log.exception('Test class %s aborted' % self.TAG)
-            setup_fail = True
-        except Exception as e:
-            self.log.exception("Failed to setup %s.", self.TAG)
-            self._block_all_test_cases(tests)
-            setup_fail = True
-        if setup_fail:
-            self._exec_func(self._teardown_class)
-            self.log.info("Summary for test class %s: %s", self.TAG,
-                          self.results.summary_str())
-            return self.results
-
-        # Run tests in order.
-        test_case_iterations = self.user_params.get(
-            keys.Config.key_test_case_iterations.value, 1)
-        if any([
-                substr in self.__class__.__name__
-                for substr in ['Preflight', 'Postflight']
-        ]):
-            test_case_iterations = 1
-        try:
-            for test_name, test_func in tests:
-                for _ in range(test_case_iterations):
-                    self.exec_one_testcase(test_name, test_func)
-            return self.results
-        except signals.TestAbortClass:
-            self.log.exception('Test class %s aborted' % self.TAG)
-            return self.results
-        except signals.TestAbortAll as e:
-            # Piggy-back test results on this exception object so we don't lose
-            # results from this test class.
-            setattr(e, "results", self.results)
-            raise e
-        finally:
-            self._exec_func(self._teardown_class)
-            self.log.info("Summary for test class %s: %s", self.TAG,
-                          self.results.summary_str())
-
-    def _ad_take_bugreport(self, ad, test_name, begin_time):
-        for i in range(3):
-            try:
-                ad.take_bug_report(test_name, begin_time)
-                return True
-            except Exception as e:
-                ad.log.error("bugreport attempt %s error: %s", i + 1, e)
-
-    def _ad_take_extra_logs(self, ad, test_name, begin_time):
-        result = True
-        if getattr(ad, "qxdm_log", False):
-            # Gather qxdm log modified 3 minutes earlier than test start time
-            if begin_time:
-                qxdm_begin_time = begin_time - 1000 * 60 * 3
-            else:
-                qxdm_begin_time = None
-            try:
-                ad.get_qxdm_logs(test_name, qxdm_begin_time)
-            except Exception as e:
-                ad.log.error("Failed to get QXDM log for %s with error %s",
-                             test_name, e)
-                result = False
-
-        try:
-            ad.check_crash_report(test_name, begin_time, log_crash_report=True)
-        except Exception as e:
-            ad.log.error("Failed to check crash report for %s with error %s",
-                         test_name, e)
-            result = False
-        return result
-
-    def _skip_bug_report(self, test_name):
-        """A function to check whether we should skip creating a bug report.
-
-        Args:
-            test_name: The test case name
-
-        Returns: True if bug report is to be skipped.
-        """
-        if "no_bug_report_on_fail" in self.user_params:
-            return True
-
-        # If the current test class or test case is found in the set of
-        # problematic tests, we skip bugreport and other failure artifact
-        # creation.
-        class_name = self.__class__.__name__
-        quiet_tests = self.user_params.get('quiet_tests', [])
-        if class_name in quiet_tests:
-            self.log.info(
-                "Skipping bug report, as directed for this test class.")
-            return True
-        full_test_name = '%s.%s' % (class_name, test_name)
-        if full_test_name in quiet_tests:
-            self.log.info(
-                "Skipping bug report, as directed for this test case.")
-            return True
-
-        # Once we hit a certain log path size, it's not going to get smaller.
-        # We cache the result so we don't have to keep doing directory walks.
-        if self.size_limit_reached:
-            return True
-        try:
-            max_log_size = int(
-                self.user_params.get("soft_output_size_limit") or "invalid")
-            log_path = getattr(logging, "log_path", None)
-            if log_path:
-                curr_log_size = utils.get_directory_size(log_path)
-                if curr_log_size > max_log_size:
-                    self.log.info(
-                        "Skipping bug report, as we've reached the size limit."
-                    )
-                    self.size_limit_reached = True
-                    return True
-        except ValueError:
-            pass
-        return False
-
-    def _take_bug_report(self, test_name, begin_time):
-        if self._skip_bug_report(test_name):
-            return
-
-        executor = ThreadPoolExecutor(max_workers=10)
-        for ad in getattr(self, 'android_devices', []):
-            executor.submit(self._ad_take_bugreport, ad, test_name, begin_time)
-            executor.submit(self._ad_take_extra_logs, ad, test_name,
-                            begin_time)
-        executor.shutdown()
-
-    def _reboot_device(self, ad):
-        ad.log.info("Rebooting device.")
-        ad = ad.reboot()
-
-    def _cleanup_logger_sessions(self):
-        for (mylogger, session) in self.logger_sessions:
-            self.log.info("Resetting a diagnostic session %s, %s", mylogger,
-                          session)
-            mylogger.reset()
-        self.logger_sessions = []
-
-    def _pull_diag_logs(self, test_name, begin_time):
-        for (mylogger, session) in self.logger_sessions:
-            self.log.info("Pulling diagnostic session %s", mylogger)
-            mylogger.stop(session)
-            diag_path = os.path.join(
-                self.log_path, logger.epoch_to_log_line_timestamp(begin_time))
-            os.makedirs(diag_path, exist_ok=True)
-            mylogger.pull(session, diag_path)
-
-    def register_test_class_event_subscriptions(self):
-        self.class_subscriptions = subscription_bundle.create_from_instance(
-            self)
-        self.class_subscriptions.register()
-
-    def unregister_test_class_event_subscriptions(self):
-        for package in self.all_subscriptions:
-            package.unregister()
diff --git a/src/antlion/bin/__init__.py b/src/antlion/bin/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/bin/__init__.py
+++ /dev/null
diff --git a/src/antlion/bin/act.py b/src/antlion/bin/act.py
deleted file mode 100755
index 81d0452..0000000
--- a/src/antlion/bin/act.py
+++ /dev/null
@@ -1,236 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import os
-import re
-import signal
-import sys
-import traceback
-
-from mobly import config_parser as mobly_config_parser
-
-from antlion import config_parser
-from antlion import keys
-from antlion import signals
-from antlion import test_runner
-from antlion import utils
-from antlion.config_parser import ActsConfigError
-
-
-def _run_test(parsed_config, test_identifiers, repeat=1):
-    """Instantiate and runs test_runner.TestRunner.
-
-    This is the function to start separate processes with.
-
-    Args:
-        parsed_config: A mobly.config_parser.TestRunConfig that is a set of
-                       configs for one test_runner.TestRunner.
-        test_identifiers: A list of tuples, each identifies what test case to
-                          run on what test class.
-        repeat: Number of times to iterate the specified tests.
-
-    Returns:
-        True if all tests passed without any error, False otherwise.
-    """
-    runner = _create_test_runner(parsed_config, test_identifiers)
-    try:
-        for i in range(repeat):
-            runner.run()
-        return runner.results.is_all_pass
-    except signals.TestAbortAll:
-        return True
-    except:
-        print("Exception when executing %s, iteration %s." %
-              (runner.testbed_name, i))
-        print(traceback.format_exc())
-    finally:
-        runner.stop()
-
-
-def _create_test_runner(parsed_config, test_identifiers):
-    """Instantiates one test_runner.TestRunner object and register termination
-    signal handlers that properly shut down the test_runner.TestRunner run.
-
-    Args:
-        parsed_config: A mobly.config_parser.TestRunConfig that is a set of
-                       configs for one test_runner.TestRunner.
-        test_identifiers: A list of tuples, each identifies what test case to
-                          run on what test class.
-
-    Returns:
-        A test_runner.TestRunner object.
-    """
-    try:
-        t = test_runner.TestRunner(parsed_config, test_identifiers)
-    except:
-        print("Failed to instantiate test runner, abort.")
-        print(traceback.format_exc())
-        sys.exit(1)
-    # Register handler for termination signals.
-    handler = config_parser.gen_term_signal_handler([t])
-    signal.signal(signal.SIGTERM, handler)
-    signal.signal(signal.SIGINT, handler)
-    return t
-
-
-def _run_tests(parsed_configs, test_identifiers, repeat):
-    """Executes requested tests sequentially.
-
-    Requested test runs will commence one after another according to the order
-    of their corresponding configs.
-
-    Args:
-        parsed_configs: A list of mobly.config_parser.TestRunConfig, each is a
-                        set of configs for one test_runner.TestRunner.
-        test_identifiers: A list of tuples, each identifies what test case to
-                          run on what test class.
-        repeat: Number of times to iterate the specified tests.
-
-    Returns:
-        True if all test runs executed successfully, False otherwise.
-    """
-    ok = True
-    for c in parsed_configs:
-        try:
-            ret = _run_test(c, test_identifiers, repeat)
-            ok = ok and ret
-        except Exception as e:
-            print("Exception occurred when executing test bed %s. %s" %
-                  (c.testbed_name, e))
-    return ok
-
-
-def main():
-    """This is the default implementation of a cli entry point for ACTS test
-    execution.
-
-    Or you could implement your own cli entry point using acts.config_parser
-    functions and acts.test_runner.execute_one_test_class.
-    """
-    parser = argparse.ArgumentParser(
-        description=("Specify tests to run. If nothing specified, "
-                     "run all test cases found."))
-    parser.add_argument('-c',
-                        '--config',
-                        type=str,
-                        required=True,
-                        metavar="<PATH>",
-                        help="Path to the test configuration file.")
-    parser.add_argument(
-        '-ci',
-        '--campaign_iterations',
-        metavar="<CAMPAIGN_ITERATIONS>",
-        nargs='?',
-        type=int,
-        const=1,
-        default=1,
-        help="Number of times to run the campaign or a group of test cases.")
-    parser.add_argument('-tb',
-                        '--testbed',
-                        nargs='+',
-                        type=str,
-                        metavar="[<TEST BED NAME1> <TEST BED NAME2> ...]",
-                        help="Specify which test beds to run tests on.")
-    parser.add_argument('-lp',
-                        '--logpath',
-                        type=str,
-                        metavar="<PATH>",
-                        help="Root path under which all logs will be placed.")
-    parser.add_argument(
-        '-tp',
-        '--testpaths',
-        nargs='*',
-        type=str,
-        metavar="<PATH> <PATH>",
-        help="One or more non-recursive test class search paths.")
-
-    group = parser.add_mutually_exclusive_group(required=True)
-    group.add_argument('-tc',
-                       '--testclass',
-                       nargs='+',
-                       type=str,
-                       metavar="[TestClass1 TestClass2:test_xxx ...]",
-                       help="A list of test classes/cases to run.")
-    group.add_argument(
-        '-tf',
-        '--testfile',
-        nargs=1,
-        type=str,
-        metavar="<PATH>",
-        help=("Path to a file containing a comma delimited list of test "
-              "classes to run."))
-    parser.add_argument('-ti',
-                        '--test_case_iterations',
-                        metavar="<TEST_CASE_ITERATIONS>",
-                        nargs='?',
-                        type=int,
-                        help="Number of times to run every test case.")
-
-    args = parser.parse_args(sys.argv[1:])
-    test_list = None
-    if args.testfile:
-        test_list = config_parser.parse_test_file(args.testfile[0])
-    elif args.testclass:
-        test_list = args.testclass
-    if re.search(r'\.ya?ml$', args.config):
-        parsed_configs = mobly_config_parser.load_test_config_file(
-            args.config, args.testbed)
-    else:
-        parsed_configs = config_parser.load_test_config_file(
-            args.config, args.testbed)
-
-    for test_run_config in parsed_configs:
-        if args.testpaths:
-            tp_key = keys.Config.key_test_paths.value
-            test_run_config.controller_configs[tp_key] = args.testpaths
-        if args.logpath:
-            test_run_config.log_path = args.logpath
-        if args.test_case_iterations:
-            ti_key = keys.Config.key_test_case_iterations.value
-            test_run_config.user_params[ti_key] = args.test_case_iterations
-
-        # Sets the --testpaths flag to the default test directory if left unset.
-        testpath_key = keys.Config.key_test_paths.value
-        if (testpath_key not in test_run_config.controller_configs
-                or test_run_config.controller_configs[testpath_key] is None):
-            test_run_config.controller_configs[testpath_key] = [
-                os.path.join(os.path.dirname(__file__), '../tests/'),
-            ]
-
-        for path in test_run_config.controller_configs[testpath_key]:
-            path = utils.abs_path(path)
-
-        # TODO(markdr): Find a way to merge this with the validation done in
-        # Mobly's load_test_config_file.
-        if not test_run_config.log_path:
-            raise ActsConfigError("Required key %s missing in test config." %
-                                  keys.Config.key_log_path.value)
-        test_run_config.log_path = utils.abs_path(test_run_config.log_path)
-
-    # Prepare args for test runs
-    test_identifiers = config_parser.parse_test_list(test_list)
-
-    exec_result = _run_tests(parsed_configs, test_identifiers,
-                             args.campaign_iterations)
-    if exec_result is False:
-        # return 1 upon test failure.
-        sys.exit(1)
-    sys.exit(0)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/src/antlion/config_parser.py b/src/antlion/config_parser.py
deleted file mode 100755
index 0cfb308..0000000
--- a/src/antlion/config_parser.py
+++ /dev/null
@@ -1,279 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import itertools
-import os
-import sys
-
-import mobly.config_parser as mobly_config_parser
-
-from antlion import keys
-from antlion import utils
-
-# An environment variable defining the base location for ACTS logs.
-_ENV_ACTS_LOGPATH = 'ACTS_LOGPATH'
-# An environment variable that enables test case failures to log stack traces.
-_ENV_TEST_FAILURE_TRACEBACKS = 'ACTS_TEST_FAILURE_TRACEBACKS'
-# An environment variable defining the test search paths for ACTS.
-_ENV_ACTS_TESTPATHS = 'ACTS_TESTPATHS'
-_PATH_SEPARATOR = ':'
-
-
-class ActsConfigError(Exception):
-    """Raised when there is a problem in test configuration file."""
-
-
-def _validate_test_config(test_config):
-    """Validates the raw configuration loaded from the config file.
-
-    Making sure all the required fields exist.
-    """
-    for k in keys.Config.reserved_keys.value:
-        # TODO(markdr): Remove this continue after merging this with the
-        # validation done in Mobly's load_test_config_file.
-        if (k == keys.Config.key_test_paths.value
-                or k == keys.Config.key_log_path.value):
-            continue
-
-        if k not in test_config:
-            raise ActsConfigError("Required key %s missing in test config." %
-                                  k)
-
-
-def _validate_testbed_name(name):
-    """Validates the name of a test bed.
-
-    Since test bed names are used as part of the test run id, it needs to meet
-    certain requirements.
-
-    Args:
-        name: The test bed's name specified in config file.
-
-    Raises:
-        If the name does not meet any criteria, ActsConfigError is raised.
-    """
-    if not name:
-        raise ActsConfigError("Test bed names can't be empty.")
-    if not isinstance(name, str):
-        raise ActsConfigError("Test bed names have to be string.")
-    for l in name:
-        if l not in utils.valid_filename_chars:
-            raise ActsConfigError(
-                "Char '%s' is not allowed in test bed names." % l)
-
-
-def _update_file_paths(config, config_path):
-    """ Checks if the path entries are valid.
-
-    If the file path is invalid, assume it is a relative path and append
-    that to the config file path.
-
-    Args:
-        config : the config object to verify.
-        config_path : The path to the config file, which can be used to
-                      generate absolute paths from relative paths in configs.
-
-    Raises:
-        If the file path is invalid, ActsConfigError is raised.
-    """
-    # Check the file_path_keys and update if it is a relative path.
-    for file_path_key in keys.Config.file_path_keys.value:
-        if file_path_key in config:
-            config_file = config[file_path_key]
-            if type(config_file) is str:
-                if not os.path.isfile(config_file):
-                    config_file = os.path.join(config_path, config_file)
-                if not os.path.isfile(config_file):
-                    raise ActsConfigError(
-                        "Unable to load config %s from test "
-                        "config file.", config_file)
-                config[file_path_key] = config_file
-
-
-def _validate_testbed_configs(testbed_configs, config_path):
-    """Validates the testbed configurations.
-
-    Args:
-        testbed_configs: A list of testbed configuration json objects.
-        config_path : The path to the config file, which can be used to
-                      generate absolute paths from relative paths in configs.
-
-    Raises:
-        If any part of the configuration is invalid, ActsConfigError is raised.
-    """
-    # Cross checks testbed configs for resource conflicts.
-    for name, config in testbed_configs.items():
-        _update_file_paths(config, config_path)
-        _validate_testbed_name(name)
-
-
-def gen_term_signal_handler(test_runners):
-    def termination_sig_handler(signal_num, frame):
-        print('Received sigterm %s.' % signal_num)
-        for t in test_runners:
-            t.stop()
-        sys.exit(1)
-
-    return termination_sig_handler
-
-
-def _parse_one_test_specifier(item):
-    """Parse one test specifier from command line input.
-
-    Args:
-        item: A string that specifies a test class or test cases in one test
-            class to run.
-
-    Returns:
-        A tuple of a string and a list of strings. The string is the test class
-        name, the list of strings is a list of test case names. The list can be
-        None.
-    """
-    tokens = item.split(':')
-    if len(tokens) > 2:
-        raise ActsConfigError("Syntax error in test specifier %s" % item)
-    if len(tokens) == 1:
-        # This should be considered a test class name
-        test_cls_name = tokens[0]
-        return test_cls_name, None
-    elif len(tokens) == 2:
-        # This should be considered a test class name followed by
-        # a list of test case names.
-        test_cls_name, test_case_names = tokens
-        clean_names = [elem.strip() for elem in test_case_names.split(',')]
-        return test_cls_name, clean_names
-
-
-def parse_test_list(test_list):
-    """Parse user provided test list into internal format for test_runner.
-
-    Args:
-        test_list: A list of test classes/cases.
-    """
-    result = []
-    for elem in test_list:
-        result.append(_parse_one_test_specifier(elem))
-    return result
-
-
-def load_test_config_file(test_config_path, tb_filters=None):
-    """Processes the test configuration file provided by the user.
-
-    Loads the configuration file into a json object, unpacks each testbed
-    config into its own TestRunConfig object, and validate the configuration in
-    the process.
-
-    Args:
-        test_config_path: Path to the test configuration file.
-        tb_filters: A subset of test bed names to be pulled from the config
-                    file. If None, then all test beds will be selected.
-
-    Returns:
-        A list of mobly.config_parser.TestRunConfig objects to be passed to
-        test_runner.TestRunner.
-    """
-    configs = utils.load_config(test_config_path)
-
-    testbeds = configs[keys.Config.key_testbed.value]
-    if type(testbeds) is list:
-        tb_dict = dict()
-        for testbed in testbeds:
-            tb_dict[testbed[keys.Config.key_testbed_name.value]] = testbed
-        testbeds = tb_dict
-    elif type(testbeds) is dict:
-        # For compatibility, make sure the entry name is the same as
-        # the testbed's "name" entry
-        for name, testbed in testbeds.items():
-            testbed[keys.Config.key_testbed_name.value] = name
-
-    if tb_filters:
-        tbs = {}
-        for name in tb_filters:
-            if name in testbeds:
-                tbs[name] = testbeds[name]
-            else:
-                raise ActsConfigError(
-                    'Expected testbed named "%s", but none was found. Check '
-                    'if you have the correct testbed names.' % name)
-        testbeds = tbs
-
-    if (keys.Config.key_log_path.value not in configs
-            and _ENV_ACTS_LOGPATH in os.environ):
-        print('Using environment log path: %s' %
-              (os.environ[_ENV_ACTS_LOGPATH]))
-        configs[keys.Config.key_log_path.value] = os.environ[_ENV_ACTS_LOGPATH]
-    if (keys.Config.key_test_paths.value not in configs
-            and _ENV_ACTS_TESTPATHS in os.environ):
-        print('Using environment test paths: %s' %
-              (os.environ[_ENV_ACTS_TESTPATHS]))
-        configs[keys.Config.key_test_paths.
-                value] = os.environ[_ENV_ACTS_TESTPATHS].split(_PATH_SEPARATOR)
-    if (keys.Config.key_test_failure_tracebacks not in configs
-            and _ENV_TEST_FAILURE_TRACEBACKS in os.environ):
-        configs[keys.Config.key_test_failure_tracebacks.
-                value] = os.environ[_ENV_TEST_FAILURE_TRACEBACKS]
-
-    # TODO: See if there is a better way to do this: b/29836695
-    config_path, _ = os.path.split(utils.abs_path(test_config_path))
-    configs[keys.Config.key_config_path.value] = config_path
-    _validate_test_config(configs)
-    _validate_testbed_configs(testbeds, config_path)
-    # Unpack testbeds into separate json objects.
-    configs.pop(keys.Config.key_testbed.value)
-    test_run_configs = []
-
-    for _, testbed in testbeds.items():
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = testbed[
-            keys.Config.key_testbed_name.value]
-        test_run_config.controller_configs = testbed
-        test_run_config.controller_configs[
-            keys.Config.key_test_paths.value] = configs.get(
-                keys.Config.key_test_paths.value, None)
-        test_run_config.log_path = configs.get(keys.Config.key_log_path.value,
-                                               None)
-        if test_run_config.log_path is not None:
-            test_run_config.log_path = utils.abs_path(test_run_config.log_path)
-
-        user_param_pairs = []
-        for item in itertools.chain(configs.items(), testbed.items()):
-            if item[0] not in keys.Config.reserved_keys.value:
-                user_param_pairs.append(item)
-        test_run_config.user_params = dict(user_param_pairs)
-
-        test_run_configs.append(test_run_config)
-    return test_run_configs
-
-
-def parse_test_file(fpath):
-    """Parses a test file that contains test specifiers.
-
-    Args:
-        fpath: A string that is the path to the test file to parse.
-
-    Returns:
-        A list of strings, each is a test specifier.
-    """
-    with open(fpath, 'r') as f:
-        tf = []
-        for line in f:
-            line = line.strip()
-            if not line:
-                continue
-            if len(tf) and (tf[-1].endswith(':') or tf[-1].endswith(',')):
-                tf[-1] += line
-            else:
-                tf.append(line)
-        return tf
diff --git a/src/antlion/context.py b/src/antlion/context.py
deleted file mode 100644
index 5fe1417..0000000
--- a/src/antlion/context.py
+++ /dev/null
@@ -1,354 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import logging
-import os
-
-from antlion.event import event_bus
-from antlion.event.event import Event
-from antlion.event.event import TestCaseBeginEvent
-from antlion.event.event import TestCaseEndEvent
-from antlion.event.event import TestCaseEvent
-from antlion.event.event import TestClassBeginEvent
-from antlion.event.event import TestClassEndEvent
-from antlion.event.event import TestClassEvent
-
-
-class ContextLevel(enum.IntEnum):
-    ROOT = 0
-    TESTCLASS = 1
-    TESTCASE = 2
-
-
-def get_current_context(depth=None):
-    """Get the current test context at the specified depth.
-    Pulls the most recently created context, with a level at or below the given
-    depth, from the _contexts stack.
-
-    Args:
-        depth: The desired context level. For example, the TESTCLASS level would
-            yield the current test class context, even if the test is currently
-            within a test case.
-
-    Returns: An instance of TestContext.
-    """
-    if depth is None:
-        return _contexts[-1]
-    return _contexts[min(depth, len(_contexts)-1)]
-
-
-def get_context_for_event(event):
-    """Creates and returns a TestContext from the given event.
-    A TestClassContext is created for a TestClassEvent, and a TestCaseContext
-    is created for a TestCaseEvent.
-
-    Args:
-        event: An instance of TestCaseEvent or TestClassEvent.
-
-    Returns: An instance of TestContext corresponding to the event.
-
-    Raises: TypeError if event is neither a TestCaseEvent nor TestClassEvent
-    """
-    if isinstance(event, TestCaseEvent):
-        return _get_context_for_test_case_event(event)
-    if isinstance(event, TestClassEvent):
-        return _get_context_for_test_class_event(event)
-    raise TypeError('Unrecognized event type: %s %s', event, event.__class__)
-
-
-def _get_context_for_test_case_event(event):
-    """Generate a TestCaseContext from the given TestCaseEvent."""
-    return TestCaseContext(event.test_class, event.test_case)
-
-
-def _get_context_for_test_class_event(event):
-    """Generate a TestClassContext from the given TestClassEvent."""
-    return TestClassContext(event.test_class)
-
-
-class NewContextEvent(Event):
-    """The event posted when a test context has changed."""
-
-
-class NewTestClassContextEvent(NewContextEvent):
-    """The event posted when the test class context has changed."""
-
-
-class NewTestCaseContextEvent(NewContextEvent):
-    """The event posted when the test case context has changed."""
-
-
-def _update_test_class_context(event):
-    """Pushes a new TestClassContext to the _contexts stack upon a
-    TestClassBeginEvent. Pops the most recent context off the stack upon a
-    TestClassEndEvent. Posts the context change to the event bus.
-
-    Args:
-        event: An instance of TestClassBeginEvent or TestClassEndEvent.
-    """
-    if isinstance(event, TestClassBeginEvent):
-        _contexts.append(_get_context_for_test_class_event(event))
-    if isinstance(event, TestClassEndEvent):
-        if _contexts:
-            _contexts.pop()
-    event_bus.post(NewTestClassContextEvent())
-
-
-def _update_test_case_context(event):
-    """Pushes a new TestCaseContext to the _contexts stack upon a
-    TestCaseBeginEvent. Pops the most recent context off the stack upon a
-    TestCaseEndEvent. Posts the context change to the event bus.
-
-    Args:
-        event: An instance of TestCaseBeginEvent or TestCaseEndEvent.
-    """
-    if isinstance(event, TestCaseBeginEvent):
-        _contexts.append(_get_context_for_test_case_event(event))
-    if isinstance(event, TestCaseEndEvent):
-        if _contexts:
-            _contexts.pop()
-    event_bus.post(NewTestCaseContextEvent())
-
-
-event_bus.register(TestClassEvent, _update_test_class_context)
-event_bus.register(TestCaseBeginEvent, _update_test_case_context, order=-100)
-event_bus.register(TestCaseEndEvent, _update_test_case_context, order=100)
-
-
-class TestContext(object):
-    """An object representing the current context in which a test is executing.
-
-    The context encodes the current state of the test runner with respect to a
-    particular scenario in which code is being executed. For example, if some
-    code is being executed as part of a test case, then the context should
-    encode information about that test case such as its name or enclosing
-    class.
-
-    The subcontext specifies a relative path in which certain outputs,
-    e.g. logcat, should be kept for the given context.
-
-    The full output path is given by
-    <base_output_path>/<context_dir>/<subcontext>.
-
-    Attributes:
-        _base_output_paths: a dictionary mapping a logger's name to its base
-                            output path
-        _subcontexts: a dictionary mapping a logger's name to its
-                      subcontext-level output directory
-    """
-
-    _base_output_paths = {}
-    _subcontexts = {}
-
-    def get_base_output_path(self, log_name=None):
-        """Gets the base output path for this logger.
-
-        The base output path is interpreted as the reporting root for the
-        entire test runner.
-
-        If a path has been added with add_base_output_path, it is returned.
-        Otherwise, a default is determined by _get_default_base_output_path().
-
-        Args:
-            log_name: The name of the logger.
-
-        Returns:
-            The output path.
-        """
-        if log_name in self._base_output_paths:
-            return self._base_output_paths[log_name]
-        return self._get_default_base_output_path()
-
-    @classmethod
-    def add_base_output_path(cls, log_name, base_output_path):
-        """Store the base path for this logger.
-
-        Args:
-            log_name: The name of the logger.
-            base_output_path: The base path of output files for this logger.
-            """
-        cls._base_output_paths[log_name] = base_output_path
-
-    def get_subcontext(self, log_name=None):
-        """Gets the subcontext for this logger.
-
-        The subcontext is interpreted as the directory, relative to the
-        context-level path, where all outputs of the given logger are stored.
-
-        If a path has been added with add_subcontext, it is returned.
-        Otherwise, the empty string is returned.
-
-        Args:
-            log_name: The name of the logger.
-
-        Returns:
-            The output path.
-        """
-        return self._subcontexts.get(log_name, '')
-
-    @classmethod
-    def add_subcontext(cls, log_name, subcontext):
-        """Store the subcontext path for this logger.
-
-        Args:
-            log_name: The name of the logger.
-            subcontext: The relative subcontext path of output files for this
-                        logger.
-        """
-        cls._subcontexts[log_name] = subcontext
-
-    def get_full_output_path(self, log_name=None):
-        """Gets the full output path for this context.
-
-        The full path represents the absolute path to the output directory,
-        as given by <base_output_path>/<context_dir>/<subcontext>
-
-        Args:
-            log_name: The name of the logger. Used to specify the base output
-                      path and the subcontext.
-
-        Returns:
-            The output path.
-        """
-
-        path = os.path.join(self.get_base_output_path(log_name),
-                            self._get_default_context_dir(),
-                            self.get_subcontext(log_name))
-        os.makedirs(path, exist_ok=True)
-        return path
-
-    @property
-    def identifier(self):
-        raise NotImplementedError()
-
-    def _get_default_base_output_path(self):
-        """Gets the default base output path.
-
-        This will attempt to use the ACTS logging path set up in the global
-        logger.
-
-        Returns:
-            The logging path.
-
-        Raises:
-            EnvironmentError: If the ACTS logger has not been initialized.
-        """
-        try:
-            return logging.log_path
-        except AttributeError as e:
-            raise EnvironmentError(
-                'The ACTS logger has not been set up and'
-                ' "base_output_path" has not been set.') from e
-
-    def _get_default_context_dir(self):
-        """Gets the default output directory for this context."""
-        raise NotImplementedError()
-
-
-class RootContext(TestContext):
-    """A TestContext that represents a test run."""
-
-    @property
-    def identifier(self):
-        return 'root'
-
-    def _get_default_context_dir(self):
-        """Gets the default output directory for this context.
-
-        Logs at the root level context are placed directly in the base level
-        directory, so no context-level path exists."""
-        return ''
-
-
-class TestClassContext(TestContext):
-    """A TestContext that represents a test class.
-
-    Attributes:
-        test_class: The test class instance that this context represents.
-    """
-
-    def __init__(self, test_class):
-        """Initializes a TestClassContext for the given test class.
-
-        Args:
-            test_class: A test class object. Must be an instance of the test
-                        class, not the class object itself.
-        """
-        self.test_class = test_class
-
-    @property
-    def test_class_name(self):
-        return self.test_class.__class__.__name__
-
-    @property
-    def identifier(self):
-        return self.test_class_name
-
-    def _get_default_context_dir(self):
-        """Gets the default output directory for this context.
-
-        For TestClassContexts, this will be the name of the test class. This is
-        in line with the ACTS logger itself.
-        """
-        return self.test_class_name
-
-
-class TestCaseContext(TestContext):
-    """A TestContext that represents a test case.
-
-    Attributes:
-        test_case: The string name of the test case.
-        test_class: The test class instance enclosing the test case.
-    """
-
-    def __init__(self, test_class, test_case):
-        """Initializes a TestCaseContext for the given test case.
-
-        Args:
-            test_class: A test class object. Must be an instance of the test
-                        class, not the class object itself.
-            test_case: The string name of the test case.
-        """
-        self.test_class = test_class
-        self.test_case = test_case
-
-    @property
-    def test_case_name(self):
-        return self.test_case
-
-    @property
-    def test_class_name(self):
-        return self.test_class.__class__.__name__
-
-    @property
-    def identifier(self):
-        return '%s.%s' % (self.test_class_name, self.test_case_name)
-
-    def _get_default_context_dir(self):
-        """Gets the default output directory for this context.
-
-        For TestCaseContexts, this will be the name of the test class followed
-        by the name of the test case. This is in line with the ACTS logger
-        itself.
-        """
-        return os.path.join(
-            self.test_class_name,
-            self.test_case_name)
-
-
-# stack for keeping track of the current test context
-_contexts = [RootContext()]
diff --git a/src/antlion/controllers/__init__.py b/src/antlion/controllers/__init__.py
deleted file mode 100644
index 640393c..0000000
--- a/src/antlion/controllers/__init__.py
+++ /dev/null
@@ -1,31 +0,0 @@
-"""Modules under antlion.controllers provide interfaces to hardware/software
-resources that ACTS manages.
-
-Top level controllers module are controller modules that need to be explicitly
-specified by users in test configuration files. Top level controller modules
-should have the following module level functions:
-
-def create(configs, logger):
-    '''Instantiates the controller class with the input configs.
-    Args:
-        configs: A list of dicts each representing config for one controller
-            object.
-        logger: The main logger used in the current test run.
-    Returns:
-        A list of controller objects.
-
-def destroy(objs):
-    '''Destroys a list of controller objects created by the "create" function
-    and releases all the resources.
-
-    Args:
-        objs: A list of controller objects created from this module.
-    '''
-"""
-"""This is a list of all the top level controller modules"""
-__all__ = [
-    "android_device", "attenuator", "bluetooth_pts_device", "monsoon",
-    "access_point", "iperf_server", "packet_sender", "arduino_wifi_dongle",
-    "packet_capture", "fuchsia_device", "pdu", "openwrt_ap", "tigertail",
-    "asus_axe11000_ap"
-]
diff --git a/src/antlion/controllers/abstract_inst.py b/src/antlion/controllers/abstract_inst.py
deleted file mode 100644
index d55c3a5..0000000
--- a/src/antlion/controllers/abstract_inst.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python module for Abstract Instrument Library."""
-
-import socket
-import requests
-from antlion import logger
-
-
-class SocketInstrumentError(Exception):
-    """Abstract Instrument Error Class, via Socket and SCPI."""
-
-    def __init__(self, error, command=None):
-        """Init method for Socket Instrument Error.
-
-        Args:
-            error: Exception error.
-            command: Additional information on command,
-                Type, Str.
-        """
-        super(SocketInstrumentError, self).__init__(error)
-        self._error_code = error
-        self._error_message = self._error_code
-        if command is not None:
-            self._error_message = 'Command {} returned the error: {}.'.format(
-                repr(command), repr(self._error_message))
-
-    def __str__(self):
-        return self._error_message
-
-
-class SocketInstrument(object):
-    """Abstract Instrument Class, via Socket and SCPI."""
-
-    def __init__(self, ip_addr, ip_port):
-        """Init method for Socket Instrument.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        self._socket_timeout = 120
-        self._socket_buffer_size = 1024
-
-        self._ip_addr = ip_addr
-        self._ip_port = ip_port
-
-        self._escseq = '\n'
-        self._codefmt = 'utf-8'
-
-        self._logger = logger.create_tagged_trace_logger(
-            '%s:%s' % (self._ip_addr, self._ip_port))
-
-        self._socket = None
-
-    def _connect_socket(self):
-        """Init and Connect to socket."""
-        try:
-            self._socket = socket.create_connection(
-                (self._ip_addr, self._ip_port), timeout=self._socket_timeout)
-
-            infmsg = 'Opened Socket connection to {}:{} with handle {}.'.format(
-                repr(self._ip_addr), repr(self._ip_port), repr(self._socket))
-            self._logger.debug(infmsg)
-
-        except socket.timeout:
-            errmsg = 'Socket timeout while connecting to instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = 'Socket error while connecting to instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-    def _send(self, cmd):
-        """Send command via Socket.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-        """
-        if not self._socket:
-            self._logger.warning('Socket instrument is not connected')
-            self._connect_socket()
-
-        cmd_es = cmd + self._escseq
-
-        try:
-            self._socket.sendall(cmd_es.encode(self._codefmt))
-            self._logger.debug('Sent %r to %r:%r.', cmd, self._ip_addr,
-                               self._ip_port)
-
-        except socket.timeout:
-            errmsg = ('Socket timeout while sending command {} '
-                      'to instrument.').format(repr(cmd))
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = ('Socket error while sending command {} '
-                      'to instrument.').format(repr(cmd))
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except Exception as err:
-            errmsg = ('Error {} while sending command {} '
-                      'to instrument.').format(repr(cmd), repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-    def _recv(self):
-        """Receive response via Socket.
-
-        Returns:
-            resp: Response from Instrument via Socket,
-                Type, Str.
-        """
-        if not self._socket:
-            self._logger.warning('Socket instrument is not connected')
-            self._connect_socket()
-
-        resp = ''
-
-        try:
-            while True:
-                resp_tmp = self._socket.recv(self._socket_buffer_size)
-                resp_tmp = resp_tmp.decode(self._codefmt)
-                resp += resp_tmp
-                if len(resp_tmp) < self._socket_buffer_size:
-                    break
-
-        except socket.timeout:
-            errmsg = 'Socket timeout while receiving response from instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except socket.error:
-            errmsg = 'Socket error while receiving response from instrument.'
-            self._logger.exception(errmsg)
-            raise SocketInstrumentError(errmsg)
-
-        except Exception as err:
-            errmsg = ('Error {} while receiving response '
-                      'from instrument').format(repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-        resp = resp.rstrip(self._escseq)
-
-        self._logger.debug('Received %r from %r:%r.', resp, self._ip_addr,
-                           self._ip_port)
-
-        return resp
-
-    def _close_socket(self):
-        """Close Socket Instrument."""
-        if not self._socket:
-            return
-
-        try:
-            self._socket.shutdown(socket.SHUT_RDWR)
-            self._socket.close()
-            self._socket = None
-            self._logger.debug('Closed Socket Instrument %r:%r.',
-                               self._ip_addr, self._ip_port)
-
-        except Exception as err:
-            errmsg = 'Error {} while closing instrument.'.format(repr(err))
-            self._logger.exception(errmsg)
-            raise
-
-    def _query(self, cmd):
-        """query instrument via Socket.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-
-        Returns:
-            resp: Response from Instrument via Socket,
-                Type, Str.
-        """
-        self._send(cmd + ';*OPC?')
-        resp = self._recv()
-        return resp
-
-
-class RequestInstrument(object):
-    """Abstract Instrument Class, via Request."""
-
-    def __init__(self, ip_addr):
-        """Init method for request instrument.
-
-        Args:
-            ip_addr: IP Address.
-                Type, Str.
-        """
-        self._request_timeout = 120
-        self._request_protocol = 'http'
-        self._ip_addr = ip_addr
-        self._escseq = '\r\n'
-
-        self._logger = logger.create_tagged_trace_logger(self._ip_addr)
-
-    def _query(self, cmd):
-        """query instrument via request.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-
-        Returns:
-            resp: Response from Instrument via request,
-                Type, Str.
-        """
-        request_cmd = '{}://{}/{}'.format(self._request_protocol,
-                                          self._ip_addr, cmd)
-        resp_raw = requests.get(request_cmd, timeout=self._request_timeout)
-
-        resp = resp_raw.text
-        for char_del in self._escseq:
-            resp = resp.replace(char_del, '')
-
-        self._logger.debug('Sent %r to %r, and get %r.', cmd, self._ip_addr,
-                           resp)
-
-        return resp
diff --git a/src/antlion/controllers/access_point.py b/src/antlion/controllers/access_point.py
deleted file mode 100755
index d9116ce..0000000
--- a/src/antlion/controllers/access_point.py
+++ /dev/null
@@ -1,949 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import ipaddress
-import os
-import time
-
-from typing import FrozenSet, Set, TYPE_CHECKING
-
-from antlion import logger
-from antlion import utils
-from antlion.controllers import pdu
-from antlion.controllers.ap_lib import ap_get_interface
-from antlion.controllers.ap_lib import ap_iwconfig
-from antlion.controllers.ap_lib import bridge_interface
-from antlion.controllers.ap_lib import dhcp_config
-from antlion.controllers.ap_lib import dhcp_server
-from antlion.controllers.ap_lib import hostapd
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import radvd
-from antlion.controllers.ap_lib import radvd_config
-from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
-from antlion.controllers.ap_lib.wireless_network_management import BssTransitionManagementRequest
-from antlion.controllers.utils_lib.commands import ip
-from antlion.controllers.utils_lib.commands import route
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import settings
-from antlion.libs.proc import job
-
-if TYPE_CHECKING:
-    from antlion.controllers.ap_lib.radvd import Radvd
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'AccessPoint'
-ACTS_CONTROLLER_REFERENCE_NAME = 'access_points'
-_BRCTL = 'brctl'
-
-LIFETIME = 180
-PROC_NET_SNMP6 = '/proc/net/snmp6'
-SCAPY_INSTALL_COMMAND = 'sudo python setup.py install'
-RA_MULTICAST_ADDR = '33:33:00:00:00:01'
-RA_SCRIPT = 'sendra.py'
-
-
-def create(configs):
-    """Creates ap controllers from a json config.
-
-    Creates an ap controller from either a list, or a single
-    element. The element can either be just the hostname or a dictionary
-    containing the hostname and username of the ap to connect to over ssh.
-
-    Args:
-        The json configs that represent this controller.
-
-    Returns:
-        A new AccessPoint.
-    """
-    return [AccessPoint(c) for c in configs]
-
-
-def destroy(aps):
-    """Destroys a list of access points.
-
-    Args:
-        aps: The list of access points to destroy.
-    """
-    for ap in aps:
-        ap.close()
-
-
-def get_info(aps):
-    """Get information on a list of access points.
-
-    Args:
-        aps: A list of AccessPoints.
-
-    Returns:
-        A list of all aps hostname.
-    """
-    return [ap.ssh_settings.hostname for ap in aps]
-
-
-def setup_ap(
-        access_point,
-        profile_name,
-        channel,
-        ssid,
-        mode=None,
-        preamble=None,
-        beacon_interval=None,
-        dtim_period=None,
-        frag_threshold=None,
-        rts_threshold=None,
-        force_wmm=None,
-        hidden=False,
-        security=None,
-        pmf_support=None,
-        additional_ap_parameters=None,
-        password=None,
-        n_capabilities=None,
-        ac_capabilities=None,
-        vht_bandwidth=None,
-        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
-        setup_bridge=False,
-        is_ipv6_enabled=False,
-        is_nat_enabled=True):
-    """Creates a hostapd profile and runs it on an ap. This is a convenience
-    function that allows us to start an ap with a single function, without first
-    creating a hostapd config.
-
-    Args:
-        access_point: An ACTS access_point controller
-        profile_name: The profile name of one of the hostapd ap presets.
-        channel: What channel to set the AP to.
-        preamble: Whether to set short or long preamble (True or False)
-        beacon_interval: The beacon interval (int)
-        dtim_period: Length of dtim period (int)
-        frag_threshold: Fragmentation threshold (int)
-        rts_threshold: RTS threshold (int)
-        force_wmm: Enable WMM or not (True or False)
-        hidden: Advertise the SSID or not (True or False)
-        security: What security to enable.
-        pmf_support: int, whether pmf is not disabled, enabled, or required
-        additional_ap_parameters: Additional parameters to send the AP.
-        password: Password to connect to WLAN if necessary.
-        check_connectivity: Whether to check for internet connectivity.
-        wnm_features: WNM features to enable on the AP.
-        setup_bridge: Whether to bridge the LAN interface WLAN interface.
-            Only one WLAN interface can be bridged with the LAN interface
-            and none of the guest networks can be bridged.
-        is_ipv6_enabled: If True, start a IPv6 router advertisement daemon
-        is_nat_enabled: If True, start NAT on the AP to allow the DUT to be able
-            to access the internet if the WAN port is connected to the internet.
-
-    Returns:
-        An identifier for each ssid being started. These identifiers can be
-        used later by this controller to control the ap.
-
-    Raises:
-        Error: When the ap can't be brought up.
-    """
-    ap = hostapd_ap_preset.create_ap_preset(profile_name=profile_name,
-                                            iface_wlan_2g=access_point.wlan_2g,
-                                            iface_wlan_5g=access_point.wlan_5g,
-                                            channel=channel,
-                                            ssid=ssid,
-                                            mode=mode,
-                                            short_preamble=preamble,
-                                            beacon_interval=beacon_interval,
-                                            dtim_period=dtim_period,
-                                            frag_threshold=frag_threshold,
-                                            rts_threshold=rts_threshold,
-                                            force_wmm=force_wmm,
-                                            hidden=hidden,
-                                            bss_settings=[],
-                                            security=security,
-                                            pmf_support=pmf_support,
-                                            n_capabilities=n_capabilities,
-                                            ac_capabilities=ac_capabilities,
-                                            vht_bandwidth=vht_bandwidth,
-                                            wnm_features=wnm_features)
-    return access_point.start_ap(
-        hostapd_config=ap,
-        radvd_config=radvd_config.RadvdConfig() if is_ipv6_enabled else None,
-        setup_bridge=setup_bridge,
-        is_nat_enabled=is_nat_enabled,
-        additional_parameters=additional_ap_parameters)
-
-
-class Error(Exception):
-    """Error raised when there is a problem with the access point."""
-
-
-_ApInstance = collections.namedtuple('_ApInstance', ['hostapd', 'subnet'])
-
-# These ranges were split this way since each physical radio can have up
-# to 8 SSIDs so for the 2GHz radio the DHCP range will be
-# 192.168.1 - 8 and the 5Ghz radio will be 192.168.9 - 16
-_AP_2GHZ_SUBNET_STR_DEFAULT = '192.168.1.0/24'
-_AP_5GHZ_SUBNET_STR_DEFAULT = '192.168.9.0/24'
-
-# The last digit of the ip for the bridge interface
-BRIDGE_IP_LAST = '100'
-
-
-class AccessPoint(object):
-    """An access point controller.
-
-    Attributes:
-        ssh: The ssh connection to this ap.
-        ssh_settings: The ssh settings being used by the ssh connection.
-        dhcp_settings: The dhcp server settings being used.
-    """
-
-    def __init__(self, configs):
-        """
-        Args:
-            configs: configs for the access point from config file.
-        """
-        self.ssh_settings = settings.from_config(configs['ssh_config'])
-        self.log = logger.create_logger(
-            lambda msg: f'[Access Point|{self.ssh_settings.hostname}] {msg}')
-        self.device_pdu_config = configs.get('PduDevice', None)
-        self.identifier = self.ssh_settings.hostname
-
-        if 'ap_subnet' in configs:
-            self._AP_2G_SUBNET_STR = configs['ap_subnet']['2g']
-            self._AP_5G_SUBNET_STR = configs['ap_subnet']['5g']
-        else:
-            self._AP_2G_SUBNET_STR = _AP_2GHZ_SUBNET_STR_DEFAULT
-            self._AP_5G_SUBNET_STR = _AP_5GHZ_SUBNET_STR_DEFAULT
-
-        self._AP_2G_SUBNET = dhcp_config.Subnet(
-            ipaddress.ip_network(self._AP_2G_SUBNET_STR))
-        self._AP_5G_SUBNET = dhcp_config.Subnet(
-            ipaddress.ip_network(self._AP_5G_SUBNET_STR))
-
-        self.ssh = connection.SshConnection(self.ssh_settings)
-
-        # Singleton utilities for running various commands.
-        self._ip_cmd = ip.LinuxIpCommand(self.ssh)
-        self._route_cmd = route.LinuxRouteCommand(self.ssh)
-
-        # A map from network interface name to _ApInstance objects representing
-        # the hostapd instance running against the interface.
-        self._aps = dict()
-        self._dhcp = None
-        self._dhcp_bss = dict()
-        self._radvd: Radvd = None
-        self.bridge = bridge_interface.BridgeInterface(self)
-        self.iwconfig = ap_iwconfig.ApIwconfig(self)
-
-        # Check to see if wan_interface is specified in acts_config for tests
-        # isolated from the internet and set this override.
-        self.interfaces = ap_get_interface.ApInterfaces(
-            self, configs.get('wan_interface'))
-
-        # Get needed interface names and initialize the unnecessary ones.
-        self.wan = self.interfaces.get_wan_interface()
-        self.wlan = self.interfaces.get_wlan_interface()
-        self.wlan_2g = self.wlan[0]
-        self.wlan_5g = self.wlan[1]
-        self.lan = self.interfaces.get_lan_interface()
-        self._initial_ap()
-        self.scapy_install_path = None
-        self.setup_bridge = False
-
-    def _initial_ap(self):
-        """Initial AP interfaces.
-
-        Bring down hostapd if instance is running, bring down all bridge
-        interfaces.
-        """
-        # This is necessary for Gale/Whirlwind flashed with dev channel image
-        # Unused interfaces such as existing hostapd daemon, guest, mesh
-        # interfaces need to be brought down as part of the AP initialization
-        # process, otherwise test would fail.
-        try:
-            self.ssh.run('stop wpasupplicant')
-        except job.Error:
-            self.log.info('No wpasupplicant running')
-        try:
-            self.ssh.run('stop hostapd')
-        except job.Error:
-            self.log.info('No hostapd running')
-        # Bring down all wireless interfaces
-        for iface in self.wlan:
-            WLAN_DOWN = f'ip link set {iface} down'
-            self.ssh.run(WLAN_DOWN)
-        # Bring down all bridge interfaces
-        bridge_interfaces = self.interfaces.get_bridge_interface()
-        if bridge_interfaces:
-            for iface in bridge_interfaces:
-                BRIDGE_DOWN = f'ip link set {iface} down'
-                BRIDGE_DEL = f'brctl delbr {iface}'
-                self.ssh.run(BRIDGE_DOWN)
-                self.ssh.run(BRIDGE_DEL)
-
-    def start_ap(self,
-                 hostapd_config: hostapd_config.HostapdConfig,
-                 radvd_config=None,
-                 setup_bridge=False,
-                 is_nat_enabled=True,
-                 additional_parameters=None):
-        """Starts as an ap using a set of configurations.
-
-        This will start an ap on this host. To start an ap the controller
-        selects a network interface to use based on the configs given. It then
-        will start up hostapd on that interface. Next a subnet is created for
-        the network interface and dhcp server is refreshed to give out ips
-        for that subnet for any device that connects through that interface.
-
-        Args:
-            hostapd_config: hostapd_config.HostapdConfig, The configurations
-                to use when starting up the ap.
-            radvd_config: radvd_config.RadvdConfig, The IPv6 configuration
-                to use when starting up the ap.
-            setup_bridge: Whether to bridge the LAN interface WLAN interface.
-                Only one WLAN interface can be bridged with the LAN interface
-                and none of the guest networks can be bridged.
-            is_nat_enabled: If True, start NAT on the AP to allow the DUT to be
-                able to access the internet if the WAN port is connected to the
-                internet.
-            additional_parameters: A dictionary of parameters that can sent
-                directly into the hostapd config file.  This can be used for
-                debugging and or adding one off parameters into the config.
-
-        Returns:
-            An identifier for each ssid being started. These identifiers can be
-            used later by this controller to control the ap.
-
-        Raises:
-            Error: When the ap can't be brought up.
-        """
-        if hostapd_config.frequency < 5000:
-            interface = self.wlan_2g
-            subnet = self._AP_2G_SUBNET
-        else:
-            interface = self.wlan_5g
-            subnet = self._AP_5G_SUBNET
-
-        # radvd requires the interface to have a IPv6 link-local address.
-        if radvd_config:
-            self.ssh.run(f'sysctl -w net.ipv6.conf.{interface}.disable_ipv6=0')
-            self.ssh.run(f'sysctl -w net.ipv6.conf.{interface}.forwarding=1')
-
-        # In order to handle dhcp servers on any interface, the initiation of
-        # the dhcp server must be done after the wlan interfaces are figured
-        # out as opposed to being in __init__
-        self._dhcp = dhcp_server.DhcpServer(self.ssh, interface=interface)
-
-        # For multi bssid configurations the mac address
-        # of the wireless interface needs to have enough space to mask out
-        # up to 8 different mac addresses. So in for one interface the range is
-        # hex 0-7 and for the other the range is hex 8-f.
-        interface_mac_orig = None
-        cmd = f"ip link show {interface}|grep ether|awk -F' ' '{{print $2}}'"
-        interface_mac_orig = self.ssh.run(cmd)
-        if interface == self.wlan_5g:
-            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + '0'
-            last_octet = 1
-        if interface == self.wlan_2g:
-            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + '8'
-            last_octet = 9
-        if interface in self._aps:
-            raise ValueError('No WiFi interface available for AP on '
-                             f'channel {hostapd_config.channel}')
-
-        apd = hostapd.Hostapd(self.ssh, interface)
-        new_instance = _ApInstance(hostapd=apd, subnet=subnet)
-        self._aps[interface] = new_instance
-
-        # Turn off the DHCP server, we're going to change its settings.
-        self.stop_dhcp()
-        # Clear all routes to prevent old routes from interfering.
-        self._route_cmd.clear_routes(net_interface=interface)
-        # Add IPv6 link-local route so packets destined to the AP will be
-        # processed by the AP. This is necessary if an iperf server is running
-        # on the AP, but not for traffic handled by the Linux networking stack
-        # such as ping.
-        if radvd_config:
-            self._route_cmd.add_route(interface, 'fe80::/64')
-
-        self._dhcp_bss = dict()
-        if hostapd_config.bss_lookup:
-            # The self._dhcp_bss dictionary is created to hold the key/value
-            # pair of the interface name and the ip scope that will be
-            # used for the particular interface.  The a, b, c, d
-            # variables below are the octets for the ip address.  The
-            # third octet is then incremented for each interface that
-            # is requested.  This part is designed to bring up the
-            # hostapd interfaces and not the DHCP servers for each
-            # interface.
-            counter = 1
-            for bss in hostapd_config.bss_lookup:
-                if interface_mac_orig:
-                    hostapd_config.bss_lookup[bss].bssid = (
-                        interface_mac_orig.stdout[:-1] + hex(last_octet)[-1:])
-                self._route_cmd.clear_routes(net_interface=str(bss))
-                if interface is self.wlan_2g:
-                    starting_ip_range = self._AP_2G_SUBNET_STR
-                else:
-                    starting_ip_range = self._AP_5G_SUBNET_STR
-                a, b, c, d = starting_ip_range.split('.')
-                self._dhcp_bss[bss] = dhcp_config.Subnet(
-                    ipaddress.ip_network(f'{a}.{b}.{int(c) + counter}.{d}'))
-                counter = counter + 1
-                last_octet = last_octet + 1
-
-        apd.start(hostapd_config, additional_parameters=additional_parameters)
-
-        # The DHCP serer requires interfaces to have ips and routes before
-        # the server will come up.
-        interface_ip = ipaddress.ip_interface(
-            f'{subnet.router}/{subnet.network.netmask}')
-        if setup_bridge is True:
-            bridge_interface_name = 'eth_test'
-            self.create_bridge(bridge_interface_name, [interface, self.lan])
-            self._ip_cmd.set_ipv4_address(bridge_interface_name, interface_ip)
-        else:
-            self._ip_cmd.set_ipv4_address(interface, interface_ip)
-        if hostapd_config.bss_lookup:
-            # This loop goes through each interface that was setup for
-            # hostapd and assigns the DHCP scopes that were defined but
-            # not used during the hostapd loop above.  The k and v
-            # variables represent the interface name, k, and dhcp info, v.
-            for k, v in self._dhcp_bss.items():
-                bss_interface_ip = ipaddress.ip_interface(
-                    f'{self._dhcp_bss[k].router}/{self._dhcp_bss[k].network.netmask}'
-                )
-                self._ip_cmd.set_ipv4_address(str(k), bss_interface_ip)
-
-        # Restart the DHCP server with our updated list of subnets.
-        configured_subnets = self.get_configured_subnets()
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=configured_subnets)
-        self.start_dhcp(dhcp_conf=dhcp_conf)
-        if is_nat_enabled:
-            self.start_nat()
-            self.enable_forwarding()
-        else:
-            self.stop_nat()
-            self.enable_forwarding()
-        if radvd_config:
-            radvd_interface = bridge_interface_name if setup_bridge else interface
-            self._radvd = radvd.Radvd(self.ssh, radvd_interface)
-            self._radvd.start(radvd_config)
-        else:
-            self._radvd = None
-
-        bss_interfaces = [bss for bss in hostapd_config.bss_lookup]
-        bss_interfaces.append(interface)
-
-        return bss_interfaces
-
-    def get_configured_subnets(self):
-        """Get the list of configured subnets on the access point.
-
-        This allows consumers of the access point objects create custom DHCP
-        configs with the correct subnets.
-
-        Returns: a list of dhcp_config.Subnet objects
-        """
-        configured_subnets = [x.subnet for x in self._aps.values()]
-        for k, v in self._dhcp_bss.items():
-            configured_subnets.append(v)
-        return configured_subnets
-
-    def start_dhcp(self, dhcp_conf):
-        """Start a DHCP server for the specified subnets.
-
-        This allows consumers of the access point objects to control DHCP.
-
-        Args:
-            dhcp_conf: A dhcp_config.DhcpConfig object.
-
-        Raises:
-            Error: Raised when a dhcp server error is found.
-        """
-        self._dhcp.start(config=dhcp_conf)
-
-    def stop_dhcp(self):
-        """Stop DHCP for this AP object.
-
-        This allows consumers of the access point objects to control DHCP.
-        """
-        self._dhcp.stop()
-
-    def get_dhcp_logs(self):
-        """Get DHCP logs for this AP object.
-
-        This allows consumers of the access point objects to validate DHCP
-        behavior.
-
-        Returns:
-            A string of the dhcp server logs, or None is a DHCP server has not
-            been started.
-        """
-        if self._dhcp:
-            return self._dhcp.get_logs()
-        return None
-
-    def get_hostapd_logs(self):
-        """Get hostapd logs for all interfaces on AP object.
-
-        This allows consumers of the access point objects to validate hostapd
-        behavior.
-
-        Returns: A dict with {interface: log} from hostapd instances.
-        """
-        hostapd_logs = dict()
-        for identifier in self._aps:
-            hostapd_logs[identifier] = self._aps.get(
-                identifier).hostapd.pull_logs()
-        return hostapd_logs
-
-    def get_radvd_logs(self):
-        """Get radvd logs for this AP object.
-
-        This allows consumers of the access point objects to validate radvd
-        behavior.
-
-        Returns:
-            A string of the radvd logs, or None is a radvd server has not been
-            started.
-        """
-        if self._radvd:
-            return self._radvd.pull_logs()
-        return None
-
-    def enable_forwarding(self):
-        """Enable IPv4 and IPv6 forwarding on the AP.
-
-        When forwarding is enabled, the access point is able to route IP packets
-        between devices in the same subnet.
-        """
-        self.ssh.run('echo 1 > /proc/sys/net/ipv4/ip_forward')
-        self.ssh.run('echo 1 > /proc/sys/net/ipv6/conf/all/forwarding')
-
-    def start_nat(self):
-        """Start NAT on the AP.
-
-        This allows consumers of the access point objects to enable NAT
-        on the AP.
-
-        Note that this is currently a global setting, since we don't
-        have per-interface masquerade rules.
-        """
-        # The following three commands are needed to enable NAT between
-        # the WAN and LAN/WLAN ports.  This means anyone connecting to the
-        # WLAN/LAN ports will be able to access the internet if the WAN port
-        # is connected to the internet.
-        self.ssh.run('iptables -t nat -F')
-        self.ssh.run(
-            f'iptables -t nat -A POSTROUTING -o {self.wan} -j MASQUERADE')
-
-    def stop_nat(self):
-        """Stop NAT on the AP.
-
-        This allows consumers of the access point objects to disable NAT on the
-        AP.
-
-        Note that this is currently a global setting, since we don't have
-        per-interface masquerade rules.
-        """
-        self.ssh.run('iptables -t nat -F')
-
-    def create_bridge(self, bridge_name, interfaces):
-        """Create the specified bridge and bridge the specified interfaces.
-
-        Args:
-            bridge_name: The name of the bridge to create.
-            interfaces: A list of interfaces to add to the bridge.
-        """
-
-        # Create the bridge interface
-        self.ssh.run(f'brctl addbr {bridge_name}')
-
-        for interface in interfaces:
-            self.ssh.run(f'brctl addif {bridge_name} {interface}')
-
-        self.ssh.run(f'ip link set {bridge_name} up')
-
-    def remove_bridge(self, bridge_name):
-        """Removes the specified bridge
-
-        Args:
-            bridge_name: The name of the bridge to remove.
-        """
-        # Check if the bridge exists.
-        #
-        # Cases where it may not are if we failed to initialize properly
-        #
-        # Or if we're doing 2.4Ghz and 5Ghz SSIDs and we've already torn
-        # down the bridge once, but we got called for each band.
-        result = self.ssh.run(f'brctl show {bridge_name}', ignore_status=True)
-
-        # If the bridge exists, we'll get an exit_status of 0, indicating
-        # success, so we can continue and remove the bridge.
-        if result.exit_status == 0:
-            self.ssh.run(f'ip link set {bridge_name} down')
-            self.ssh.run(f'brctl delbr {bridge_name}')
-
-    def get_bssid_from_ssid(self, ssid, band):
-        """Gets the BSSID from a provided SSID
-
-        Args:
-            ssid: An SSID string.
-            band: 2G or 5G Wifi band.
-        Returns: The BSSID if on the AP or None if SSID could not be found.
-        """
-        if band == hostapd_constants.BAND_2G:
-            interfaces = [self.wlan_2g, ssid]
-        else:
-            interfaces = [self.wlan_5g, ssid]
-
-        # Get the interface name associated with the given ssid.
-        for interface in interfaces:
-            iw_output = self.ssh.run(
-                f"iw dev {interface} info|grep ssid|awk -F' ' '{{print $2}}'")
-            if 'command failed: No such device' in iw_output.stderr:
-                continue
-            else:
-                # If the configured ssid is equal to the given ssid, we found
-                # the right interface.
-                if iw_output.stdout == ssid:
-                    iw_output = self.ssh.run(
-                        f"iw dev {interface} info|grep addr|awk -F' ' '{{print $2}}'"
-                    )
-                    return iw_output.stdout
-        return None
-
-    def stop_ap(self, identifier):
-        """Stops a running ap on this controller.
-
-        Args:
-            identifier: The identify of the ap that should be taken down.
-        """
-
-        if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
-
-        instance = self._aps.get(identifier)
-
-        if self._radvd:
-            self._radvd.stop()
-        try:
-            self.stop_dhcp()
-        except dhcp_server.NoInterfaceError:
-            pass
-        self.stop_nat()
-        instance.hostapd.stop()
-        self._ip_cmd.clear_ipv4_addresses(identifier)
-
-        del self._aps[identifier]
-        bridge_interfaces = self.interfaces.get_bridge_interface()
-        if bridge_interfaces:
-            for iface in bridge_interfaces:
-                BRIDGE_DOWN = f'ip link set {iface} down'
-                BRIDGE_DEL = f'brctl delbr {iface}'
-                self.ssh.run(BRIDGE_DOWN)
-                self.ssh.run(BRIDGE_DEL)
-
-    def stop_all_aps(self):
-        """Stops all running aps on this device."""
-
-        for ap in list(self._aps.keys()):
-            self.stop_ap(ap)
-
-    def close(self):
-        """Called to take down the entire access point.
-
-        When called will stop all aps running on this host, shutdown the dhcp
-        server, and stop the ssh connection.
-        """
-
-        if self._aps:
-            self.stop_all_aps()
-        self.ssh.close()
-
-    def generate_bridge_configs(self, channel):
-        """Generate a list of configs for a bridge between LAN and WLAN.
-
-        Args:
-            channel: the channel WLAN interface is brought up on
-            iface_lan: the LAN interface to bridge
-        Returns:
-            configs: tuple containing iface_wlan, iface_lan and bridge_ip
-        """
-
-        if channel < 15:
-            iface_wlan = self.wlan_2g
-            subnet_str = self._AP_2G_SUBNET_STR
-        else:
-            iface_wlan = self.wlan_5g
-            subnet_str = self._AP_5G_SUBNET_STR
-
-        iface_lan = self.lan
-
-        a, b, c, _ = subnet_str.strip('/24').split('.')
-        bridge_ip = f'{a}.{b}.{c}.{BRIDGE_IP_LAST}'
-
-        configs = (iface_wlan, iface_lan, bridge_ip)
-
-        return configs
-
-    def install_scapy(self, scapy_path, send_ra_path):
-        """Install scapy
-
-        Args:
-            scapy_path: path where scapy tar file is located on server
-            send_ra_path: path where sendra path is located on server
-        """
-        self.scapy_install_path = self.ssh.run('mktemp -d').stdout.rstrip()
-        self.log.info(f'Scapy install path: {self.scapy_install_path}')
-        self.ssh.send_file(scapy_path, self.scapy_install_path)
-        self.ssh.send_file(send_ra_path, self.scapy_install_path)
-
-        scapy = os.path.join(self.scapy_install_path,
-                             scapy_path.split('/')[-1])
-
-        self.ssh.run(f'tar -xvf {scapy} -C {self.scapy_install_path}')
-        self.ssh.run(f'cd {self.scapy_install_path}; {SCAPY_INSTALL_COMMAND}')
-
-    def cleanup_scapy(self):
-        """ Cleanup scapy """
-        if self.scapy_install_path:
-            cmd = f'rm -rf {self.scapy_install_path}'
-            self.log.info(f'Cleaning up scapy {cmd}')
-            output = self.ssh.run(cmd)
-            self.scapy_install_path = None
-
-    def send_ra(self,
-                iface,
-                mac=RA_MULTICAST_ADDR,
-                interval=1,
-                count=None,
-                lifetime=LIFETIME,
-                rtt=0):
-        """Invoke scapy and send RA to the device.
-
-        Args:
-          iface: string of the WiFi interface to use for sending packets.
-          mac: string HWAddr/MAC address to send the packets to.
-          interval: int Time to sleep between consecutive packets.
-          count: int Number of packets to be sent.
-          lifetime: int original RA's router lifetime in seconds.
-          rtt: retrans timer of the RA packet
-        """
-        scapy_command = os.path.join(self.scapy_install_path, RA_SCRIPT)
-        options = f' -m {mac} -i {interval} -c {count} -l {lifetime} -in {iface} -rtt {rtt}'
-        cmd = scapy_command + options
-        self.log.info(f'Scapy cmd: {cmd}')
-        self.ssh.run(cmd)
-
-    def get_icmp6intype134(self):
-        """Read the value of Icmp6InType134 and return integer.
-
-        Returns:
-            Integer value >0 if grep is successful; 0 otherwise.
-        """
-        ra_count_str = self.ssh.run(
-            f'grep Icmp6InType134 {PROC_NET_SNMP6} || true').stdout
-        if ra_count_str:
-            return int(ra_count_str.split()[1])
-
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=56,
-             additional_ping_params=None):
-        """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)
-        """
-        return utils.ping(self.ssh,
-                          dest_ip,
-                          count=count,
-                          interval=interval,
-                          timeout=timeout,
-                          size=size,
-                          additional_ping_params=additional_ping_params)
-
-    def can_ping(self,
-                 dest_ip,
-                 count=1,
-                 interval=1000,
-                 timeout=1000,
-                 size=56,
-                 additional_ping_params=None):
-        """Returns whether ap can ping dest_ip (see utils.can_ping)"""
-        return utils.can_ping(self.ssh,
-                              dest_ip,
-                              count=count,
-                              interval=interval,
-                              timeout=timeout,
-                              size=size,
-                              additional_ping_params=additional_ping_params)
-
-    def hard_power_cycle(self,
-                         pdus,
-                         unreachable_timeout=30,
-                         ping_timeout=60,
-                         ssh_timeout=30,
-                         hostapd_configs=None):
-        """Kills, then restores power to AccessPoint, verifying it goes down and
-        comes back online cleanly.
-
-        Args:
-            pdus: list, PduDevices in the testbed
-            unreachable_timeout: int, time to wait for AccessPoint to become
-                unreachable
-            ping_timeout: int, time to wait for AccessPoint to responsd to pings
-            ssh_timeout: int, time to wait for AccessPoint to allow SSH
-            hostapd_configs (optional): list, containing hostapd settings. If
-                present, these networks will be spun up after the AP has
-                rebooted. This list can either contain HostapdConfig objects, or
-                    dictionaries with the start_ap params
-                    (i.e  { 'hostapd_config': <HostapdConfig>,
-                            'setup_bridge': <bool>,
-                            'additional_parameters': <dict> } ).
-        Raise:
-            Error, if no PduDevice is provided in AccessPoint config.
-            ConnectionError, if AccessPoint fails to go offline or come back.
-        """
-        if not self.device_pdu_config:
-            raise Error('No PduDevice provided in AccessPoint config.')
-
-        if hostapd_configs is None:
-            hostapd_configs = []
-
-        self.log.info(f'Power cycling')
-        ap_pdu, ap_pdu_port = pdu.get_pdu_port_for_device(
-            self.device_pdu_config, pdus)
-
-        self.log.info(f'Killing power')
-        ap_pdu.off(str(ap_pdu_port))
-
-        self.log.info('Verifying AccessPoint is unreachable.')
-        timeout = time.time() + unreachable_timeout
-        while time.time() < timeout:
-            if not utils.can_ping(job, self.ssh_settings.hostname):
-                self.log.info('AccessPoint is unreachable as expected.')
-                break
-            else:
-                self.log.debug(
-                    'AccessPoint is still responding to pings. Retrying in 1 '
-                    'second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError(
-                f'Failed to bring down AccessPoint ({self.ssh_settings.hostname})'
-            )
-        self._aps.clear()
-
-        self.log.info(f'Restoring power')
-        ap_pdu.on(str(ap_pdu_port))
-
-        self.log.info('Waiting for AccessPoint to respond to pings.')
-        timeout = time.time() + ping_timeout
-        while time.time() < timeout:
-            if utils.can_ping(job, self.ssh_settings.hostname):
-                self.log.info('AccessPoint responded to pings.')
-                break
-            else:
-                self.log.debug('AccessPoint is not responding to pings. '
-                               'Retrying in 1 second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError(
-                f'Timed out waiting for AccessPoint ({self.ssh_settings.hostname}) '
-                'to respond to pings.')
-
-        self.log.info('Waiting for AccessPoint to allow ssh connection.')
-        timeout = time.time() + ssh_timeout
-        while time.time() < timeout:
-            try:
-                self.ssh.run('echo')
-            except connection.Error:
-                self.log.debug('AccessPoint is not allowing ssh connection. '
-                               'Retrying in 1 second.')
-                time.sleep(1)
-            else:
-                self.log.info('AccessPoint available via ssh.')
-                break
-        else:
-            raise ConnectionError(
-                f'Timed out waiting for AccessPoint ({self.ssh_settings.hostname}) '
-                'to allow ssh connection.')
-
-        # Allow 5 seconds for OS to finish getting set up
-        time.sleep(5)
-        self._initial_ap()
-        self.log.info('Power cycled successfully')
-
-        for settings in hostapd_configs:
-            if type(settings) == hostapd_config.HostapdConfig:
-                config = settings
-                setup_bridge = False
-                additional_parameters = None
-
-            elif type(settings) == dict:
-                config = settings['hostapd_config']
-                setup_bridge = settings.get('setup_bridge', False)
-                additional_parameters = settings.get('additional_parameters',
-                                                     None)
-            else:
-                raise TypeError(
-                    'Items in hostapd_configs list must either be '
-                    'hostapd.HostapdConfig objects or dictionaries.')
-
-            self.log.info(f'Restarting network {config.ssid}')
-            self.start_ap(config,
-                          setup_bridge=setup_bridge,
-                          additional_parameters=additional_parameters)
-
-    def channel_switch(self, identifier, channel_num):
-        """Switch to a different channel on the given AP."""
-        if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
-        instance = self._aps.get(identifier)
-        self.log.info(f'channel switch to channel {channel_num}')
-        instance.hostapd.channel_switch(channel_num)
-
-    def get_current_channel(self, identifier):
-        """Find the current channel on the given AP."""
-        if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
-        instance = self._aps.get(identifier)
-        return instance.hostapd.get_current_channel()
-
-    def get_stas(self, identifier) -> Set[str]:
-        """Return MAC addresses of all associated STAs on the given AP."""
-        if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
-        instance = self._aps.get(identifier)
-        return instance.hostapd.get_stas()
-
-    def get_sta_extended_capabilities(self, identifier,
-                                      sta_mac: str) -> ExtendedCapabilities:
-        """Get extended capabilities for the given STA, as seen by the AP."""
-        if identifier not in list(self._aps.keys()):
-            raise ValueError(f'Invalid identifier {identifier} given')
-        instance = self._aps.get(identifier)
-        return instance.hostapd.get_sta_extended_capabilities(sta_mac)
-
-    def send_bss_transition_management_req(
-            self, identifier, sta_mac: str,
-            request: BssTransitionManagementRequest):
-        """Send a BSS Transition Management request to an associated STA."""
-        if identifier not in list(self._aps.keys()):
-            raise ValueError('Invalid identifier {identifier} given')
-        instance = self._aps.get(identifier)
-        return instance.hostapd.send_bss_transition_management_req(
-            sta_mac, request)
diff --git a/src/antlion/controllers/adb.py b/src/antlion/controllers/adb.py
deleted file mode 100644
index 9acfa1e..0000000
--- a/src/antlion/controllers/adb.py
+++ /dev/null
@@ -1,288 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import re
-import shlex
-import shutil
-
-from antlion.controllers.adb_lib.error import AdbCommandError
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.libs.proc import job
-
-DEFAULT_ADB_TIMEOUT = 60
-DEFAULT_ADB_PULL_TIMEOUT = 180
-
-ADB_REGEX = re.compile('adb:')
-# Uses a regex to be backwards compatible with previous versions of ADB
-# (N and above add the serial to the error msg).
-DEVICE_NOT_FOUND_REGEX = re.compile('error: device (?:\'.*?\' )?not found')
-DEVICE_OFFLINE_REGEX = re.compile('error: device offline')
-# Raised when adb forward commands fail to forward a port.
-CANNOT_BIND_LISTENER_REGEX = re.compile('error: cannot bind listener:')
-# Expected output is "Android Debug Bridge version 1.0.XX
-ADB_VERSION_REGEX = re.compile('Android Debug Bridge version 1.0.(\d+)')
-GREP_REGEX = re.compile('grep(\s+)')
-
-ROOT_USER_ID = '0'
-SHELL_USER_ID = '2000'
-
-
-def parsing_parcel_output(output):
-    """Parsing the adb output in Parcel format.
-
-    Parsing the adb output in format:
-      Result: Parcel(
-        0x00000000: 00000000 00000014 00390038 00340031 '........8.9.1.4.'
-        0x00000010: 00300038 00300030 00300030 00340032 '8.0.0.0.0.0.2.4.'
-        0x00000020: 00350034 00330035 00320038 00310033 '4.5.5.3.8.2.3.1.'
-        0x00000030: 00000000                            '....            ')
-    """
-    output = ''.join(re.findall(r"'(.*)'", output))
-    return re.sub(r'[.\s]', '', output)
-
-
-class AdbProxy(object):
-    """Proxy class for ADB.
-
-    For syntactic reasons, the '-' in adb commands need to be replaced with
-    '_'. Can directly execute adb commands on an object:
-    >> adb = AdbProxy(<serial>)
-    >> adb.start_server()
-    >> adb.devices() # will return the console output of "adb devices".
-    """
-
-    def __init__(self, serial="", ssh_connection=None):
-        """Construct an instance of AdbProxy.
-
-        Args:
-            serial: str serial number of Android device from `adb devices`
-            ssh_connection: SshConnection instance if the Android device is
-                            connected to a remote host that we can reach via SSH.
-        """
-        self.serial = serial
-        self._server_local_port = None
-        adb_path = shutil.which('adb')
-        adb_cmd = [shlex.quote(adb_path)]
-        if serial:
-            adb_cmd.append("-s %s" % serial)
-        if ssh_connection is not None:
-            # Kill all existing adb processes on the remote host (if any)
-            # Note that if there are none, then pkill exits with non-zero status
-            ssh_connection.run("pkill adb", ignore_status=True)
-            # Copy over the adb binary to a temp dir
-            temp_dir = ssh_connection.run("mktemp -d").stdout.strip()
-            ssh_connection.send_file(adb_path, temp_dir)
-            # Start up a new adb server running as root from the copied binary.
-            remote_adb_cmd = "%s/adb %s root" % (temp_dir, "-s %s" % serial
-                                                 if serial else "")
-            ssh_connection.run(remote_adb_cmd)
-            # Proxy a local port to the adb server port
-            local_port = ssh_connection.create_ssh_tunnel(5037)
-            self._server_local_port = local_port
-
-        if self._server_local_port:
-            adb_cmd.append("-P %d" % local_port)
-        self.adb_str = " ".join(adb_cmd)
-        self._ssh_connection = ssh_connection
-
-    def get_user_id(self):
-        """Returns the adb user. Either 2000 (shell) or 0 (root)."""
-        return self.shell('id -u')
-
-    def is_root(self, user_id=None):
-        """Checks if the user is root.
-
-        Args:
-            user_id: if supplied, the id to check against.
-        Returns:
-            True if the user is root. False otherwise.
-        """
-        if not user_id:
-            user_id = self.get_user_id()
-        return user_id == ROOT_USER_ID
-
-    def ensure_root(self):
-        """Ensures the user is root after making this call.
-
-        Note that this will still fail if the device is a user build, as root
-        is not accessible from a user build.
-
-        Returns:
-            False if the device is a user build. True otherwise.
-        """
-        self.ensure_user(ROOT_USER_ID)
-        return self.is_root()
-
-    def ensure_user(self, user_id=SHELL_USER_ID):
-        """Ensures the user is set to the given user.
-
-        Args:
-            user_id: The id of the user.
-        """
-        if self.is_root(user_id):
-            self.root()
-        else:
-            self.unroot()
-        self.wait_for_device()
-        return self.get_user_id() == user_id
-
-    def _exec_cmd(self, cmd, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):
-        """Executes adb commands in a new shell.
-
-        This is specific to executing adb commands.
-
-        Args:
-            cmd: A string or list that is the adb command to execute.
-
-        Returns:
-            The stdout of the adb command.
-
-        Raises:
-            AdbError for errors in ADB operations.
-            AdbCommandError for errors from commands executed through ADB.
-        """
-        if isinstance(cmd, list):
-            cmd = ' '.join(cmd)
-        result = job.run(cmd, ignore_status=True, timeout=timeout)
-        ret, out, err = result.exit_status, result.stdout, result.stderr
-
-        if any(pattern.match(err) for pattern in
-               [ADB_REGEX, DEVICE_OFFLINE_REGEX, DEVICE_NOT_FOUND_REGEX,
-                CANNOT_BIND_LISTENER_REGEX]):
-            raise AdbError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)
-        if "Result: Parcel" in out:
-            return parsing_parcel_output(out)
-        if ignore_status or (ret == 1 and GREP_REGEX.search(cmd)):
-            return out or err
-        if ret != 0:
-            raise AdbCommandError(cmd=cmd, stdout=out, stderr=err, ret_code=ret)
-        return out
-
-    def _exec_adb_cmd(self, name, arg_str, **kwargs):
-        return self._exec_cmd(' '.join((self.adb_str, name, arg_str)),
-                              **kwargs)
-
-    def _exec_cmd_nb(self, cmd, **kwargs):
-        """Executes adb commands in a new shell, non blocking.
-
-        Args:
-            cmds: A string that is the adb command to execute.
-
-        """
-        return job.run_async(cmd, **kwargs)
-
-    def _exec_adb_cmd_nb(self, name, arg_str, **kwargs):
-        return self._exec_cmd_nb(' '.join((self.adb_str, name, arg_str)),
-                                 **kwargs)
-
-    def tcp_forward(self, host_port, device_port):
-        """Starts tcp forwarding from localhost to this android device.
-
-        Args:
-            host_port: Port number to use on localhost
-            device_port: Port number to use on the android device.
-
-        Returns:
-            Forwarded port on host as int or command output string on error
-        """
-        if self._ssh_connection:
-            # We have to hop through a remote host first.
-            #  1) Find some free port on the remote host's localhost
-            #  2) Setup forwarding between that remote port and the requested
-            #     device port
-            remote_port = self._ssh_connection.find_free_port()
-            host_port = self._ssh_connection.create_ssh_tunnel(
-                remote_port, local_port=host_port)
-        output = self.forward("tcp:%d tcp:%d" % (host_port, device_port),
-                              ignore_status=True)
-        # If hinted_port is 0, the output will be the selected port.
-        # Otherwise, there will be no output upon successfully
-        # forwarding the hinted port.
-        if not output:
-            return host_port
-        try:
-            output_int = int(output)
-        except ValueError:
-            return output
-        return output_int
-
-    def remove_tcp_forward(self, host_port):
-        """Stop tcp forwarding a port from localhost to this android device.
-
-        Args:
-            host_port: Port number to use on localhost
-        """
-        if self._ssh_connection:
-            remote_port = self._ssh_connection.close_ssh_tunnel(host_port)
-            if remote_port is None:
-                logging.warning("Cannot close unknown forwarded tcp port: %d",
-                                host_port)
-                return
-            # The actual port we need to disable via adb is on the remote host.
-            host_port = remote_port
-        self.forward("--remove tcp:%d" % host_port)
-
-    def getprop(self, prop_name):
-        """Get a property of the device.
-
-        This is a convenience wrapper for "adb shell getprop xxx".
-
-        Args:
-            prop_name: A string that is the name of the property to get.
-
-        Returns:
-            A string that is the value of the property, or None if the property
-            doesn't exist.
-        """
-        return self.shell("getprop %s" % prop_name)
-
-    # TODO: This should be abstracted out into an object like the other shell
-    # command.
-    def shell(self, command, ignore_status=False, timeout=DEFAULT_ADB_TIMEOUT):
-        return self._exec_adb_cmd(
-            'shell',
-            shlex.quote(command),
-            ignore_status=ignore_status,
-            timeout=timeout)
-
-    def shell_nb(self, command):
-        return self._exec_adb_cmd_nb('shell', shlex.quote(command))
-
-    def __getattr__(self, name):
-        def adb_call(*args, **kwargs):
-            clean_name = name.replace('_', '-')
-            if clean_name in ['pull', 'push', 'remount'] and 'timeout' not in kwargs:
-                kwargs['timeout'] = DEFAULT_ADB_PULL_TIMEOUT
-            arg_str = ' '.join(str(elem) for elem in args)
-            return self._exec_adb_cmd(clean_name, arg_str, **kwargs)
-
-        return adb_call
-
-    def get_version_number(self):
-        """Returns the version number of ADB as an int (XX in 1.0.XX).
-
-        Raises:
-            AdbError if the version number is not found/parsable.
-        """
-        version_output = self.version()
-        match = re.search(ADB_VERSION_REGEX, version_output)
-
-        if not match:
-            logging.error('Unable to capture ADB version from adb version '
-                          'output: %s' % version_output)
-            raise AdbError('adb version', version_output, '', '')
-        return int(match.group(1))
diff --git a/src/antlion/controllers/adb_lib/error.py b/src/antlion/controllers/adb_lib/error.py
deleted file mode 100644
index 6fcae7d..0000000
--- a/src/antlion/controllers/adb_lib/error.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import error
-
-
-class AdbError(error.ActsError):
-    """Raised when there is an error in adb operations."""
-
-    def __init__(self, cmd, stdout, stderr, ret_code):
-        super().__init__()
-        self.cmd = cmd
-        self.stdout = stdout
-        self.stderr = stderr
-        self.ret_code = ret_code
-
-    def __str__(self):
-        return ("Error executing adb cmd '%s'. ret: %d, stdout: %s, stderr: %s"
-                ) % (self.cmd, self.ret_code, self.stdout, self.stderr)
-
-
-class AdbCommandError(AdbError):
-    """Raised when there is an error in the command being run through ADB."""
diff --git a/src/antlion/controllers/amarisoft_lib/OWNERS b/src/antlion/controllers/amarisoft_lib/OWNERS
deleted file mode 100644
index edee4ef..0000000
--- a/src/antlion/controllers/amarisoft_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-markusliu@google.com
-mollychang@google.com
-angelayu@google.com
-zoeyliu@google.com
diff --git a/src/antlion/controllers/amarisoft_lib/amarisoft_client.py b/src/antlion/controllers/amarisoft_lib/amarisoft_client.py
deleted file mode 100644
index 5386f81..0000000
--- a/src/antlion/controllers/amarisoft_lib/amarisoft_client.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import json
-import logging
-from typing import Any, Mapping, Optional, Tuple
-
-from antlion.controllers.amarisoft_lib import ssh_utils
-import immutabledict
-import websockets
-
-_CONFIG_DIR_MAPPING = immutabledict.immutabledict({
-    'enb': '/config/enb.cfg',
-    'mme': '/config/mme.cfg',
-    'ims': '/config/ims.cfg',
-    'mbms': '/config/mbmsgw.cfg',
-    'ots': '/config/ots.cfg'
-})
-
-
-class MessageFailureError(Exception):
-  """Raises an error when the message execution fail."""
-
-
-class AmariSoftClient(ssh_utils.RemoteClient):
-  """The SSH client class interacts with Amarisoft.
-
-    A simulator used to simulate the base station can output different signals
-    according to the network configuration settings.
-    For example: T Mobile NSA LTE band 66 + NR band 71.
-  """
-
-  async def _send_message_to_callbox(self, uri: str,
-                                     msg: str) -> Tuple[str, str]:
-    """Implements async function for send message to the callbox.
-
-    Args:
-      uri: The uri of specific websocket interface.
-      msg: The message to be send to callbox.
-
-    Returns:
-      The response from callbox.
-    """
-    async with websockets.connect(
-        uri, extra_headers={'origin': 'Test'}) as websocket:
-      await websocket.send(msg)
-      head = await websocket.recv()
-      body = await websocket.recv()
-    return head, body
-
-  def send_message(self, port: str, msg: str) -> Tuple[str, str]:
-    """Sends a message to the callbox.
-
-    Args:
-      port: The port of specific websocket interface.
-      msg: The message to be send to callbox.
-
-    Returns:
-      The response from callbox.
-    """
-    return asyncio.get_event_loop().run_until_complete(
-        self._send_message_to_callbox(f'ws://{self.host}:{port}/', msg))
-
-  def verify_response(self, func: str, head: str,
-                      body: str) -> Tuple[Mapping[str, Any], Mapping[str, Any]]:
-    """Makes sure there are no error messages in Amarisoft's response.
-
-    If a message produces an error, response will have an error string field
-    representing the error.
-    For example:
-      {
-        "message": "ready",
-        "message_id": <message id>,
-        "error": <error message>,
-        "type": "ENB",
-        "name: <name>,
-      }
-
-    Args:
-      func: The message send to Amarisoft.
-      head: Responsed message head.
-      body: Responsed message body.
-
-    Returns:
-      Standard output of the shell command.
-
-    Raises:
-       MessageFailureError: Raised when an error occurs in the response message.
-    """
-    loaded_head = json.loads(head)
-    loaded_body = json.loads(body)
-
-    if loaded_head.get('message') != 'ready':
-      raise MessageFailureError(
-          f'Fail to get response from callbox, message: {loaded_head["error"]}')
-    if 'error' in loaded_body:
-      raise MessageFailureError(
-          f'Fail to excute {func} with error message: {loaded_body["error"]}')
-    if loaded_body.get('message') != func:
-      raise MessageFailureError(
-          f'The message sent was {loaded_body["message"]} instead of {func}.')
-    return loaded_head, loaded_body
-
-  def lte_service_stop(self) -> None:
-    """Stops to output signal."""
-    self.run_cmd('systemctl stop lte')
-
-  def lte_service_start(self):
-    """Starts to output signal."""
-    self.run_cmd('systemctl start lte')
-
-  def lte_service_restart(self):
-    """Restarts to output signal."""
-    self.run_cmd('systemctl restart lte')
-
-  def lte_service_enable(self):
-    """lte service remains enable until next reboot."""
-    self.run_cmd('systemctl enable lte')
-
-  def lte_service_disable(self):
-    """lte service remains disable until next reboot."""
-    self.run_cmd('systemctl disable lte')
-
-  def lte_service_is_active(self) -> bool:
-    """Checks lte service is active or not.
-
-    Returns:
-      True if service active, False otherwise.
-    """
-    return not any('inactive' in line
-                   for line in self.run_cmd('systemctl is-active lte'))
-
-  def set_config_dir(self, cfg_type: str, path: str) -> None:
-    """Sets the path of target configuration file.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb ...etc.)
-      path: The path of target configuration. (e.g.
-        /root/lteenb-linux-2020-12-14)
-    """
-    path_old = self.get_config_dir(cfg_type)
-    if path != path_old:
-      logging.info('set new path %s (was %s)', path, path_old)
-      self.run_cmd(f'ln -sfn {path} /root/{cfg_type}')
-    else:
-      logging.info('path %s does not change.', path_old)
-
-  def get_config_dir(self, cfg_type: str) -> Optional[str]:
-    """Gets the path of target configuration.
-
-    Args:
-      cfg_type: Target configuration type. (e.g. mme, enb...etc.)
-
-    Returns:
-      The path of configuration.
-    """
-    result = self.run_cmd(f'readlink /root/{cfg_type}')
-    if result:
-      path = result[0].strip()
-    else:
-      logging.warning('%s path not found.', cfg_type)
-      return None
-    return path
-
-  def set_config_file(self, cfg_type: str, cfg_file: str) -> None:
-    """Sets the configuration to be executed.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb...etc.)
-      cfg_file: The configuration to be executed. (e.g.
-        /root/lteenb-linux-2020-12-14/config/gnb.cfg )
-
-    Raises:
-      FileNotFoundError: Raised when a file or directory is requested but
-      doesn’t exist.
-    """
-    cfg_link = self.get_config_dir(cfg_type) + _CONFIG_DIR_MAPPING[cfg_type]
-    if not self.is_file_exist(cfg_file):
-      raise FileNotFoundError("The command file doesn't exist")
-    self.run_cmd(f'ln -sfn {cfg_file} {cfg_link}')
-
-  def get_config_file(self, cfg_type: str) -> Optional[str]:
-    """Gets the current configuration of specific configuration type.
-
-    Args:
-      cfg_type: The type of target configuration. (e.g. mme, enb...etc.)
-
-    Returns:
-      The current configuration with absolute path.
-    """
-    cfg_path = self.get_config_dir(cfg_type) + _CONFIG_DIR_MAPPING[cfg_type]
-    if cfg_path:
-      result = self.run_cmd(f'readlink {cfg_path}')
-      if result:
-        return result[0].strip()
-
-  def get_all_config_dir(self) -> Mapping[str, str]:
-    """Gets all configuration directions.
-
-    Returns:
-      All configuration directions.
-    """
-    config_dir = {}
-    for cfg_type in ('ots', 'enb', 'mme', 'mbms'):
-      config_dir[cfg_type] = self.get_config_dir(cfg_type)
-      logging.debug('get path of %s: %s', cfg_type, config_dir[cfg_type])
-    return config_dir
diff --git a/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py b/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py
deleted file mode 100644
index c62bf2a..0000000
--- a/src/antlion/controllers/amarisoft_lib/amarisoft_constants.py
+++ /dev/null
@@ -1,14 +0,0 @@
-"""Constants for test."""
-
-
-# ports of lte service websocket interface
-class PortNumber:
-  URI_MME = '9000'
-  URI_ENB = '9001'
-  URI_UE = '9002'
-  URI_IMS = '9003'
-  URI_MBMS = '9004'
-  URI_PROBE = '9005'
-  URI_LICENSE = '9006'
-  URI_MON = '9007'
-  URI_VIEW = '9008'
diff --git a/src/antlion/controllers/amarisoft_lib/config_utils.py b/src/antlion/controllers/amarisoft_lib/config_utils.py
deleted file mode 100644
index 8d3b603..0000000
--- a/src/antlion/controllers/amarisoft_lib/config_utils.py
+++ /dev/null
@@ -1,201 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import os
-import immutabledict
-
-from antlion.controllers.amarisoft_lib import amarisoft_client
-
-TEMPLATE_PATH = os.path.dirname(os.path.abspath(__file__)) + '/config_templates'
-TEMPLATE_PATH_ENB = f'{TEMPLATE_PATH}/enb/'
-TEMPLATE_PATH_MME = f'{TEMPLATE_PATH}/mme/'
-
-_CLIENT_CONFIG_DIR_MAPPING = immutabledict.immutabledict({
-    'enb': '/config/mhtest_enb.cfg',
-    'mme': '/config/mhtest_mme.cfg',
-})
-
-
-class EnbCfg():
-  """MME configuration templates."""
-  ENB_GENERIC = 'enb-single-generic.cfg'
-  GNB_NSA_GENERIC = 'gnb-nsa-lte-ho-generic.cfg'
-  GNB_SA_GENERIC = 'gnb-sa-lte-ho-generic.cfg'
-
-
-class MmeCfg():
-  """MME configuration templates."""
-  MME_GENERIC = 'mme-generic.cfg'
-
-
-class SpecTech(enum.Enum):
-  """Spectrum usage techniques."""
-  FDD = 0
-  TDD = 1
-
-
-class ConfigUtils():
-  """Utilities for set Amarisoft configs.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def upload_enb_template(self, cfg: str) -> bool:
-    """Loads ENB configuration.
-
-    Args:
-      cfg: The ENB configuration to be loaded.
-
-    Returns:
-      True if the ENB configuration was loaded successfully, False otherwise.
-    """
-    cfg_template = TEMPLATE_PATH_ENB + cfg
-    if not os.path.isfile(cfg_template):
-      return False
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    self.remote.run_cmd('rm -f ' + cfg_path)
-    self.remote.sftp_upload(cfg_template, cfg_path)
-    self.remote.set_config_file('enb', cfg_path)
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    return True
-
-  def upload_mme_template(self, cfg: str) -> bool:
-    """Loads MME configuration.
-
-    Args:
-      cfg: The MME configuration to be loaded.
-
-    Returns:
-      True if the ENB configuration was loaded successfully, False otherwise.
-    """
-    cfg_template = TEMPLATE_PATH_MME + cfg
-    if not os.path.isfile(cfg_template):
-      return False
-    cfg_path = self.remote.get_config_dir(
-        'mme') + _CLIENT_CONFIG_DIR_MAPPING['mme']
-    self.remote.run_cmd('rm -f ' + cfg_path)
-    self.remote.sftp_upload(cfg_template, cfg_path)
-    self.remote.set_config_file('mme', cfg_path)
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    return True
-
-  def enb_set_plmn(self, plmn: str) -> bool:
-    """Sets the PLMN in ENB configuration.
-
-    Args:
-      plmn: The PLMN to be set. ex: 311480
-
-    Returns:
-      True if set PLMN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define PLMN \"00101\"'
-    string_to = f'#define PLMN \"{plmn}\"'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def mme_set_plmn(self, plmn: str) -> bool:
-    """Sets the PLMN in MME configuration.
-
-    Args:
-      plmn: The PLMN to be set. ex:'311480'
-
-    Returns:
-      True if set PLMN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'mme') + _CLIENT_CONFIG_DIR_MAPPING['mme']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define PLMN \"00101\"'
-    string_to = f'#define PLMN \"{plmn}\"'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_fdd_arfcn(self, arfcn: int) -> bool:
-    """Sets the FDD ARFCN in ENB configuration.
-
-    Args:
-      arfcn: The arfcn to be set. ex: 1400
-
-    Returns:
-      True if set FDD ARFCN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define FDD_CELL_earfcn 1400'
-    string_to = f'#define FDD_CELL_earfcn {arfcn}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_tdd_arfcn(self, arfcn: int) -> bool:
-    """Sets the TDD ARFCN in ENB configuration.
-
-    Args:
-      arfcn: The arfcn to be set. ex: 1400
-
-    Returns:
-      True if set FDD ARFCN successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define TDD_CELL_earfcn 40620'
-    string_to = f'#define TDD_CELL_earfcn {arfcn}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
-
-  def enb_set_spectrum_tech(self, tech: int) -> bool:
-    """Sets the spectrum usage techniques in ENB configuration.
-
-    Args:
-      tech: the spectrum usage techniques. ex: SpecTech.FDD.name
-
-    Returns:
-      True if set spectrum usage techniques successfully, False otherwise.
-    """
-    cfg_path = self.remote.get_config_dir(
-        'enb') + _CLIENT_CONFIG_DIR_MAPPING['enb']
-    if not self.remote.is_file_exist(cfg_path):
-      return False
-    string_from = '#define TDD 0'
-    string_to = f'#define TDD {tech}'
-    self.remote.run_cmd(f'sed -i \'s/\\r//g\' {cfg_path}')
-    self.remote.run_cmd(
-        f'sed -i \':a;N;$!ba;s/{string_from}/{string_to}/g\' {cfg_path}')
-    return True
diff --git a/src/antlion/controllers/amarisoft_lib/ims.py b/src/antlion/controllers/amarisoft_lib/ims.py
deleted file mode 100644
index ee575c4..0000000
--- a/src/antlion/controllers/amarisoft_lib/ims.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-from typing import Any, Mapping, Optional, Union
-
-from antlion.controllers.amarisoft_lib import amarisoft_client
-from antlion.controllers.amarisoft_lib import amarisoft_constants as const
-
-
-class ImsFunctions():
-  """Utilities for Amarisoft's IMS Remote API.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def make_call(self,
-              impi: str,
-              impu: str,
-              contact: str,
-              sip_file: str = 'mt_call_qos.sdp',
-              caller: str = 'Amarisoft',
-              duration: int = 30) -> None:
-    """Performs MT call from callbox to test device.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call.
-      impu: IMPU (IP Multimedia Public identity) of user to call.
-      contact: Contact SIP uri of user to call.
-      sip_file: Define file to use as sdp.
-      caller: The number/ID is displayed as the caller.
-      duration: If set, call duration in seconds (The server will close the
-        dialog).
-    """
-    msg = {}
-    msg['message'] = 'mt_call'
-    msg['impi'] = impi
-    msg['impu'] = impu
-    msg['contact'] = contact
-    msg['sip_file'] = sip_file
-    msg['caller'] = caller
-    msg['duration'] = duration
-    dump_msg = json.dumps(msg)
-    logging.debug('mt_call dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('mt_call', head, body)
-
-  def send_sms(self,
-               text: str,
-               impi: str,
-               sender: Optional[str] = 'Amarisoft') -> None:
-    """Sends SMS to assigned device which connect to Amarisoft.
-
-    Args:
-      text: SMS text to send.
-      impi: IMPI (IP Multimedia Private identity) of user.
-      sender: Sets SMS sender.
-    """
-    msg = {}
-    msg['message'] = 'sms'
-    msg['text'] = text
-    msg['impi'] = impi
-    msg['sender'] = sender
-    dump_msg = json.dumps(msg)
-    logging.debug('send_sms dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('sms', head, body)
-
-  def send_mms(self, filename: str, sender: str, receiver: str) -> None:
-    """Sends MMS to assigned device which connect to Amarisoft.
-
-    Args:
-      filename: File name with absolute path to send. Extensions jpg, jpeg, png,
-        gif and txt are supported.
-      sender: IMPI (IP Multimedia Private identity) of user.
-      receiver: IMPU (IP Multimedia Public identity) of user.
-    """
-    msg = {}
-    msg['message'] = 'mms'
-    msg['filename'] = filename
-    msg['sender'] = sender
-    msg['receiver'] = receiver
-    dump_msg = json.dumps(msg)
-    logging.debug('send_mms dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    self.remote.verify_response('mms', head, body)
-
-  def users_get(self, registered_only: bool = True) -> Mapping[str, Any]:
-    """Gets users state.
-
-    Args:
-      registered_only: If set, only registered user will be dumped.
-
-    Returns:
-      The user information.
-    """
-    msg = {}
-    msg['message'] = 'users_get'
-    msg['registered_only'] = registered_only
-    dump_msg = json.dumps(msg)
-    logging.debug('users_get dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_IMS, dump_msg)
-    _, loaded_body = self.remote.verify_response('users_get', head, body)
-    return loaded_body
-
-  def get_impu(self, impi) -> Union[str, None]:
-    """Obtains the IMPU of the target user according to IMPI.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call. ex:
-        "310260123456785@ims.mnc260.mcc310.3gppnetwork.org"
-
-    Returns:
-      The IMPU of target user.
-    """
-    body = self.users_get(True)
-    for index in range(len(body['users'])):
-      if impi in body['users'][index]['impi']:
-        impu = body['users'][index]['bindings'][0]['impu'][1]
-        return impu
-    return None
-
-  def get_uri(self, impi) -> Union[str, None]:
-    """Obtains the URI of the target user according to IMPI.
-
-    Args:
-      impi: IMPI (IP Multimedia Private identity) of user to call. ex:
-        "310260123456785@ims.mnc260.mcc310.3gppnetwork.org"
-
-    Returns:
-      The URI of target user.
-    """
-    body = self.users_get(True)
-    for index in range(len(body['users'])):
-      if impi in body['users'][index]['impi']:
-        uri = body['users'][index]['bindings'][0]['uri']
-        return uri
-    return None
diff --git a/src/antlion/controllers/amarisoft_lib/mme.py b/src/antlion/controllers/amarisoft_lib/mme.py
deleted file mode 100644
index 6f7ee42..0000000
--- a/src/antlion/controllers/amarisoft_lib/mme.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-
-from antlion.controllers.amarisoft_lib import amarisoft_constants as const
-from antlion.controllers.amarisoft_lib import amarisoft_client
-
-
-class MmeFunctions():
-  """Utilities for Amarisoft's MME Remote API.
-
-  Attributes:
-    remote: An amarisoft client.
-  """
-
-  def __init__(self, remote: amarisoft_client.AmariSoftClient):
-    self.remote = remote
-
-  def pws_write(self, local_id: str, n50: bool = False):
-    """Broadcasts emergency alert message.
-
-    Args:
-      local_id: ID of the message as defined by local identifier in MME
-        configuration file.
-      n50: If True, N50 interface is used, otherwise SBC interface is used. (see TS 23.041)
-    """
-    msg = {}
-    msg['message'] = 'pws_write'
-    msg['local_id'] = local_id
-    msg['nf'] = n50
-    dump_msg = json.dumps(msg)
-    logging.debug('pws_write dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('pws_write', head, body)
-
-  def pws_kill(self, local_id: str, n50: bool = False):
-    """Stops broadcasts emergency alert message.
-
-    Args:
-      local_id: ID of the message as defined by local identifier in MME
-        configuration file.
-      n50: If True, N50 interface is used, otherwise SBC interface is used. (see TS 23.041)
-    """
-    msg = {}
-    msg['message'] = 'pws_kill'
-    msg['local_id'] = local_id
-    msg['nf'] = n50
-    dump_msg = json.dumps(msg)
-    logging.debug('pws_kill dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('pws_kill', head, body)
-
-  def ue_del(self, imsi: str):
-    """Remove UE from the UE database and force disconnect if necessary.
-
-    Args:
-      imsi: IMSI of the UE to delete.
-    """
-    msg = {}
-    msg['message'] = 'ue_del'
-    msg['imsi'] = imsi
-    dump_msg = json.dumps(msg)
-    logging.debug('ue_del dump msg = %s', dump_msg)
-    head, body = self.remote.send_message(const.PortNumber.URI_MME, dump_msg)
-    self.remote.verify_response('ue_del', head, body)
diff --git a/src/antlion/controllers/amarisoft_lib/ssh_utils.py b/src/antlion/controllers/amarisoft_lib/ssh_utils.py
deleted file mode 100644
index fccc1d7..0000000
--- a/src/antlion/controllers/amarisoft_lib/ssh_utils.py
+++ /dev/null
@@ -1,195 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-from typing import Sequence
-
-import paramiko
-
-COMMAND_RETRY_TIMES = 3
-
-
-class RunCommandError(Exception):
-  """Raises an error when run command fail."""
-
-
-class NotConnectedError(Exception):
-  """Raises an error when run command without SSH connect."""
-
-
-class RemoteClient:
-  """The SSH client class interacts with the test machine.
-
-  Attributes:
-    host: A string representing the IP address of amarisoft.
-    port: A string representing the default port of SSH.
-    username: A string representing the username of amarisoft.
-    password: A string representing the password of amarisoft.
-    ssh: A SSH client.
-    sftp: A SFTP client.
-  """
-
-  def __init__(self,
-               host: str,
-               username: str,
-               password: str,
-               port: str = '22') -> None:
-    self.host = host
-    self.port = port
-    self.username = username
-    self.password = password
-    self.ssh = paramiko.SSHClient()
-    self.sftp = None
-
-  def ssh_is_connected(self) -> bool:
-    """Checks SSH connect or not.
-
-    Returns:
-      True if SSH is connected, False otherwise.
-    """
-    return self.ssh and self.ssh.get_transport().is_active()
-
-  def ssh_close(self) -> bool:
-    """Closes the SSH connection.
-
-    Returns:
-      True if ssh session closed, False otherwise.
-    """
-    for _ in range(COMMAND_RETRY_TIMES):
-      if self.ssh_is_connected():
-        self.ssh.close()
-      else:
-        return True
-    return False
-
-  def connect(self) -> bool:
-    """Creats SSH connection.
-
-    Returns:
-      True if success, False otherwise.
-    """
-    for _ in range(COMMAND_RETRY_TIMES):
-      try:
-        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        self.ssh.connect(self.host, self.port, self.username, self.password)
-        self.ssh.get_transport().set_keepalive(1)
-        self.sftp = paramiko.SFTPClient.from_transport(self.ssh.get_transport())
-        return True
-      except Exception:  # pylint: disable=broad-except
-        self.ssh_close()
-    return False
-
-  def run_cmd(self, cmd: str) -> Sequence[str]:
-    """Runs shell command.
-
-    Args:
-      cmd: Command to be executed.
-
-    Returns:
-      Standard output of the shell command.
-
-    Raises:
-       RunCommandError: Raise error when command failed.
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-
-    logging.debug('ssh remote -> %s', cmd)
-    _, stdout, stderr = self.ssh.exec_command(cmd)
-    err = stderr.readlines()
-    if err:
-      logging.error('command failed.')
-      raise RunCommandError(err)
-    return stdout.readlines()
-
-  def is_file_exist(self, file: str) -> bool:
-    """Checks target file exist.
-
-    Args:
-        file: Target file with absolute path.
-
-    Returns:
-        True if file exist, false otherwise.
-    """
-    return any('exist' in line for line in self.run_cmd(
-        f'if [ -f "{file}" ]; then echo -e "exist"; fi'))
-
-  def sftp_upload(self, src: str, dst: str) -> bool:
-    """Uploads a local file to remote side.
-
-    Args:
-      src: The target file with absolute path.
-      dst: The absolute path to put the file with file name.
-      For example:
-        upload('/usr/local/google/home/zoeyliu/Desktop/sample_config.yml',
-        '/root/sample_config.yml')
-
-    Returns:
-      True if file upload success, False otherwise.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-
-    logging.info('[local] %s -> [remote] %s', src, dst)
-    self.sftp.put(src, dst)
-    return self.is_file_exist(dst)
-
-  def sftp_download(self, src: str, dst: str) -> bool:
-    """Downloads a file to local.
-
-    Args:
-      src: The target file with absolute path.
-      dst: The absolute path to put the file.
-
-    Returns:
-      True if file download success, False otherwise.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-
-    logging.info('[remote] %s -> [local] %s', src, dst)
-    self.sftp.get(src, dst)
-    return self.is_file_exist(dst)
-
-  def sftp_list_dir(self, path: str) -> Sequence[str]:
-    """Lists the names of the entries in the given path.
-
-    Args:
-      path: The path of the list.
-
-    Returns:
-      The names of the entries in the given path.
-
-    Raises:
-       NotConnectedError: Raised when run command without SSH connect.
-    """
-    if not self.ssh_is_connected():
-      raise NotConnectedError('ssh remote has not been established')
-    if not self.sftp:
-      raise NotConnectedError('sftp remote has not been established')
-    return sorted(self.sftp.listdir(path))
-
diff --git a/src/antlion/controllers/android_device.py b/src/antlion/controllers/android_device.py
deleted file mode 100755
index 1668d82..0000000
--- a/src/antlion/controllers/android_device.py
+++ /dev/null
@@ -1,1778 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import logging
-import math
-import os
-import re
-import shutil
-import socket
-import time
-from datetime import datetime
-
-from antlion import context
-from antlion import logger as acts_logger
-from antlion import tracelogger
-from antlion import utils
-from antlion.controllers import adb
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers import fastboot
-from antlion.controllers.android_lib import errors
-from antlion.controllers.android_lib import events as android_events
-from antlion.controllers.android_lib import logcat
-from antlion.controllers.android_lib import services
-from antlion.controllers.sl4a_lib import sl4a_manager
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import settings
-from antlion.event import event_bus
-from antlion.libs.proc import job
-
-MOBLY_CONTROLLER_CONFIG_NAME = "AndroidDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "android_devices"
-
-ANDROID_DEVICE_PICK_ALL_TOKEN = "*"
-# Key name for SL4A extra params in config file
-ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY = "sl4a_client_port"
-ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY = "sl4a_forwarded_port"
-ANDROID_DEVICE_SL4A_SERVER_PORT_KEY = "sl4a_server_port"
-# Key name for adb logcat extra params in config file.
-ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY = "adb_logcat_param"
-ANDROID_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
-ANDROID_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-CRASH_REPORT_PATHS = ("/data/tombstones/", "/data/vendor/ramdump/",
-                      "/data/ramdump/", "/data/vendor/ssrdump",
-                      "/data/vendor/ramdump/bluetooth", "/data/vendor/log/cbd")
-CRASH_REPORT_SKIPS = ("RAMDUMP_RESERVED", "RAMDUMP_STATUS", "RAMDUMP_OUTPUT",
-                      "bluetooth")
-ALWAYS_ON_LOG_PATH = "/data/vendor/radio/logs/always-on"
-DEFAULT_QXDM_LOG_PATH = "/data/vendor/radio/diag_logs"
-DEFAULT_SDM_LOG_PATH = "/data/vendor/slog/"
-DEFAULT_SCREENSHOT_PATH = "/sdcard/Pictures/screencap"
-BUG_REPORT_TIMEOUT = 1800
-PULL_TIMEOUT = 300
-PORT_RETRY_COUNT = 3
-ADB_ROOT_RETRY_COUNT = 2
-ADB_ROOT_RETRY_INTERVAL = 10
-IPERF_TIMEOUT = 60
-SL4A_APK_NAME = "com.googlecode.android_scripting"
-WAIT_FOR_DEVICE_TIMEOUT = 180
-ENCRYPTION_WINDOW = "CryptKeeper"
-DEFAULT_DEVICE_PASSWORD = "1111"
-RELEASE_ID_REGEXES = [re.compile(r'\w+\.\d+\.\d+'), re.compile(r'N\w+')]
-
-
-def create(configs):
-    """Creates AndroidDevice controller objects.
-
-    Args:
-        configs: A list of dicts, each representing a configuration for an
-                 Android device.
-
-    Returns:
-        A list of AndroidDevice objects.
-    """
-    if not configs:
-        raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_EMPTY_CONFIG_MSG)
-    elif configs == ANDROID_DEVICE_PICK_ALL_TOKEN:
-        ads = get_all_instances()
-    elif not isinstance(configs, list):
-        raise errors.AndroidDeviceConfigError(
-            ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
-    elif isinstance(configs[0], str):
-        # Configs is a list of serials.
-        ads = get_instances(configs)
-    else:
-        # Configs is a list of dicts.
-        ads = get_instances_with_configs(configs)
-
-    ads[0].log.info('The primary device under test is "%s".' % ads[0].serial)
-
-    for ad in ads:
-        if not ad.is_connected():
-            raise errors.AndroidDeviceError(
-                ("Android device %s is specified in config"
-                 " but is not attached.") % ad.serial,
-                serial=ad.serial)
-    _start_services_on_ads(ads)
-    for ad in ads:
-        if ad.droid:
-            utils.set_location_service(ad, False)
-            utils.sync_device_time(ad)
-    return ads
-
-
-def destroy(ads):
-    """Cleans up AndroidDevice objects.
-
-    Args:
-        ads: A list of AndroidDevice objects.
-    """
-    for ad in ads:
-        try:
-            ad.clean_up()
-        except:
-            ad.log.exception("Failed to clean up properly.")
-
-
-def get_info(ads):
-    """Get information on a list of AndroidDevice objects.
-
-    Args:
-        ads: A list of AndroidDevice objects.
-
-    Returns:
-        A list of dict, each representing info for an AndroidDevice objects.
-    """
-    device_info = []
-    for ad in ads:
-        info = {"serial": ad.serial, "model": ad.model}
-        info.update(ad.build_info)
-        device_info.append(info)
-    return device_info
-
-
-def _start_services_on_ads(ads):
-    """Starts long running services on multiple AndroidDevice objects.
-
-    If any one AndroidDevice object fails to start services, cleans up all
-    existing AndroidDevice objects and their services.
-
-    Args:
-        ads: A list of AndroidDevice objects whose services to start.
-    """
-    running_ads = []
-    for ad in ads:
-        running_ads.append(ad)
-        try:
-            ad.start_services()
-        except:
-            ad.log.exception('Failed to start some services, abort!')
-            destroy(running_ads)
-            raise
-
-
-def _parse_device_list(device_list_str, key):
-    """Parses a byte string representing a list of devices. The string is
-    generated by calling either adb or fastboot.
-
-    Args:
-        device_list_str: Output of adb or fastboot.
-        key: The token that signifies a device in device_list_str.
-
-    Returns:
-        A list of android device serial numbers.
-    """
-    return re.findall(r"(\S+)\t%s" % key, device_list_str)
-
-
-def list_adb_devices():
-    """List all android devices connected to the computer that are detected by
-    adb.
-
-    Returns:
-        A list of android device serials. Empty if there's none.
-    """
-    out = adb.AdbProxy().devices()
-    return _parse_device_list(out, "device")
-
-
-def list_fastboot_devices():
-    """List all android devices connected to the computer that are in in
-    fastboot mode. These are detected by fastboot.
-
-    Returns:
-        A list of android device serials. Empty if there's none.
-    """
-    out = fastboot.FastbootProxy().devices()
-    return _parse_device_list(out, "fastboot")
-
-
-def get_instances(serials):
-    """Create AndroidDevice instances from a list of serials.
-
-    Args:
-        serials: A list of android device serials.
-
-    Returns:
-        A list of AndroidDevice objects.
-    """
-    results = []
-    for s in serials:
-        results.append(AndroidDevice(s))
-    return results
-
-
-def get_instances_with_configs(configs):
-    """Create AndroidDevice instances from a list of json configs.
-
-    Each config should have the required key-value pair "serial".
-
-    Args:
-        configs: A list of dicts each representing the configuration of one
-            android device.
-
-    Returns:
-        A list of AndroidDevice objects.
-    """
-    results = []
-    for c in configs:
-        try:
-            serial = c.pop('serial')
-        except KeyError:
-            raise errors.AndroidDeviceConfigError(
-                "Required value 'serial' is missing in AndroidDevice config %s."
-                % c)
-        client_port = 0
-        if ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY in c:
-            try:
-                client_port = int(c.pop(ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY))
-            except ValueError:
-                raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY, c))
-        server_port = None
-        if ANDROID_DEVICE_SL4A_SERVER_PORT_KEY in c:
-            try:
-                server_port = int(c.pop(ANDROID_DEVICE_SL4A_SERVER_PORT_KEY))
-            except ValueError:
-                raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_SERVER_PORT_KEY, c))
-        forwarded_port = 0
-        if ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY in c:
-            try:
-                forwarded_port = int(
-                    c.pop(ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY))
-            except ValueError:
-                raise errors.AndroidDeviceConfigError(
-                    "'%s' is not a valid number for config %s" %
-                    (ANDROID_DEVICE_SL4A_FORWARDED_PORT_KEY, c))
-        ssh_config = c.pop('ssh_config', None)
-        ssh_connection = None
-        if ssh_config is not None:
-            ssh_settings = settings.from_config(ssh_config)
-            ssh_connection = connection.SshConnection(ssh_settings)
-        ad = AndroidDevice(serial,
-                           ssh_connection=ssh_connection,
-                           client_port=client_port,
-                           forwarded_port=forwarded_port,
-                           server_port=server_port)
-        ad.load_config(c)
-        results.append(ad)
-    return results
-
-
-def get_all_instances(include_fastboot=False):
-    """Create AndroidDevice instances for all attached android devices.
-
-    Args:
-        include_fastboot: Whether to include devices in bootloader mode or not.
-
-    Returns:
-        A list of AndroidDevice objects each representing an android device
-        attached to the computer.
-    """
-    if include_fastboot:
-        serial_list = list_adb_devices() + list_fastboot_devices()
-        return get_instances(serial_list)
-    return get_instances(list_adb_devices())
-
-
-def filter_devices(ads, func):
-    """Finds the AndroidDevice instances from a list that match certain
-    conditions.
-
-    Args:
-        ads: A list of AndroidDevice instances.
-        func: A function that takes an AndroidDevice object and returns True
-            if the device satisfies the filter condition.
-
-    Returns:
-        A list of AndroidDevice instances that satisfy the filter condition.
-    """
-    results = []
-    for ad in ads:
-        if func(ad):
-            results.append(ad)
-    return results
-
-
-def get_device(ads, **kwargs):
-    """Finds a unique AndroidDevice instance from a list that has specific
-    attributes of certain values.
-
-    Example:
-        get_device(android_devices, label="foo", phone_number="1234567890")
-        get_device(android_devices, model="angler")
-
-    Args:
-        ads: A list of AndroidDevice instances.
-        kwargs: keyword arguments used to filter AndroidDevice instances.
-
-    Returns:
-        The target AndroidDevice instance.
-
-    Raises:
-        AndroidDeviceError is raised if none or more than one device is
-        matched.
-    """
-
-    def _get_device_filter(ad):
-        for k, v in kwargs.items():
-            if not hasattr(ad, k):
-                return False
-            elif getattr(ad, k) != v:
-                return False
-        return True
-
-    filtered = filter_devices(ads, _get_device_filter)
-    if not filtered:
-        raise ValueError(
-            "Could not find a target device that matches condition: %s." %
-            kwargs)
-    elif len(filtered) == 1:
-        return filtered[0]
-    else:
-        serials = [ad.serial for ad in filtered]
-        raise ValueError("More than one device matched: %s" % serials)
-
-
-def take_bug_reports(ads, test_name, begin_time):
-    """Takes bug reports on a list of android devices.
-
-    If you want to take a bug report, call this function with a list of
-    android_device objects in on_fail. But reports will be taken on all the
-    devices in the list concurrently. Bug report takes a relative long
-    time to take, so use this cautiously.
-
-    Args:
-        ads: A list of AndroidDevice instances.
-        test_name: Name of the test case that triggered this bug report.
-        begin_time: Logline format timestamp taken when the test started.
-    """
-
-    def take_br(test_name, begin_time, ad):
-        ad.take_bug_report(test_name, begin_time)
-
-    args = [(test_name, begin_time, ad) for ad in ads]
-    utils.concurrent_exec(take_br, args)
-
-
-class AndroidDevice:
-    """Class representing an android device.
-
-    Each object of this class represents one Android device in ACTS, including
-    handles to adb, fastboot, and sl4a clients. In addition to direct adb
-    commands, this object also uses adb port forwarding to talk to the Android
-    device.
-
-    Attributes:
-        serial: A string that's the serial number of the Android device.
-        log_path: A string that is the path where all logs collected on this
-                  android device should be stored.
-        log: A logger adapted from root logger with added token specific to an
-             AndroidDevice instance.
-        adb_logcat_process: A process that collects the adb logcat.
-        adb: An AdbProxy object used for interacting with the device via adb.
-        fastboot: A FastbootProxy object used for interacting with the device
-                  via fastboot.
-        client_port: Preferred client port number on the PC host side for SL4A
-        forwarded_port: Preferred server port number forwarded from Android
-                        to the host PC via adb for SL4A connections
-        server_port: Preferred server port used by SL4A on Android device
-
-    """
-
-    def __init__(self,
-                 serial='',
-                 ssh_connection=None,
-                 client_port=0,
-                 forwarded_port=0,
-                 server_port=None):
-        self.serial = serial
-        # logging.log_path only exists when this is used in an ACTS test run.
-        log_path_base = getattr(logging, 'log_path', '/tmp/logs')
-        self.log_dir = 'AndroidDevice%s' % serial
-        self.log_path = os.path.join(log_path_base, self.log_dir)
-        self.client_port = client_port
-        self.forwarded_port = forwarded_port
-        self.server_port = server_port
-        self.log = tracelogger.TraceLogger(
-            AndroidDeviceLoggerAdapter(logging.getLogger(),
-                                       {'serial': serial}))
-        self._event_dispatchers = {}
-        self._services = []
-        self.register_service(services.AdbLogcatService(self))
-        self.register_service(services.Sl4aService(self))
-        self.adb_logcat_process = None
-        self.adb = adb.AdbProxy(serial, ssh_connection=ssh_connection)
-        self.fastboot = fastboot.FastbootProxy(serial,
-                                               ssh_connection=ssh_connection)
-        if not self.is_bootloader:
-            self.root_adb()
-        self._ssh_connection = ssh_connection
-        self.skip_sl4a = False
-        self.crash_report = None
-        self.data_accounting = collections.defaultdict(int)
-        self._sl4a_manager = sl4a_manager.create_sl4a_manager(self.adb)
-        self.last_logcat_timestamp = None
-        # Device info cache.
-        self._user_added_device_info = {}
-        self._sdk_api_level = None
-
-    def clean_up(self):
-        """Cleans up the AndroidDevice object and releases any resources it
-        claimed.
-        """
-        self.stop_services()
-        for service in self._services:
-            service.unregister()
-        self._services.clear()
-        if self._ssh_connection:
-            self._ssh_connection.close()
-
-    def recreate_services(self, serial):
-        """Clean up the AndroidDevice object and re-create adb/sl4a services.
-
-        Unregister the existing services and re-create adb and sl4a services,
-        call this method when the connection break after certain API call
-        (e.g., enable USB tethering by #startTethering)
-
-        Args:
-            serial: the serial number of the AndroidDevice
-        """
-        # Clean the old services
-        for service in self._services:
-            service.unregister()
-        self._services.clear()
-        if self._ssh_connection:
-            self._ssh_connection.close()
-        self._sl4a_manager.stop_service()
-
-        # Wait for old services to stop
-        time.sleep(5)
-
-        # Re-create the new adb and sl4a services
-        self.register_service(services.AdbLogcatService(self))
-        self.register_service(services.Sl4aService(self))
-        self.adb.wait_for_device()
-        self.terminate_all_sessions()
-        self.start_services()
-
-    def register_service(self, service):
-        """Registers the service on the device. """
-        service.register()
-        self._services.append(service)
-
-    # TODO(angli): This function shall be refactored to accommodate all services
-    # and not have hard coded switch for SL4A when b/29157104 is done.
-    def start_services(self, skip_setup_wizard=True):
-        """Starts long running services on the android device.
-
-        1. Start adb logcat capture.
-        2. Start SL4A if not skipped.
-
-        Args:
-            skip_setup_wizard: Whether or not to skip the setup wizard.
-        """
-        if skip_setup_wizard:
-            self.exit_setup_wizard()
-
-        event_bus.post(android_events.AndroidStartServicesEvent(self))
-
-    def stop_services(self):
-        """Stops long running services on the android device.
-
-        Stop adb logcat and terminate sl4a sessions if exist.
-        """
-        event_bus.post(android_events.AndroidStopServicesEvent(self),
-                       ignore_errors=True)
-
-    def is_connected(self):
-        out = self.adb.devices()
-        devices = _parse_device_list(out, "device")
-        return self.serial in devices
-
-    @property
-    def build_info(self):
-        """Get the build info of this Android device, including build id and
-        build type.
-
-        This is not available if the device is in bootloader mode.
-
-        Returns:
-            A dict with the build info of this Android device, or None if the
-            device is in bootloader mode.
-        """
-        if self.is_bootloader:
-            self.log.error("Device is in fastboot mode, could not get build "
-                           "info.")
-            return
-
-        build_id = self.adb.getprop("ro.build.id")
-        incremental_build_id = self.adb.getprop("ro.build.version.incremental")
-        valid_build_id = False
-        for regex in RELEASE_ID_REGEXES:
-            if re.match(regex, build_id):
-                valid_build_id = True
-                break
-        if not valid_build_id:
-            build_id = incremental_build_id
-
-        info = {
-            "build_id": build_id,
-            "incremental_build_id": incremental_build_id,
-            "build_type": self.adb.getprop("ro.build.type")
-        }
-        return info
-
-    @property
-    def device_info(self):
-        """Information to be pulled into controller info.
-
-        The latest serial, model, and build_info are included. Additional info
-        can be added via `add_device_info`.
-        """
-        info = {
-            'serial': self.serial,
-            'model': self.model,
-            'build_info': self.build_info,
-            'user_added_info': self._user_added_device_info,
-            'flavor': self.flavor
-        }
-        return info
-
-    def add_device_info(self, name, info):
-        """Add custom device info to the user_added_info section.
-
-        Adding the same info name the second time will override existing info.
-
-        Args:
-          name: string, name of this info.
-          info: serializable, content of the info.
-        """
-        self._user_added_device_info.update({name: info})
-
-    def sdk_api_level(self):
-        if self._sdk_api_level is not None:
-            return self._sdk_api_level
-        if self.is_bootloader:
-            self.log.error(
-                'Device is in fastboot mode. Cannot get build info.')
-            return
-        self._sdk_api_level = int(
-            self.adb.shell('getprop ro.build.version.sdk'))
-        return self._sdk_api_level
-
-    @property
-    def is_bootloader(self):
-        """True if the device is in bootloader mode.
-        """
-        return self.serial in list_fastboot_devices()
-
-    @property
-    def is_adb_root(self):
-        """True if adb is running as root for this device.
-        """
-        try:
-            return "0" == self.adb.shell("id -u")
-        except AdbError:
-            # Wait a bit and retry to work around adb flakiness for this cmd.
-            time.sleep(0.2)
-            return "0" == self.adb.shell("id -u")
-
-    @property
-    def model(self):
-        """The Android code name for the device."""
-        # If device is in bootloader mode, get mode name from fastboot.
-        if self.is_bootloader:
-            out = self.fastboot.getvar("product").strip()
-            # "out" is never empty because of the "total time" message fastboot
-            # writes to stderr.
-            lines = out.split('\n', 1)
-            if lines:
-                tokens = lines[0].split(' ')
-                if len(tokens) > 1:
-                    return tokens[1].lower()
-            return None
-        model = self.adb.getprop("ro.build.product").lower()
-        if model == "sprout":
-            return model
-        else:
-            return self.adb.getprop("ro.product.name").lower()
-
-    @property
-    def flavor(self):
-        """Returns the specific flavor of Android build the device is using."""
-        return self.adb.getprop("ro.build.flavor").lower()
-
-    @property
-    def droid(self):
-        """Returns the RPC Service of the first Sl4aSession created."""
-        if len(self._sl4a_manager.sessions) > 0:
-            session_id = sorted(self._sl4a_manager.sessions.keys())[0]
-            return self._sl4a_manager.sessions[session_id].rpc_client
-        else:
-            return None
-
-    @property
-    def ed(self):
-        """Returns the event dispatcher of the first Sl4aSession created."""
-        if len(self._sl4a_manager.sessions) > 0:
-            session_id = sorted(self._sl4a_manager.sessions.keys())[0]
-            return self._sl4a_manager.sessions[
-                session_id].get_event_dispatcher()
-        else:
-            return None
-
-    @property
-    def sl4a_sessions(self):
-        """Returns a dictionary of session ids to sessions."""
-        return list(self._sl4a_manager.sessions)
-
-    @property
-    def is_adb_logcat_on(self):
-        """Whether there is an ongoing adb logcat collection.
-        """
-        if self.adb_logcat_process:
-            if self.adb_logcat_process.is_running():
-                return True
-            else:
-                # if skip_sl4a is true, there is no sl4a session
-                # if logcat died due to device reboot and sl4a session has
-                # not restarted there is no droid.
-                if self.droid:
-                    self.droid.logI('Logcat died')
-                self.log.info("Logcat to %s died", self.log_path)
-                return False
-        return False
-
-    @property
-    def device_log_path(self):
-        """Returns the directory for all Android device logs for the current
-        test context and serial.
-        """
-        return context.get_current_context().get_full_output_path(self.serial)
-
-    def update_sdk_api_level(self):
-        self._sdk_api_level = None
-        self.sdk_api_level()
-
-    def load_config(self, config):
-        """Add attributes to the AndroidDevice object based on json config.
-
-        Args:
-            config: A dictionary representing the configs.
-
-        Raises:
-            AndroidDeviceError is raised if the config is trying to overwrite
-            an existing attribute.
-        """
-        for k, v in config.items():
-            # skip_sl4a value can be reset from config file
-            if hasattr(self, k) and k != "skip_sl4a":
-                raise errors.AndroidDeviceError(
-                    "Attempting to set existing attribute %s on %s" %
-                    (k, self.serial),
-                    serial=self.serial)
-            setattr(self, k, v)
-
-    def root_adb(self):
-        """Change adb to root mode for this device if allowed.
-
-        If executed on a production build, adb will not be switched to root
-        mode per security restrictions.
-        """
-        if self.is_adb_root:
-            return
-
-        for attempt in range(ADB_ROOT_RETRY_COUNT):
-            try:
-                self.log.debug('Enabling ADB root mode: attempt %d.' % attempt)
-                self.adb.root()
-            except AdbError:
-                if attempt == ADB_ROOT_RETRY_COUNT:
-                    raise
-                time.sleep(ADB_ROOT_RETRY_INTERVAL)
-        self.adb.wait_for_device()
-
-    def get_droid(self, handle_event=True):
-        """Create an sl4a connection to the device.
-
-        Return the connection handler 'droid'. By default, another connection
-        on the same session is made for EventDispatcher, and the dispatcher is
-        returned to the caller as well.
-        If sl4a server is not started on the device, try to start it.
-
-        Args:
-            handle_event: True if this droid session will need to handle
-                events.
-
-        Returns:
-            droid: Android object used to communicate with sl4a on the android
-                device.
-            ed: An optional EventDispatcher to organize events for this droid.
-
-        Examples:
-            Don't need event handling:
-            >>> ad = AndroidDevice()
-            >>> droid = ad.get_droid(False)
-
-            Need event handling:
-            >>> ad = AndroidDevice()
-            >>> droid, ed = ad.get_droid()
-        """
-        self.log.debug(
-            "Creating RPC client_port={}, forwarded_port={}, server_port={}".
-            format(self.client_port, self.forwarded_port, self.server_port))
-        session = self._sl4a_manager.create_session(
-            client_port=self.client_port,
-            forwarded_port=self.forwarded_port,
-            server_port=self.server_port)
-        droid = session.rpc_client
-        if handle_event:
-            ed = session.get_event_dispatcher()
-            return droid, ed
-        return droid
-
-    def get_package_pid(self, package_name):
-        """Gets the pid for a given package. Returns None if not running.
-        Args:
-            package_name: The name of the package.
-        Returns:
-            The first pid found under a given package name. None if no process
-            was found running the package.
-        Raises:
-            AndroidDeviceError if the output of the phone's process list was
-            in an unexpected format.
-        """
-        for cmd in ("ps -A", "ps"):
-            try:
-                out = self.adb.shell('%s | grep "S %s"' % (cmd, package_name),
-                                     ignore_status=True)
-                if package_name not in out:
-                    continue
-                try:
-                    pid = int(out.split()[1])
-                    self.log.info('apk %s has pid %s.', package_name, pid)
-                    return pid
-                except (IndexError, ValueError) as e:
-                    # Possible ValueError from string to int cast.
-                    # Possible IndexError from split.
-                    self.log.warning(
-                        'Command \"%s\" returned output line: '
-                        '\"%s\".\nError: %s', cmd, out, e)
-            except Exception as e:
-                self.log.warning(
-                    'Device fails to check if %s running with \"%s\"\n'
-                    'Exception %s', package_name, cmd, e)
-        self.log.debug("apk %s is not running", package_name)
-        return None
-
-    def get_dispatcher(self, droid):
-        """Return an EventDispatcher for an sl4a session
-
-        Args:
-            droid: Session to create EventDispatcher for.
-
-        Returns:
-            ed: An EventDispatcher for specified session.
-        """
-        return self._sl4a_manager.sessions[droid.uid].get_event_dispatcher()
-
-    def _is_timestamp_in_range(self, target, log_begin_time, log_end_time):
-        low = acts_logger.logline_timestamp_comparator(log_begin_time,
-                                                       target) <= 0
-        high = acts_logger.logline_timestamp_comparator(log_end_time,
-                                                        target) >= 0
-        return low and high
-
-    def cat_adb_log(self,
-                    tag,
-                    begin_time,
-                    end_time=None,
-                    dest_path="AdbLogExcerpts"):
-        """Takes an excerpt of the adb logcat log from a certain time point to
-        current time.
-
-        Args:
-            tag: An identifier of the time period, usually the name of a test.
-            begin_time: Epoch time of the beginning of the time period.
-            end_time: Epoch time of the ending of the time period, default None
-            dest_path: Destination path of the excerpt file.
-        """
-        log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time)
-        if end_time is None:
-            log_end_time = acts_logger.get_log_line_timestamp()
-        else:
-            log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
-        self.log.debug("Extracting adb log from logcat.")
-        logcat_path = os.path.join(self.device_log_path,
-                                   'adblog_%s_debug.txt' % self.serial)
-        if not os.path.exists(logcat_path):
-            self.log.warning("Logcat file %s does not exist." % logcat_path)
-            return
-        adb_excerpt_dir = os.path.join(self.log_path, dest_path)
-        os.makedirs(adb_excerpt_dir, exist_ok=True)
-        out_name = '%s,%s.txt' % (acts_logger.normalize_log_line_timestamp(
-            log_begin_time), self.serial)
-        tag_len = utils.MAX_FILENAME_LEN - len(out_name)
-        out_name = '%s,%s' % (tag[:tag_len], out_name)
-        adb_excerpt_path = os.path.join(adb_excerpt_dir, out_name)
-        with open(adb_excerpt_path, 'w', encoding='utf-8') as out:
-            in_file = logcat_path
-            with open(in_file, 'r', encoding='utf-8', errors='replace') as f:
-                while True:
-                    line = None
-                    try:
-                        line = f.readline()
-                        if not line:
-                            break
-                    except:
-                        continue
-                    line_time = line[:acts_logger.log_line_timestamp_len]
-                    if not acts_logger.is_valid_logline_timestamp(line_time):
-                        continue
-                    if self._is_timestamp_in_range(line_time, log_begin_time,
-                                                   log_end_time):
-                        if not line.endswith('\n'):
-                            line += '\n'
-                        out.write(line)
-        return adb_excerpt_path
-
-    def search_logcat(self,
-                      matching_string,
-                      begin_time=None,
-                      end_time=None,
-                      logcat_path=None):
-        """Search logcat message with given string.
-
-        Args:
-            matching_string: matching_string to search.
-            begin_time: only the lines with time stamps later than begin_time
-                will be searched.
-            end_time: only the lines with time stamps earlier than end_time
-                will be searched.
-            logcat_path: the path of a specific file in which the search should
-                be performed. If None the path will be the default device log
-                path.
-
-        Returns:
-            A list of dictionaries with full log message, time stamp string,
-            time object and message ID. For example:
-            [{"log_message": "05-03 17:39:29.898   968  1001 D"
-                              "ActivityManager: Sending BOOT_COMPLETE user #0",
-              "time_stamp": "2017-05-03 17:39:29.898",
-              "datetime_obj": datetime object,
-              "message_id": None}]
-
-            [{"log_message": "08-12 14:26:42.611043  2360  2510 D RILJ    : "
-                             "[0853]< DEACTIVATE_DATA_CALL  [PHONE0]",
-              "time_stamp": "2020-08-12 14:26:42.611043",
-              "datetime_obj": datetime object},
-              "message_id": "0853"}]
-        """
-        if not logcat_path:
-            logcat_path = os.path.join(self.device_log_path,
-                                       'adblog_%s_debug.txt' % self.serial)
-        if not os.path.exists(logcat_path):
-            self.log.warning("Logcat file %s does not exist." % logcat_path)
-            return
-        output = job.run("grep '%s' %s" % (matching_string, logcat_path),
-                         ignore_status=True)
-        if not output.stdout or output.exit_status != 0:
-            return []
-        if begin_time:
-            if not isinstance(begin_time, datetime):
-                log_begin_time = acts_logger.epoch_to_log_line_timestamp(
-                    begin_time)
-                begin_time = datetime.strptime(log_begin_time,
-                                               "%Y-%m-%d %H:%M:%S.%f")
-        if end_time:
-            if not isinstance(end_time, datetime):
-                log_end_time = acts_logger.epoch_to_log_line_timestamp(
-                    end_time)
-                end_time = datetime.strptime(log_end_time,
-                                             "%Y-%m-%d %H:%M:%S.%f")
-        result = []
-        logs = re.findall(r'(\S+\s\S+)(.*)', output.stdout)
-        for log in logs:
-            time_stamp = log[0]
-            time_obj = datetime.strptime(time_stamp, "%Y-%m-%d %H:%M:%S.%f")
-
-            if begin_time and time_obj < begin_time:
-                continue
-
-            if end_time and time_obj > end_time:
-                continue
-
-            res = re.findall(r'.*\[(\d+)\]', log[1])
-            try:
-                message_id = res[0]
-            except:
-                message_id = None
-
-            result.append({
-                "log_message": "".join(log),
-                "time_stamp": time_stamp,
-                "datetime_obj": time_obj,
-                "message_id": message_id
-            })
-        return result
-
-    def start_adb_logcat(self):
-        """Starts a standing adb logcat collection in separate subprocesses and
-        save the logcat in a file.
-        """
-        if self.is_adb_logcat_on:
-            self.log.warning(
-                'Android device %s already has a running adb logcat thread. ' %
-                self.serial)
-            return
-        # Disable adb log spam filter. Have to stop and clear settings first
-        # because 'start' doesn't support --clear option before Android N.
-        self.adb.shell("logpersist.stop --clear", ignore_status=True)
-        self.adb.shell("logpersist.start", ignore_status=True)
-        if hasattr(self, 'adb_logcat_param'):
-            extra_params = self.adb_logcat_param
-        else:
-            extra_params = "-b all"
-
-        self.adb_logcat_process = logcat.create_logcat_keepalive_process(
-            self.serial, self.log_dir, extra_params)
-        self.adb_logcat_process.start()
-
-    def stop_adb_logcat(self):
-        """Stops the adb logcat collection subprocess.
-        """
-        if not self.is_adb_logcat_on:
-            self.log.warning(
-                'Android device %s does not have an ongoing adb logcat ' %
-                self.serial)
-            return
-        # Set the last timestamp to the current timestamp. This may cause
-        # a race condition that allows the same line to be logged twice,
-        # but it does not pose a problem for our logging purposes.
-        self.adb_logcat_process.stop()
-        self.adb_logcat_process = None
-
-    def get_apk_uid(self, apk_name):
-        """Get the uid of the given apk.
-
-        Args:
-        apk_name: Name of the package, e.g., com.android.phone.
-
-        Returns:
-        Linux UID for the apk.
-        """
-        output = self.adb.shell("dumpsys package %s | grep userId=" % apk_name,
-                                ignore_status=True)
-        result = re.search(r"userId=(\d+)", output)
-        if result:
-            return result.group(1)
-        else:
-            None
-
-    def get_apk_version(self, package_name):
-        """Get the version of the given apk.
-
-        Args:
-            package_name: Name of the package, e.g., com.android.phone.
-
-        Returns:
-            Version of the given apk.
-        """
-        try:
-            output = self.adb.shell("dumpsys package %s | grep versionName" %
-                                    package_name)
-            pattern = re.compile(r"versionName=(.+)", re.I)
-            result = pattern.findall(output)
-            if result:
-                return result[0]
-        except Exception as e:
-            self.log.warning("Fail to get the version of package %s: %s",
-                             package_name, e)
-        self.log.debug("apk %s is not found", package_name)
-        return None
-
-    def is_apk_installed(self, package_name):
-        """Check if the given apk is already installed.
-
-        Args:
-        package_name: Name of the package, e.g., com.android.phone.
-
-        Returns:
-        True if package is installed. False otherwise.
-        """
-
-        try:
-            return bool(
-                self.adb.shell(
-                    '(pm list packages | grep -w "package:%s") || true' %
-                    package_name))
-
-        except Exception as err:
-            self.log.error(
-                'Could not determine if %s is installed. '
-                'Received error:\n%s', package_name, err)
-            return False
-
-    def is_sl4a_installed(self):
-        return self.is_apk_installed(SL4A_APK_NAME)
-
-    def is_apk_running(self, package_name):
-        """Check if the given apk is running.
-
-        Args:
-            package_name: Name of the package, e.g., com.android.phone.
-
-        Returns:
-        True if package is installed. False otherwise.
-        """
-        for cmd in ("ps -A", "ps"):
-            try:
-                out = self.adb.shell('%s | grep "S %s"' % (cmd, package_name),
-                                     ignore_status=True)
-                if package_name in out:
-                    self.log.info("apk %s is running", package_name)
-                    return True
-            except Exception as e:
-                self.log.warning(
-                    "Device fails to check is %s running by %s "
-                    "Exception %s", package_name, cmd, e)
-                continue
-        self.log.debug("apk %s is not running", package_name)
-        return False
-
-    def is_sl4a_running(self):
-        return self.is_apk_running(SL4A_APK_NAME)
-
-    def force_stop_apk(self, package_name):
-        """Force stop the given apk.
-
-        Args:
-        package_name: Name of the package, e.g., com.android.phone.
-
-        Returns:
-        True if package is installed. False otherwise.
-        """
-        try:
-            self.adb.shell('am force-stop %s' % package_name,
-                           ignore_status=True)
-        except Exception as e:
-            self.log.warning("Fail to stop package %s: %s", package_name, e)
-
-    def take_bug_report(self, test_name=None, begin_time=None):
-        """Takes a bug report on the device and stores it in a file.
-
-        Args:
-            test_name: Name of the test case that triggered this bug report.
-            begin_time: Epoch time when the test started. If none is specified,
-                the current time will be used.
-        """
-        self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT)
-        new_br = True
-        try:
-            stdout = self.adb.shell("bugreportz -v")
-            # This check is necessary for builds before N, where adb shell's ret
-            # code and stderr are not propagated properly.
-            if "not found" in stdout:
-                new_br = False
-        except AdbError:
-            new_br = False
-        br_path = self.device_log_path
-        os.makedirs(br_path, exist_ok=True)
-        epoch = begin_time if begin_time else utils.get_current_epoch_time()
-        time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(epoch))
-        out_name = "AndroidDevice%s_%s" % (self.serial, time_stamp)
-        out_name = "%s.zip" % out_name if new_br else "%s.txt" % out_name
-        full_out_path = os.path.join(br_path, out_name)
-        # in case device restarted, wait for adb interface to return
-        self.wait_for_boot_completion()
-        if test_name:
-            self.log.info("Taking bugreport for %s.", test_name)
-        else:
-            self.log.info("Taking bugreport.")
-        if new_br:
-            out = self.adb.shell("bugreportz", timeout=BUG_REPORT_TIMEOUT)
-            if not out.startswith("OK"):
-                raise errors.AndroidDeviceError(
-                    'Failed to take bugreport on %s: %s' % (self.serial, out),
-                    serial=self.serial)
-            br_out_path = out.split(':')[1].strip().split()[0]
-            self.adb.pull("%s %s" % (br_out_path, full_out_path))
-        else:
-            self.adb.bugreport(" > {}".format(full_out_path),
-                               timeout=BUG_REPORT_TIMEOUT)
-        if test_name:
-            self.log.info("Bugreport for %s taken at %s.", test_name,
-                          full_out_path)
-        else:
-            self.log.info("Bugreport taken at %s.", test_name, full_out_path)
-        self.adb.wait_for_device(timeout=WAIT_FOR_DEVICE_TIMEOUT)
-
-    def get_file_names(self,
-                       directory,
-                       begin_time=None,
-                       skip_files=[],
-                       match_string=None):
-        """Get files names with provided directory."""
-        cmd = "find %s -type f" % directory
-        if begin_time:
-            current_time = utils.get_current_epoch_time()
-            seconds = int(math.ceil((current_time - begin_time) / 1000.0))
-            cmd = "%s -mtime -%ss" % (cmd, seconds)
-        if match_string:
-            cmd = "%s -iname %s" % (cmd, match_string)
-        for skip_file in skip_files:
-            cmd = "%s ! -iname %s" % (cmd, skip_file)
-        out = self.adb.shell(cmd, ignore_status=True)
-        if not out or "No such" in out or "Permission denied" in out or \
-            "Not a directory" in out:
-            return []
-        files = out.split("\n")
-        self.log.debug("Find files in directory %s: %s", directory, files)
-        return files
-
-    @property
-    def external_storage_path(self):
-        """
-        The $EXTERNAL_STORAGE path on the device. Most commonly set to '/sdcard'
-        """
-        return self.adb.shell('echo $EXTERNAL_STORAGE')
-
-    def file_exists(self, file_path):
-        """Returns whether a file exists on a device.
-
-        Args:
-            file_path: The path of the file to check for.
-        """
-        cmd = '(test -f %s && echo yes) || echo no' % file_path
-        result = self.adb.shell(cmd)
-        if result == 'yes':
-            return True
-        elif result == 'no':
-            return False
-        raise ValueError('Couldn\'t determine if %s exists. '
-                         'Expected yes/no, got %s' % (file_path, result[cmd]))
-
-    def pull_files(self, device_paths, host_path=None):
-        """Pull files from devices.
-
-        Args:
-            device_paths: List of paths on the device to pull from.
-            host_path: Destination path
-        """
-        if isinstance(device_paths, str):
-            device_paths = [device_paths]
-        if not host_path:
-            host_path = self.log_path
-        for device_path in device_paths:
-            self.log.info('Pull from device: %s -> %s' %
-                          (device_path, host_path))
-            self.adb.pull("%s %s" % (device_path, host_path),
-                          timeout=PULL_TIMEOUT)
-
-    def check_crash_report(self,
-                           test_name=None,
-                           begin_time=None,
-                           log_crash_report=False):
-        """check crash report on the device."""
-        crash_reports = []
-        for crash_path in CRASH_REPORT_PATHS:
-            try:
-                cmd = 'cd %s' % crash_path
-                self.adb.shell(cmd)
-            except Exception as e:
-                self.log.debug("received exception %s", e)
-                continue
-            crashes = self.get_file_names(crash_path,
-                                          skip_files=CRASH_REPORT_SKIPS,
-                                          begin_time=begin_time)
-            if crash_path == "/data/tombstones/" and crashes:
-                tombstones = crashes[:]
-                for tombstone in tombstones:
-                    if self.adb.shell(
-                            'cat %s | grep "crash_dump failed to dump process"'
-                            % tombstone):
-                        crashes.remove(tombstone)
-            if crashes:
-                crash_reports.extend(crashes)
-        if crash_reports and log_crash_report:
-            crash_log_path = os.path.join(self.device_log_path,
-                                          "Crashes_%s" % self.serial)
-            os.makedirs(crash_log_path, exist_ok=True)
-            self.pull_files(crash_reports, crash_log_path)
-        return crash_reports
-
-    def get_qxdm_logs(self, test_name="", begin_time=None):
-        """Get qxdm logs."""
-        # Sleep 10 seconds for the buffered log to be written in qxdm log file
-        time.sleep(10)
-        log_path = getattr(self, "qxdm_log_path", DEFAULT_QXDM_LOG_PATH)
-        qxdm_logs = self.get_file_names(log_path,
-                                        begin_time=begin_time,
-                                        match_string="*.qmdl")
-        if qxdm_logs:
-            qxdm_log_path = os.path.join(self.device_log_path,
-                                         "QXDM_%s" % self.serial)
-            os.makedirs(qxdm_log_path, exist_ok=True)
-
-            self.log.info("Pull QXDM Log %s to %s", qxdm_logs, qxdm_log_path)
-            self.pull_files(qxdm_logs, qxdm_log_path)
-
-            self.adb.pull("/firmware/image/qdsp6m.qdb %s" % qxdm_log_path,
-                          timeout=PULL_TIMEOUT,
-                          ignore_status=True)
-            # Zip Folder
-            utils.zip_directory('%s.zip' % qxdm_log_path, qxdm_log_path)
-            shutil.rmtree(qxdm_log_path)
-        else:
-            self.log.error("Didn't find QXDM logs in %s." % log_path)
-        if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
-            omadm_log_path = os.path.join(self.device_log_path,
-                                          "OMADM_%s" % self.serial)
-            os.makedirs(omadm_log_path, exist_ok=True)
-            self.log.info("Pull OMADM Log")
-            self.adb.pull(
-                "/data/data/com.android.omadm.service/files/dm/log/ %s" %
-                omadm_log_path,
-                timeout=PULL_TIMEOUT,
-                ignore_status=True)
-
-    def get_sdm_logs(self, test_name="", begin_time=None):
-        """Get sdm logs."""
-        # Sleep 10 seconds for the buffered log to be written in sdm log file
-        time.sleep(10)
-        log_paths = [
-            ALWAYS_ON_LOG_PATH,
-            getattr(self, "sdm_log_path", DEFAULT_SDM_LOG_PATH)
-        ]
-        sdm_logs = []
-        for path in log_paths:
-            sdm_logs += self.get_file_names(path,
-                                            begin_time=begin_time,
-                                            match_string="*.sdm*")
-        if sdm_logs:
-            sdm_log_path = os.path.join(self.device_log_path,
-                                        "SDM_%s" % self.serial)
-            os.makedirs(sdm_log_path, exist_ok=True)
-            self.log.info("Pull SDM Log %s to %s", sdm_logs, sdm_log_path)
-            self.pull_files(sdm_logs, sdm_log_path)
-        else:
-            self.log.error("Didn't find SDM logs in %s." % log_paths)
-        if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
-            omadm_log_path = os.path.join(self.device_log_path,
-                                          "OMADM_%s" % self.serial)
-            os.makedirs(omadm_log_path, exist_ok=True)
-            self.log.info("Pull OMADM Log")
-            self.adb.pull(
-                "/data/data/com.android.omadm.service/files/dm/log/ %s" %
-                omadm_log_path,
-                timeout=PULL_TIMEOUT,
-                ignore_status=True)
-
-    def start_new_session(self, max_connections=None, server_port=None):
-        """Start a new session in sl4a.
-
-        Also caches the droid in a dict with its uid being the key.
-
-        Returns:
-            An Android object used to communicate with sl4a on the android
-                device.
-
-        Raises:
-            Sl4aException: Something is wrong with sl4a and it returned an
-            existing uid to a new session.
-        """
-        session = self._sl4a_manager.create_session(
-            max_connections=max_connections, server_port=server_port)
-
-        self._sl4a_manager.sessions[session.uid] = session
-        return session.rpc_client
-
-    def terminate_all_sessions(self):
-        """Terminate all sl4a sessions on the AndroidDevice instance.
-
-        Terminate all sessions and clear caches.
-        """
-        self._sl4a_manager.terminate_all_sessions()
-
-    def run_iperf_client_nb(self,
-                            server_host,
-                            extra_args="",
-                            timeout=IPERF_TIMEOUT,
-                            log_file_path=None):
-        """Start iperf client on the device asynchronously.
-
-        Return status as true if iperf client start successfully.
-        And data flow information as results.
-
-        Args:
-            server_host: Address of the iperf server.
-            extra_args: A string representing extra arguments for iperf client,
-                e.g. "-i 1 -t 30".
-            log_file_path: The complete file path to log the results.
-
-        """
-        cmd = "iperf3 -c {} {}".format(server_host, extra_args)
-        if log_file_path:
-            cmd += " --logfile {} &".format(log_file_path)
-        self.adb.shell_nb(cmd)
-
-    def run_iperf_client(self,
-                         server_host,
-                         extra_args="",
-                         timeout=IPERF_TIMEOUT):
-        """Start iperf client on the device.
-
-        Return status as true if iperf client start successfully.
-        And data flow information as results.
-
-        Args:
-            server_host: Address of the iperf server.
-            extra_args: A string representing extra arguments for iperf client,
-                e.g. "-i 1 -t 30".
-
-        Returns:
-            status: true if iperf client start successfully.
-            results: results have data flow information
-        """
-        out = self.adb.shell("iperf3 -c {} {}".format(server_host, extra_args),
-                             timeout=timeout)
-        clean_out = out.split('\n')
-        if "error" in clean_out[0].lower():
-            return False, clean_out
-        return True, clean_out
-
-    def run_iperf_server(self, extra_args=""):
-        """Start iperf server on the device
-
-        Return status as true if iperf server started successfully.
-
-        Args:
-            extra_args: A string representing extra arguments for iperf server.
-
-        Returns:
-            status: true if iperf server started successfully.
-            results: results have output of command
-        """
-        out = self.adb.shell("iperf3 -s {}".format(extra_args))
-        clean_out = out.split('\n')
-        if "error" in clean_out[0].lower():
-            return False, clean_out
-        return True, clean_out
-
-    def wait_for_boot_completion(self, timeout=900.0):
-        """Waits for Android framework to broadcast ACTION_BOOT_COMPLETED.
-
-        Args:
-            timeout: Seconds to wait for the device to boot. Default value is
-            15 minutes.
-        """
-        timeout_start = time.time()
-
-        self.log.debug("ADB waiting for device")
-        self.adb.wait_for_device(timeout=timeout)
-        self.log.debug("Waiting for  sys.boot_completed")
-        while time.time() < timeout_start + timeout:
-            try:
-                completed = self.adb.getprop("sys.boot_completed")
-                if completed == '1':
-                    self.log.debug("Device has rebooted")
-                    return
-            except AdbError:
-                # adb shell calls may fail during certain period of booting
-                # process, which is normal. Ignoring these errors.
-                pass
-            time.sleep(5)
-        raise errors.AndroidDeviceError(
-            'Device %s booting process timed out.' % self.serial,
-            serial=self.serial)
-
-    def reboot(self,
-               stop_at_lock_screen=False,
-               timeout=180,
-               wait_after_reboot_complete=1):
-        """Reboots the device.
-
-        Terminate all sl4a sessions, reboot the device, wait for device to
-        complete booting, and restart an sl4a session if restart_sl4a is True.
-
-        Args:
-            stop_at_lock_screen: whether to unlock after reboot. Set to False
-                if want to bring the device to reboot up to password locking
-                phase. Sl4a checking need the device unlocked after rebooting.
-            timeout: time in seconds to wait for the device to complete
-                rebooting.
-            wait_after_reboot_complete: time in seconds to wait after the boot
-                completion.
-        """
-        if self.is_bootloader:
-            self.fastboot.reboot()
-            return
-        self.stop_services()
-        self.log.info("Rebooting")
-        self.adb.reboot()
-
-        timeout_start = time.time()
-        # b/111791239: Newer versions of android sometimes return early after
-        # `adb reboot` is called. This means subsequent calls may make it to
-        # the device before the reboot goes through, return false positives for
-        # getprops such as sys.boot_completed.
-        while time.time() < timeout_start + timeout:
-            try:
-                self.adb.get_state()
-                time.sleep(.1)
-            except AdbError:
-                # get_state will raise an error if the device is not found. We
-                # want the device to be missing to prove the device has kicked
-                # off the reboot.
-                break
-        self.wait_for_boot_completion(timeout=(timeout - time.time() +
-                                               timeout_start))
-
-        self.log.debug('Wait for a while after boot completion.')
-        time.sleep(wait_after_reboot_complete)
-        self.root_adb()
-        skip_sl4a = self.skip_sl4a
-        self.skip_sl4a = self.skip_sl4a or stop_at_lock_screen
-        self.start_services()
-        self.skip_sl4a = skip_sl4a
-
-    def restart_runtime(self):
-        """Restarts android runtime.
-
-        Terminate all sl4a sessions, restarts runtime, wait for framework
-        complete restart, and restart an sl4a session if restart_sl4a is True.
-        """
-        self.stop_services()
-        self.log.info("Restarting android runtime")
-        self.adb.shell("stop")
-        # Reset the boot completed flag before we restart the framework
-        # to correctly detect when the framework has fully come up.
-        self.adb.shell("setprop sys.boot_completed 0")
-        self.adb.shell("start")
-        self.wait_for_boot_completion()
-        self.root_adb()
-
-        self.start_services()
-
-    def get_ipv4_address(self, interface='wlan0', timeout=5):
-        for timer in range(0, timeout):
-            try:
-                ip_string = self.adb.shell('ifconfig %s|grep inet' % interface)
-                break
-            except adb.AdbError as e:
-                if timer + 1 == timeout:
-                    self.log.warning('Unable to find IP address for %s.' %
-                                     interface)
-                    return None
-                else:
-                    time.sleep(1)
-        result = re.search('addr:(.*) Bcast', ip_string)
-        if result != None:
-            ip_address = result.group(1)
-            try:
-                socket.inet_aton(ip_address)
-                return ip_address
-            except socket.error:
-                return None
-        else:
-            return None
-
-    def get_ipv4_gateway(self, timeout=5):
-        for timer in range(0, timeout):
-            try:
-                gateway_string = self.adb.shell(
-                    'dumpsys wifi | grep mDhcpResults')
-                break
-            except adb.AdbError as e:
-                if timer + 1 == timeout:
-                    self.log.warning('Unable to find gateway')
-                    return None
-                else:
-                    time.sleep(1)
-        result = re.search('Gateway (.*) DNS servers', gateway_string)
-        if result != None:
-            ipv4_gateway = result.group(1)
-            try:
-                socket.inet_aton(ipv4_gateway)
-                return ipv4_gateway
-            except socket.error:
-                return None
-        else:
-            return None
-
-    def send_keycode(self, keycode):
-        self.adb.shell("input keyevent KEYCODE_%s" % keycode)
-
-    def get_my_current_focus_window(self):
-        """Get the current focus window on screen"""
-        output = self.adb.shell(
-            'dumpsys window displays | grep -E mCurrentFocus | grep -v null',
-            ignore_status=True)
-        if not output or "not found" in output or "Can't find" in output:
-            result = ''
-        else:
-            result = output.split(' ')[-1].strip("}")
-        self.log.debug("Current focus window is %s", result)
-        return result
-
-    def get_my_current_focus_app(self):
-        """Get the current focus application"""
-        dumpsys_cmd = [
-            'dumpsys window | grep -E mFocusedApp',
-            'dumpsys window displays | grep -E mFocusedApp'
-        ]
-        for cmd in dumpsys_cmd:
-            output = self.adb.shell(cmd, ignore_status=True)
-            if not output or "not found" in output or "Can't find" in output or (
-                    "mFocusedApp=null" in output):
-                result = ''
-            else:
-                result = output.split(' ')[-2]
-                break
-        self.log.debug("Current focus app is %s", result)
-        return result
-
-    def is_window_ready(self, window_name=None):
-        current_window = self.get_my_current_focus_window()
-        if window_name:
-            return window_name in current_window
-        return current_window and ENCRYPTION_WINDOW not in current_window
-
-    def wait_for_window_ready(self,
-                              window_name=None,
-                              check_interval=5,
-                              check_duration=60):
-        elapsed_time = 0
-        while elapsed_time < check_duration:
-            if self.is_window_ready(window_name=window_name):
-                return True
-            time.sleep(check_interval)
-            elapsed_time += check_interval
-        self.log.info("Current focus window is %s",
-                      self.get_my_current_focus_window())
-        return False
-
-    def is_user_setup_complete(self):
-        return "1" in self.adb.shell("settings get secure user_setup_complete")
-
-    def is_screen_awake(self):
-        """Check if device screen is in sleep mode"""
-        return "Awake" in self.adb.shell("dumpsys power | grep mWakefulness=")
-
-    def is_screen_emergency_dialer(self):
-        """Check if device screen is in emergency dialer mode"""
-        return "EmergencyDialer" in self.get_my_current_focus_window()
-
-    def is_screen_in_call_activity(self):
-        """Check if device screen is in in-call activity notification"""
-        return "InCallActivity" in self.get_my_current_focus_window()
-
-    def is_setupwizard_on(self):
-        """Check if device screen is in emergency dialer mode"""
-        return "setupwizard" in self.get_my_current_focus_app()
-
-    def is_screen_lock_enabled(self):
-        """Check if screen lock is enabled"""
-        cmd = ("dumpsys window policy | grep showing=")
-        out = self.adb.shell(cmd, ignore_status=True)
-        return "true" in out
-
-    def is_waiting_for_unlock_pin(self):
-        """Check if device is waiting for unlock pin to boot up"""
-        current_window = self.get_my_current_focus_window()
-        current_app = self.get_my_current_focus_app()
-        if ENCRYPTION_WINDOW in current_window:
-            self.log.info("Device is in CrpytKeeper window")
-            return True
-        if "StatusBar" in current_window and (
-            (not current_app) or "FallbackHome" in current_app):
-            self.log.info("Device is locked")
-            return True
-        return False
-
-    def ensure_screen_on(self):
-        """Ensure device screen is powered on"""
-        if self.is_screen_lock_enabled():
-            for _ in range(2):
-                self.unlock_screen()
-                time.sleep(1)
-                if self.is_waiting_for_unlock_pin():
-                    self.unlock_screen(password=DEFAULT_DEVICE_PASSWORD)
-                    time.sleep(1)
-                if not self.is_waiting_for_unlock_pin(
-                ) and self.wait_for_window_ready():
-                    return True
-            return False
-        else:
-            self.wakeup_screen()
-            return True
-
-    def wakeup_screen(self):
-        if not self.is_screen_awake():
-            self.log.info("Screen is not awake, wake it up")
-            self.send_keycode("WAKEUP")
-
-    def go_to_sleep(self):
-        if self.is_screen_awake():
-            self.send_keycode("SLEEP")
-
-    def send_keycode_number_pad(self, number):
-        self.send_keycode("NUMPAD_%s" % number)
-
-    def unlock_screen(self, password=None):
-        self.log.info("Unlocking with %s", password or "swipe up")
-        # Bring device to SLEEP so that unlock process can start fresh
-        self.send_keycode("SLEEP")
-        time.sleep(1)
-        self.send_keycode("WAKEUP")
-        if ENCRYPTION_WINDOW not in self.get_my_current_focus_app():
-            self.send_keycode("MENU")
-        if password:
-            self.send_keycode("DEL")
-            for number in password:
-                self.send_keycode_number_pad(number)
-            self.send_keycode("ENTER")
-            self.send_keycode("BACK")
-
-    def screenshot(self, name=""):
-        """Take a screenshot on the device.
-
-        Args:
-            name: additional information of screenshot on the file name.
-        """
-        if name:
-            file_name = "%s_%s" % (DEFAULT_SCREENSHOT_PATH, name)
-        file_name = "%s_%s.png" % (file_name, utils.get_current_epoch_time())
-        self.ensure_screen_on()
-        self.log.info("Log screenshot to %s", file_name)
-        try:
-            self.adb.shell("screencap -p %s" % file_name)
-        except:
-            self.log.error("Fail to log screenshot to %s", file_name)
-
-    def exit_setup_wizard(self):
-        # Handling Android TV's setupwizard is ignored for now.
-        if 'feature:android.hardware.type.television' in self.adb.shell(
-                'pm list features'):
-            return
-        if not self.is_user_setup_complete() or self.is_setupwizard_on():
-            # b/116709539 need this to prevent reboot after skip setup wizard
-            self.adb.shell("am start -a com.android.setupwizard.EXIT",
-                           ignore_status=True)
-            self.adb.shell("pm disable %s" %
-                           self.get_setupwizard_package_name(),
-                           ignore_status=True)
-        # Wait up to 5 seconds for user_setup_complete to be updated
-        end_time = time.time() + 5
-        while time.time() < end_time:
-            if self.is_user_setup_complete() or not self.is_setupwizard_on():
-                return
-
-        # If fail to exit setup wizard, set local.prop and reboot
-        if not self.is_user_setup_complete() and self.is_setupwizard_on():
-            self.adb.shell("echo ro.test_harness=1 > /data/local.prop")
-            self.adb.shell("chmod 644 /data/local.prop")
-            self.reboot(stop_at_lock_screen=True)
-
-    def get_setupwizard_package_name(self):
-        """Finds setupwizard package/.activity
-
-        Bypass setupwizard or setupwraith depending on device.
-
-         Returns:
-            packageName/.ActivityName
-        """
-        packages_to_skip = "'setupwizard|setupwraith'"
-        android_package_name = "com.google.android"
-        package = self.adb.shell(
-            "pm list packages -f | grep -E {} | grep {}".format(
-                packages_to_skip, android_package_name))
-        wizard_package = package.split('=')[1]
-        activity = package.split('=')[0].split('/')[-2]
-        self.log.info("%s/.%sActivity" % (wizard_package, activity))
-        return "%s/.%sActivity" % (wizard_package, activity)
-
-    def push_system_file(self, src_file_path, dst_file_path, push_timeout=300):
-        """Pushes a file onto the read-only file system.
-
-        For speed, the device is left in root mode after this call, and leaves
-        verity disabled. To re-enable verity, call ensure_verity_enabled().
-
-        Args:
-            src_file_path: The path to the system app to install.
-            dst_file_path: The destination of the file.
-            push_timeout: How long to wait for the push to finish.
-        Returns:
-            Whether or not the install was successful.
-        """
-        self.adb.ensure_root()
-        try:
-            self.ensure_verity_disabled()
-            self.adb.remount()
-            out = self.adb.push('%s %s' % (src_file_path, dst_file_path),
-                                timeout=push_timeout)
-            if 'error' in out:
-                self.log.error('Unable to push system file %s to %s due to %s',
-                               src_file_path, dst_file_path, out)
-                return False
-            return True
-        except Exception as e:
-            self.log.error('Unable to push system file %s to %s due to %s',
-                           src_file_path, dst_file_path, e)
-            return False
-
-    def ensure_verity_enabled(self):
-        """Ensures that verity is enabled.
-
-        If verity is not enabled, this call will reboot the phone. Note that
-        this only works on debuggable builds.
-        """
-        user = self.adb.get_user_id()
-        # The below properties will only exist if verity has been enabled.
-        system_verity = self.adb.getprop('partition.system.verified')
-        vendor_verity = self.adb.getprop('partition.vendor.verified')
-        if not system_verity or not vendor_verity:
-            self.adb.ensure_root()
-            self.adb.enable_verity()
-            self.reboot()
-            self.adb.ensure_user(user)
-
-    def ensure_verity_disabled(self):
-        """Ensures that verity is disabled.
-
-        If verity is enabled, this call will reboot the phone.
-        """
-        user = self.adb.get_user_id()
-        # The below properties will only exist if verity has been enabled.
-        system_verity = self.adb.getprop('partition.system.verified')
-        vendor_verity = self.adb.getprop('partition.vendor.verified')
-        if system_verity or vendor_verity:
-            self.adb.ensure_root()
-            self.adb.disable_verity()
-            self.reboot()
-            self.adb.ensure_user(user)
-
-
-class AndroidDeviceLoggerAdapter(logging.LoggerAdapter):
-    def process(self, msg, kwargs):
-        msg = "[AndroidDevice|%s] %s" % (self.extra["serial"], msg)
-        return (msg, kwargs)
diff --git a/src/antlion/controllers/android_lib/android_api.py b/src/antlion/controllers/android_lib/android_api.py
deleted file mode 100644
index d58fe46..0000000
--- a/src/antlion/controllers/android_lib/android_api.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import sys
-
-from antlion.controllers.android_device import AndroidDevice
-from antlion.libs import version_selector
-
-
-class AndroidApi:
-    OLDEST = 0
-    MINIMUM = 0
-    L = 21
-    L_MR1 = 22
-    M = 23
-    N = 24
-    N_MR1 = 25
-    O = 26
-    O_MR1 = 27
-    P = 28
-    LATEST = sys.maxsize
-    MAX = sys.maxsize
-
-
-def android_api(min_api=AndroidApi.OLDEST, max_api=AndroidApi.LATEST):
-    """Decorates a function to only be called for the given API range.
-
-    Only gets called if the AndroidDevice in the args is within the specified
-    API range. Otherwise, a different function may be called instead. If the
-    API level is out of range, and no other function handles that API level, an
-    error is raise instead.
-
-    Note: In Python3.5 and below, the order of kwargs is not preserved. If your
-          function contains multiple AndroidDevices within the kwargs, and no
-          AndroidDevices within args, you are NOT guaranteed the first
-          AndroidDevice is the same one chosen each time the function runs. Due
-          to this, we do not check for AndroidDevices in kwargs.
-
-    Args:
-         min_api: The minimum API level. Can be an int or an AndroidApi value.
-         max_api: The maximum API level. Can be an int or an AndroidApi value.
-    """
-
-    def get_api_level(*args, **_):
-        for arg in args:
-            if isinstance(arg, AndroidDevice):
-                return arg.sdk_api_level()
-        logging.getLogger().error(
-            'An AndroidDevice was not found in the given '
-            'arguments.')
-        return None
-
-    return version_selector.set_version(get_api_level, min_api, max_api)
diff --git a/src/antlion/controllers/android_lib/logcat.py b/src/antlion/controllers/android_lib/logcat.py
deleted file mode 100644
index 4e2c4fd..0000000
--- a/src/antlion/controllers/android_lib/logcat.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import re
-
-from antlion.libs.proc.process import Process
-from antlion.libs.logging import log_stream
-from antlion.libs.logging.log_stream import LogStyles
-
-TIMESTAMP_REGEX = r'((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)'
-
-
-class TimestampTracker(object):
-    """Stores the last timestamp outputted by the Logcat process."""
-
-    def __init__(self):
-        self._last_timestamp = None
-
-    @property
-    def last_timestamp(self):
-        return self._last_timestamp
-
-    def read_output(self, message):
-        """Reads the message and parses all timestamps from it."""
-        all_timestamps = re.findall(TIMESTAMP_REGEX, message)
-        if len(all_timestamps) > 0:
-            self._last_timestamp = all_timestamps[0]
-
-
-def _get_log_level(message):
-    """Returns the log level for the given message."""
-    if message.startswith('-') or len(message) < 37:
-        return logging.ERROR
-    else:
-        log_level = message[36]
-        if log_level in ('V', 'D'):
-            return logging.DEBUG
-        elif log_level == 'I':
-            return logging.INFO
-        elif log_level == 'W':
-            return logging.WARNING
-        elif log_level == 'E':
-            return logging.ERROR
-    return logging.NOTSET
-
-
-def _log_line_func(log, timestamp_tracker):
-    """Returns a lambda that logs a message to the given logger."""
-
-    def log_line(message):
-        timestamp_tracker.read_output(message)
-        log.log(_get_log_level(message), message)
-
-    return log_line
-
-
-def _on_retry(serial, extra_params, timestamp_tracker):
-    def on_retry(_):
-        begin_at = '"%s"' % (timestamp_tracker.last_timestamp or 1)
-        additional_params = extra_params or ''
-
-        return 'adb -s %s logcat -T %s -v year %s' % (
-            serial, begin_at, additional_params)
-
-    return on_retry
-
-
-def create_logcat_keepalive_process(serial, logcat_dir, extra_params=''):
-    """Creates a Logcat Process that automatically attempts to reconnect.
-
-    Args:
-        serial: The serial of the device to read the logcat of.
-        logcat_dir: The directory used for logcat file output.
-        extra_params: Any additional params to be added to the logcat cmdline.
-
-    Returns:
-        A acts.libs.proc.process.Process object.
-    """
-    logger = log_stream.create_logger(
-        'adblog_%s' % serial, log_name=serial, subcontext=logcat_dir,
-        log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG))
-    process = Process('adb -s %s logcat -T 1 -v year %s' %
-                      (serial, extra_params))
-    timestamp_tracker = TimestampTracker()
-    process.set_on_output_callback(_log_line_func(logger, timestamp_tracker))
-    process.set_on_terminate_callback(
-        _on_retry(serial, extra_params, timestamp_tracker))
-    return process
diff --git a/src/antlion/controllers/android_lib/services.py b/src/antlion/controllers/android_lib/services.py
deleted file mode 100644
index 6c5f334..0000000
--- a/src/antlion/controllers/android_lib/services.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from antlion.controllers.android_lib import errors
-from antlion.controllers.android_lib import events as android_events
-from antlion.event import event_bus
-
-
-class AndroidService(object):
-    """The base class for Android long-running services.
-
-    The _start method is registered to an AndroidStartServicesEvent, and
-    the _stop method is registered to an AndroidStopServicesEvent.
-
-    Attributes:
-        ad: The AndroidDevice instance associated with the service.
-        serial: The serial of the device.
-        _registration_ids: List of registration IDs for the event subscriptions.
-    """
-
-    def __init__(self, ad):
-        self.ad = ad
-        self._registration_ids = []
-
-    @property
-    def serial(self):
-        return self.ad.serial
-
-    def register(self):
-        """Registers the _start and _stop methods to their corresponding
-        events.
-        """
-        def check_serial(event):
-            return self.serial == event.ad.serial
-
-        self._registration_ids = [
-            event_bus.register(android_events.AndroidStartServicesEvent,
-                               self._start, filter_fn=check_serial),
-            event_bus.register(android_events.AndroidStopServicesEvent,
-                               self._stop, filter_fn=check_serial)]
-
-    def unregister(self):
-        """Unregisters all subscriptions in this service."""
-        event_bus.unregister_all(from_list=self._registration_ids)
-        self._registration_ids.clear()
-
-    def _start(self, start_event):
-        """Start the service. Called upon an AndroidStartServicesEvent.
-
-        Args:
-            start_event: The AndroidStartServicesEvent instance.
-        """
-        raise NotImplementedError
-
-    def _stop(self, stop_event):
-        """Stop the service. Called upon an AndroidStopServicesEvent.
-
-        Args:
-            stop_event: The AndroidStopServicesEvent instance.
-        """
-        raise NotImplementedError
-
-
-class AdbLogcatService(AndroidService):
-    """Service for adb logcat."""
-
-    def _start(self, _):
-        self.ad.start_adb_logcat()
-
-    def _stop(self, _):
-        self.ad.stop_adb_logcat()
-
-
-class Sl4aService(AndroidService):
-    """Service for SL4A."""
-
-    def _start(self, start_event):
-        if self.ad.skip_sl4a:
-            return
-
-        if not self.ad.is_sl4a_installed():
-            self.ad.log.error('sl4a.apk is not installed')
-            raise errors.AndroidDeviceError(
-                'The required sl4a.apk is not installed',
-                serial=self.serial)
-        if not self.ad.ensure_screen_on():
-            self.ad.log.error("User window cannot come up")
-            raise errors.AndroidDeviceError(
-                "User window cannot come up", serial=self.serial)
-
-        droid, ed = self.ad.get_droid()
-        ed.start()
-
-    def _stop(self, _):
-        self.ad.terminate_all_sessions()
-        self.ad._sl4a_manager.stop_service()
diff --git a/src/antlion/controllers/android_lib/tel/__init__.py b/src/antlion/controllers/android_lib/tel/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/android_lib/tel/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/android_lib/tel/tel_utils.py b/src/antlion/controllers/android_lib/tel/tel_utils.py
deleted file mode 100644
index 0be8ef5..0000000
--- a/src/antlion/controllers/android_lib/tel/tel_utils.py
+++ /dev/null
@@ -1,671 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Generic telephony utility functions. Cloned from test_utils.tel."""
-
-import re
-import struct
-import time
-from queue import Empty
-
-from antlion.logger import epoch_to_log_line_timestamp
-from antlion.controllers.adb_lib.error import AdbCommandError
-
-INCALL_UI_DISPLAY_FOREGROUND = "foreground"
-INCALL_UI_DISPLAY_BACKGROUND = "background"
-INCALL_UI_DISPLAY_DEFAULT = "default"
-
-# Max time to wait after caller make a call and before
-# callee start ringing
-MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT = 30
-
-# Max time to wait after toggle airplane mode and before
-# get expected event
-MAX_WAIT_TIME_AIRPLANEMODE_EVENT = 90
-
-# Wait time between state check retry
-WAIT_TIME_BETWEEN_STATE_CHECK = 5
-
-# Constant for Data Roaming State
-DATA_ROAMING_ENABLE = 1
-DATA_ROAMING_DISABLE = 0
-
-# Constant for Telephony Manager Call State
-TELEPHONY_STATE_RINGING = "RINGING"
-TELEPHONY_STATE_IDLE = "IDLE"
-TELEPHONY_STATE_OFFHOOK = "OFFHOOK"
-TELEPHONY_STATE_UNKNOWN = "UNKNOWN"
-
-# Constant for Service State
-SERVICE_STATE_EMERGENCY_ONLY = "EMERGENCY_ONLY"
-SERVICE_STATE_IN_SERVICE = "IN_SERVICE"
-SERVICE_STATE_OUT_OF_SERVICE = "OUT_OF_SERVICE"
-SERVICE_STATE_POWER_OFF = "POWER_OFF"
-SERVICE_STATE_UNKNOWN = "UNKNOWN"
-
-# Constant for Network Mode
-NETWORK_MODE_GSM_ONLY = "NETWORK_MODE_GSM_ONLY"
-NETWORK_MODE_WCDMA_ONLY = "NETWORK_MODE_WCDMA_ONLY"
-NETWORK_MODE_LTE_ONLY = "NETWORK_MODE_LTE_ONLY"
-
-# Constant for Events
-EVENT_CALL_STATE_CHANGED = "CallStateChanged"
-EVENT_SERVICE_STATE_CHANGED = "ServiceStateChanged"
-
-
-class CallStateContainer:
-    INCOMING_NUMBER = "incomingNumber"
-    SUBSCRIPTION_ID = "subscriptionId"
-    CALL_STATE = "callState"
-
-
-class ServiceStateContainer:
-    VOICE_REG_STATE = "voiceRegState"
-    VOICE_NETWORK_TYPE = "voiceNetworkType"
-    DATA_REG_STATE = "dataRegState"
-    DATA_NETWORK_TYPE = "dataNetworkType"
-    OPERATOR_NAME = "operatorName"
-    OPERATOR_ID = "operatorId"
-    IS_MANUAL_NW_SELECTION = "isManualNwSelection"
-    ROAMING = "roaming"
-    IS_EMERGENCY_ONLY = "isEmergencyOnly"
-    NETWORK_ID = "networkId"
-    SYSTEM_ID = "systemId"
-    SUBSCRIPTION_ID = "subscriptionId"
-    SERVICE_STATE = "serviceState"
-
-
-def dumpsys_last_call_info(ad):
-    """ Get call information by dumpsys telecom. """
-    num = dumpsys_last_call_number(ad)
-    output = ad.adb.shell("dumpsys telecom")
-    result = re.search(r"Call TC@%s: {(.*?)}" % num, output, re.DOTALL)
-    call_info = {"TC": num}
-    if result:
-        result = result.group(1)
-        for attr in ("startTime", "endTime", "direction", "isInterrupted",
-                     "callTechnologies", "callTerminationsReason",
-                     "isVideoCall", "callProperties"):
-            match = re.search(r"%s: (.*)" % attr, result)
-            if match:
-                if attr in ("startTime", "endTime"):
-                    call_info[attr] = epoch_to_log_line_timestamp(
-                        int(match.group(1)))
-                else:
-                    call_info[attr] = match.group(1)
-    ad.log.debug("call_info = %s", call_info)
-    return call_info
-
-
-def dumpsys_last_call_number(ad):
-    output = ad.adb.shell("dumpsys telecom")
-    call_nums = re.findall("Call TC@(\d+):", output)
-    if not call_nums:
-        return 0
-    else:
-        return int(call_nums[-1])
-
-
-def get_device_epoch_time(ad):
-    return int(1000 * float(ad.adb.shell("date +%s.%N")))
-
-
-def get_outgoing_voice_sub_id(ad):
-    """ Get outgoing voice subscription id
-    """
-    if hasattr(ad, "outgoing_voice_sub_id"):
-        return ad.outgoing_voice_sub_id
-    else:
-        return ad.droid.subscriptionGetDefaultVoiceSubId()
-
-
-def get_rx_tx_power_levels(log, ad):
-    """ Obtains Rx and Tx power levels from the MDS application.
-
-    The method requires the MDS app to be installed in the DUT.
-
-    Args:
-        log: logger object
-        ad: an android device
-
-    Return:
-        A tuple where the first element is an array array with the RSRP value
-        in Rx chain, and the second element is the transmitted power in dBm.
-        Values for invalid Rx / Tx chains are set to None.
-    """
-    cmd = ('am instrument -w -e request "80 00 e8 03 00 08 00 00 00" -e '
-           'response wait "com.google.mdstest/com.google.mdstest.instrument.'
-           'ModemCommandInstrumentation"')
-    try:
-        output = ad.adb.shell(cmd)
-    except AdbCommandError as e:
-        log.error(e)
-        output = None
-
-    if not output or 'result=SUCCESS' not in output:
-        raise RuntimeError('Could not obtain Tx/Rx power levels from MDS. Is '
-                           'the MDS app installed?')
-
-    response = re.search(r"(?<=response=).+", output)
-
-    if not response:
-        raise RuntimeError('Invalid response from the MDS app:\n' + output)
-
-    # Obtain a list of bytes in hex format from the response string
-    response_hex = response.group(0).split(' ')
-
-    def get_bool(pos):
-        """ Obtain a boolean variable from the byte array. """
-        return response_hex[pos] == '01'
-
-    def get_int32(pos):
-        """ Obtain an int from the byte array. Bytes are printed in
-        little endian format."""
-        return struct.unpack(
-            '<i', bytearray.fromhex(''.join(response_hex[pos:pos + 4])))[0]
-
-    rx_power = []
-    RX_CHAINS = 4
-
-    for i in range(RX_CHAINS):
-        # Calculate starting position for the Rx chain data structure
-        start = 12 + i * 22
-
-        # The first byte in the data structure indicates if the rx chain is
-        # valid.
-        if get_bool(start):
-            rx_power.append(get_int32(start + 2) / 10)
-        else:
-            rx_power.append(None)
-
-    # Calculate the position for the tx chain data structure
-    tx_pos = 12 + RX_CHAINS * 22
-
-    tx_valid = get_bool(tx_pos)
-    if tx_valid:
-        tx_power = get_int32(tx_pos + 2) / -10
-    else:
-        tx_power = None
-
-    return rx_power, tx_power
-
-
-def get_telephony_signal_strength(ad):
-    #{'evdoEcio': -1, 'asuLevel': 28, 'lteSignalStrength': 14, 'gsmLevel': 0,
-    # 'cdmaAsuLevel': 99, 'evdoDbm': -120, 'gsmDbm': -1, 'cdmaEcio': -160,
-    # 'level': 2, 'lteLevel': 2, 'cdmaDbm': -120, 'dbm': -112, 'cdmaLevel': 0,
-    # 'lteAsuLevel': 28, 'gsmAsuLevel': 99, 'gsmBitErrorRate': 0,
-    # 'lteDbm': -112, 'gsmSignalStrength': 99}
-    try:
-        signal_strength = ad.droid.telephonyGetSignalStrength()
-        if not signal_strength:
-            signal_strength = {}
-    except Exception as e:
-        ad.log.error(e)
-        signal_strength = {}
-    return signal_strength
-
-
-def initiate_call(log,
-                  ad,
-                  callee_number,
-                  emergency=False,
-                  incall_ui_display=INCALL_UI_DISPLAY_FOREGROUND,
-                  video=False):
-    """Make phone call from caller to callee.
-
-    Args:
-        log: log object.
-        ad: Caller android device object.
-        callee_number: Callee phone number.
-        emergency : specify the call is emergency.
-            Optional. Default value is False.
-        incall_ui_display: show the dialer UI foreground or background
-        video: whether to initiate as video call
-
-    Returns:
-        result: if phone call is placed successfully.
-    """
-    ad.ed.clear_events(EVENT_CALL_STATE_CHANGED)
-    sub_id = get_outgoing_voice_sub_id(ad)
-    begin_time = get_device_epoch_time(ad)
-    ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id)
-    try:
-        # Make a Call
-        ad.log.info("Make a phone call to %s", callee_number)
-        if emergency:
-            ad.droid.telecomCallEmergencyNumber(callee_number)
-        else:
-            ad.droid.telecomCallNumber(callee_number, video)
-
-        # Verify OFFHOOK state
-        if not wait_for_call_offhook_for_subscription(
-                log, ad, sub_id, event_tracking_started=True):
-            ad.log.info("sub_id %s not in call offhook state", sub_id)
-            last_call_drop_reason(ad, begin_time=begin_time)
-            return False
-        else:
-            return True
-    finally:
-        if hasattr(ad, "sdm_log") and getattr(ad, "sdm_log"):
-            ad.adb.shell("i2cset -fy 3 64 6 1 b", ignore_status=True)
-            ad.adb.shell("i2cset -fy 3 65 6 1 b", ignore_status=True)
-        ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
-        if incall_ui_display == INCALL_UI_DISPLAY_FOREGROUND:
-            ad.droid.telecomShowInCallScreen()
-        elif incall_ui_display == INCALL_UI_DISPLAY_BACKGROUND:
-            ad.droid.showHomeScreen()
-
-
-def is_event_match(event, field, value):
-    """Return if <field> in "event" match <value> or not.
-
-    Args:
-        event: event to test. This event need to have <field>.
-        field: field to match.
-        value: value to match.
-
-    Returns:
-        True if <field> in "event" match <value>.
-        False otherwise.
-    """
-    return is_event_match_for_list(event, field, [value])
-
-
-def is_event_match_for_list(event, field, value_list):
-    """Return if <field> in "event" match any one of the value
-        in "value_list" or not.
-
-    Args:
-        event: event to test. This event need to have <field>.
-        field: field to match.
-        value_list: a list of value to match.
-
-    Returns:
-        True if <field> in "event" match one of the value in "value_list".
-        False otherwise.
-    """
-    try:
-        value_in_event = event['data'][field]
-    except KeyError:
-        return False
-    for value in value_list:
-        if value_in_event == value:
-            return True
-    return False
-
-
-def is_phone_in_call(log, ad):
-    """Return True if phone in call.
-
-    Args:
-        log: log object.
-        ad:  android device.
-    """
-    try:
-        return ad.droid.telecomIsInCall()
-    except:
-        return "mCallState=2" in ad.adb.shell(
-            "dumpsys telephony.registry | grep mCallState")
-
-
-def last_call_drop_reason(ad, begin_time=None):
-    reasons = ad.search_logcat(
-        "qcril_qmi_voice_map_qmi_to_ril_last_call_failure_cause", begin_time)
-    reason_string = ""
-    if reasons:
-        log_msg = "Logcat call drop reasons:"
-        for reason in reasons:
-            log_msg = "%s\n\t%s" % (log_msg, reason["log_message"])
-            if "ril reason str" in reason["log_message"]:
-                reason_string = reason["log_message"].split(":")[-1].strip()
-        ad.log.info(log_msg)
-    reasons = ad.search_logcat("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION",
-                               begin_time)
-    if reasons:
-        ad.log.warning("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION is seen")
-    ad.log.info("last call dumpsys: %s",
-                sorted(dumpsys_last_call_info(ad).items()))
-    return reason_string
-
-
-def toggle_airplane_mode(log, ad, new_state=None, strict_checking=True):
-    """ Toggle the state of airplane mode.
-
-    Args:
-        log: log handler.
-        ad: android_device object.
-        new_state: Airplane mode state to set to.
-            If None, opposite of the current state.
-        strict_checking: Whether to turn on strict checking that checks all features.
-
-    Returns:
-        result: True if operation succeed. False if error happens.
-    """
-    if ad.skip_sl4a:
-        return toggle_airplane_mode_by_adb(log, ad, new_state)
-    else:
-        return toggle_airplane_mode_msim(
-            log, ad, new_state, strict_checking=strict_checking)
-
-
-def toggle_airplane_mode_by_adb(log, ad, new_state=None):
-    """ Toggle the state of airplane mode.
-
-    Args:
-        log: log handler.
-        ad: android_device object.
-        new_state: Airplane mode state to set to.
-            If None, opposite of the current state.
-
-    Returns:
-        result: True if operation succeed. False if error happens.
-    """
-    cur_state = bool(int(ad.adb.shell("settings get global airplane_mode_on")))
-    if new_state == cur_state:
-        ad.log.info("Airplane mode already in %s", new_state)
-        return True
-    elif new_state is None:
-        new_state = not cur_state
-    ad.log.info("Change airplane mode from %s to %s", cur_state, new_state)
-    try:
-        ad.adb.shell("settings put global airplane_mode_on %s" % int(new_state))
-        ad.adb.shell("am broadcast -a android.intent.action.AIRPLANE_MODE")
-    except Exception as e:
-        ad.log.error(e)
-        return False
-    changed_state = bool(int(ad.adb.shell("settings get global airplane_mode_on")))
-    return changed_state == new_state
-
-
-def toggle_airplane_mode_msim(log, ad, new_state=None, strict_checking=True):
-    """ Toggle the state of airplane mode.
-
-    Args:
-        log: log handler.
-        ad: android_device object.
-        new_state: Airplane mode state to set to.
-            If None, opposite of the current state.
-        strict_checking: Whether to turn on strict checking that checks all features.
-
-    Returns:
-        result: True if operation succeed. False if error happens.
-    """
-
-    cur_state = ad.droid.connectivityCheckAirplaneMode()
-    if cur_state == new_state:
-        ad.log.info("Airplane mode already in %s", new_state)
-        return True
-    elif new_state is None:
-        new_state = not cur_state
-        ad.log.info("Toggle APM mode, from current tate %s to %s", cur_state,
-                    new_state)
-    sub_id_list = []
-    active_sub_info = ad.droid.subscriptionGetAllSubInfoList()
-    if active_sub_info:
-        for info in active_sub_info:
-            sub_id_list.append(info['subscriptionId'])
-
-    ad.ed.clear_all_events()
-    time.sleep(0.1)
-    service_state_list = []
-    if new_state:
-        service_state_list.append(SERVICE_STATE_POWER_OFF)
-        ad.log.info("Turn on airplane mode")
-
-    else:
-        # If either one of these 3 events show up, it should be OK.
-        # Normal SIM, phone in service
-        service_state_list.append(SERVICE_STATE_IN_SERVICE)
-        # NO SIM, or Dead SIM, or no Roaming coverage.
-        service_state_list.append(SERVICE_STATE_OUT_OF_SERVICE)
-        service_state_list.append(SERVICE_STATE_EMERGENCY_ONLY)
-        ad.log.info("Turn off airplane mode")
-
-    for sub_id in sub_id_list:
-        ad.droid.telephonyStartTrackingServiceStateChangeForSubscription(
-            sub_id)
-
-    timeout_time = time.time() + MAX_WAIT_TIME_AIRPLANEMODE_EVENT
-    ad.droid.connectivityToggleAirplaneMode(new_state)
-
-    try:
-        try:
-            event = ad.ed.wait_for_event(
-                EVENT_SERVICE_STATE_CHANGED,
-                is_event_match_for_list,
-                timeout=MAX_WAIT_TIME_AIRPLANEMODE_EVENT,
-                field=ServiceStateContainer.SERVICE_STATE,
-                value_list=service_state_list)
-            ad.log.info("Got event %s", event)
-        except Empty:
-            ad.log.warning("Did not get expected service state change to %s",
-                           service_state_list)
-        finally:
-            for sub_id in sub_id_list:
-                ad.droid.telephonyStopTrackingServiceStateChangeForSubscription(
-                    sub_id)
-    except Exception as e:
-        ad.log.error(e)
-
-    # APM on (new_state=True) will turn off bluetooth but may not turn it on
-    try:
-        if new_state and not _wait_for_bluetooth_in_state(
-                log, ad, False, timeout_time - time.time()):
-            ad.log.error(
-                "Failed waiting for bluetooth during airplane mode toggle")
-            if strict_checking: return False
-    except Exception as e:
-        ad.log.error("Failed to check bluetooth state due to %s", e)
-        if strict_checking:
-            raise
-
-    # APM on (new_state=True) will turn off wifi but may not turn it on
-    if new_state and not _wait_for_wifi_in_state(log, ad, False,
-                                                 timeout_time - time.time()):
-        ad.log.error("Failed waiting for wifi during airplane mode toggle on")
-        if strict_checking: return False
-
-    if ad.droid.connectivityCheckAirplaneMode() != new_state:
-        ad.log.error("Set airplane mode to %s failed", new_state)
-        return False
-    return True
-
-
-def toggle_cell_data_roaming(ad, state):
-    """Enable cell data roaming for default data subscription.
-
-    Wait for the data roaming status to be DATA_STATE_CONNECTED
-        or DATA_STATE_DISCONNECTED.
-
-    Args:
-        ad: Android Device Object.
-        state: True or False for enable or disable cell data roaming.
-
-    Returns:
-        True if success.
-        False if failed.
-    """
-    state_int = {True: DATA_ROAMING_ENABLE, False: DATA_ROAMING_DISABLE}[state]
-    action_str = {True: "Enable", False: "Disable"}[state]
-    if ad.droid.connectivityCheckDataRoamingMode() == state:
-        ad.log.info("Data roaming is already in state %s", state)
-        return True
-    if not ad.droid.connectivitySetDataRoaming(state_int):
-        ad.error.info("Fail to config data roaming into state %s", state)
-        return False
-    if ad.droid.connectivityCheckDataRoamingMode() == state:
-        ad.log.info("Data roaming is configured into state %s", state)
-        return True
-    else:
-        ad.log.error("Data roaming is not configured into state %s", state)
-        return False
-
-
-def wait_for_call_offhook_event(
-        log,
-        ad,
-        sub_id,
-        event_tracking_started=False,
-        timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT):
-    """Wait for an incoming call on specified subscription.
-
-    Args:
-        log: log object.
-        ad: android device object.
-        event_tracking_started: True if event tracking already state outside
-        timeout: time to wait for event
-
-    Returns:
-        True: if call offhook event is received.
-        False: if call offhook event is not received.
-    """
-    if not event_tracking_started:
-        ad.ed.clear_events(EVENT_CALL_STATE_CHANGED)
-        ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id)
-    try:
-        ad.ed.wait_for_event(
-            EVENT_CALL_STATE_CHANGED,
-            is_event_match,
-            timeout=timeout,
-            field=CallStateContainer.CALL_STATE,
-            value=TELEPHONY_STATE_OFFHOOK)
-        ad.log.info("Got event %s", TELEPHONY_STATE_OFFHOOK)
-    except Empty:
-        ad.log.info("No event for call state change to OFFHOOK")
-        return False
-    finally:
-        if not event_tracking_started:
-            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(
-                sub_id)
-    return True
-
-
-def wait_for_call_offhook_for_subscription(
-        log,
-        ad,
-        sub_id,
-        event_tracking_started=False,
-        timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
-        interval=WAIT_TIME_BETWEEN_STATE_CHECK):
-    """Wait for an incoming call on specified subscription.
-
-    Args:
-        log: log object.
-        ad: android device object.
-        sub_id: subscription ID
-        timeout: time to wait for ring
-        interval: checking interval
-
-    Returns:
-        True: if incoming call is received and answered successfully.
-        False: for errors
-    """
-    if not event_tracking_started:
-        ad.ed.clear_events(EVENT_CALL_STATE_CHANGED)
-        ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id)
-    offhook_event_received = False
-    end_time = time.time() + timeout
-    try:
-        while time.time() < end_time:
-            if not offhook_event_received:
-                if wait_for_call_offhook_event(log, ad, sub_id, True,
-                                               interval):
-                    offhook_event_received = True
-            telephony_state = ad.droid.telephonyGetCallStateForSubscription(
-                sub_id)
-            telecom_state = ad.droid.telecomGetCallState()
-            if telephony_state == TELEPHONY_STATE_OFFHOOK and (
-                    telecom_state == TELEPHONY_STATE_OFFHOOK):
-                ad.log.info("telephony and telecom are in OFFHOOK state")
-                return True
-            else:
-                ad.log.info(
-                    "telephony in %s, telecom in %s, expecting OFFHOOK state",
-                    telephony_state, telecom_state)
-            if offhook_event_received:
-                time.sleep(interval)
-    finally:
-        if not event_tracking_started:
-            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(
-                sub_id)
-
-
-def _wait_for_bluetooth_in_state(log, ad, state, max_wait):
-    # FIXME: These event names should be defined in a common location
-    _BLUETOOTH_STATE_ON_EVENT = 'BluetoothStateChangedOn'
-    _BLUETOOTH_STATE_OFF_EVENT = 'BluetoothStateChangedOff'
-    ad.ed.clear_events(_BLUETOOTH_STATE_ON_EVENT)
-    ad.ed.clear_events(_BLUETOOTH_STATE_OFF_EVENT)
-
-    ad.droid.bluetoothStartListeningForAdapterStateChange()
-    try:
-        bt_state = ad.droid.bluetoothCheckState()
-        if bt_state == state:
-            return True
-        if max_wait <= 0:
-            ad.log.error("Time out: bluetooth state still %s, expecting %s",
-                         bt_state, state)
-            return False
-
-        event = {
-            False: _BLUETOOTH_STATE_OFF_EVENT,
-            True: _BLUETOOTH_STATE_ON_EVENT
-        }[state]
-        event = ad.ed.pop_event(event, max_wait)
-        ad.log.info("Got event %s", event['name'])
-        return True
-    except Empty:
-        ad.log.error("Time out: bluetooth state still in %s, expecting %s",
-                     bt_state, state)
-        return False
-    finally:
-        ad.droid.bluetoothStopListeningForAdapterStateChange()
-
-
-def wait_for_droid_in_call(log, ad, max_time):
-    """Wait for android to be in call state.
-
-    Args:
-        log: log object.
-        ad:  android device.
-        max_time: maximal wait time.
-
-    Returns:
-        If phone become in call state within max_time, return True.
-        Return False if timeout.
-    """
-    return _wait_for_droid_in_state(log, ad, max_time, is_phone_in_call)
-
-
-def _wait_for_droid_in_state(log, ad, max_time, state_check_func, *args,
-                             **kwargs):
-    while max_time >= 0:
-        if state_check_func(log, ad, *args, **kwargs):
-            return True
-
-        time.sleep(WAIT_TIME_BETWEEN_STATE_CHECK)
-        max_time -= WAIT_TIME_BETWEEN_STATE_CHECK
-
-    return False
-
-
-# TODO: replace this with an event-based function
-def _wait_for_wifi_in_state(log, ad, state, max_wait):
-    return _wait_for_droid_in_state(log, ad, max_wait,
-        lambda log, ad, state: \
-                (True if ad.droid.wifiCheckState() == state else False),
-                state)
diff --git a/src/antlion/controllers/anritsu_lib/OWNERS b/src/antlion/controllers/anritsu_lib/OWNERS
deleted file mode 100644
index e4010df..0000000
--- a/src/antlion/controllers/anritsu_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-iguarna@google.com
-chaoyangf@google.com
-yixiang@google.com
-codycaldwell@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/anritsu_lib/__init__.py b/src/antlion/controllers/anritsu_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/anritsu_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/anritsu_lib/_anritsu_utils.py b/src/antlion/controllers/anritsu_lib/_anritsu_utils.py
deleted file mode 100644
index ea5736f..0000000
--- a/src/antlion/controllers/anritsu_lib/_anritsu_utils.py
+++ /dev/null
@@ -1,233 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Utility functions for for Anritsu Signalling Tester.
-"""
-# yapf: disable
-
-OPERATION_COMPLETE = 1
-NO_ERROR = 0
-
-ANRITSU_ERROR_CODES = {
-    0: 'No errors occurred',
-    2: 'The specified file does not exist',
-    14: 'The buffer size is insufficient',
-    29: 'The save destination is a write-protected file.',
-    80: 'A file with the same name already exists.'
-        ' (If Overwrite is specified to 0.)',
-    87: 'The specified value is wrong.',
-    112: 'The disk space is insufficient.',
-    183: 'SmartStudio is already running.',
-    1060: 'The control software has not been started or has already terminated',
-    1067: 'SmartStudio, control software or SMS Centre could not start due to'
-          'a problem or problems resulting from OS or the MD8475A system.',
-    1229: 'Connecting to the server failed.',
-    1235: 'A request is suspended.',
-    1460: 'The operation is terminated due to the expiration of the'
-          ' timeout period.',
-    9999: 'A GPIB command error occurred.',
-    536870912: 'The license could not be confirmed.',
-    536870913: 'The specified file cannot be loaded by the SmartStudio.',
-    536870914: 'The specified process ID does not exist.',
-    536870915: 'The received data does not exist.',
-    536870916: 'Simulation is not running.',
-    536870917: 'Simulation is running.',
-    536870918: 'Test Case has never been executed.',
-    536870919: 'The resource cannot be obtained.',
-    536870920: 'A resource protocol error, such as download error or'
-               ' license error, occurred.',
-    536870921: 'The function call has been in invalid status.',
-    536870922: 'The current Simulation Model does not allow the operation.',
-    536870923: 'The Cell name to be set does not exist.',
-    536870924: 'The test is being executed.',
-    536870925: 'The current UE status does not correspond to the'
-               ' test parameters.',
-    536870926: 'There is no LOG information because the simulation'
-               ' has not been executed.',
-    536870927: 'Measure Export has already been executed.',
-    536870928: 'SmartStudio is not connected to the SMS Centre.',
-    536870929: 'SmartStudio failed to send an SMS message to the SMS Centre.',
-    536870930: 'SmartStudio has successfully sent an SMS message'
-               ' to the SMS Centre,but the SMS Centre judges it as an error.',
-    536870931: 'The processing that is unavailable with the current system'
-               ' status has been executed.',
-    536870932: 'The option could not be confirmed.',
-    536870933: 'Measure Export has been stopped.',
-    536870934: 'SmartStudio cannot load the specified file because the'
-               ' version is old.',
-    536870935: 'The data with the specified PDN number does not exist.',
-    536870936: 'The data with the specified Dedicated number does not exist.',
-    536870937: 'The PDN data cannot be added because the upper limit of the'
-               ' number of PDN data has been reached.',
-    536870938: 'The number of antennas, which cannot be set to the current'
-               ' Simulation Model,has been specified.',
-    536870939: 'Calibration of path loss failed.',
-    536870940: 'There is a parameter conflict.',
-    536870941: 'The DL Ref Power setting is out of the setting range'
-               ' at W-CDMA (Evolution).',
-    536870942: 'DC-HSDPA is not available for the current channel setting.',
-    536870943: 'The specified Packet Rate cannot be used by the current'
-               ' Simulation Model.',
-    536870944: 'The W-CDMA Cell parameter F-DPCH is set to Enable.',
-    536870945: 'Target is invalid.',
-    536870946: 'The PWS Centre detects an error.',
-    536870947: 'The Ec/Ior setting is invalid.',
-    536870948: 'The combination of Attach Type and TA Update Type is invalid.',
-    536870949: 'The license of the option has expired.',
-    536870950: 'The Ping command is being executed.',
-    536870951: 'The Ping command is not being executed.',
-    536870952: 'The current Test Case parameter setting is wrong.',
-    536870953: 'The specified IP address is the same as that of Default Gateway'
-               'specified by Simulation parameter.',
-    536870954: 'TFT IE conversion failed.',
-    536870955: 'Saving settings to the SmartStudio scenario failed.',
-    536875008: 'An error exists in the parameter configuration.'
-               '(This error applies only to the current version.)',
-    536936448: 'License verification failed.',
-    536936449: 'The IMS Services cannot load the specified file.',
-    536936462: 'Simulation is not performed and no log information exists.',
-    536936467: 'The executed process is inoperable in the current status'
-               ' of Visual User Agent.',
-    536936707: 'The specified Virtual Network is not running.',
-    536936709: 'The specified Virtual Network is running. '
-               'Any one of the Virtual Networks is running.',
-    536936727: 'The specified Virtual Network does not exist.',
-    536936729: 'When the Virtual Network already exists.',
-    554762241: 'The RF Measurement launcher cannot be accessed.',
-    554762242: 'License check of the RF Measurement failed.',
-    554762243: 'Function is called when RF Measurement cannot be set.',
-    554762244: 'RF Measurement has been already started.',
-    554762245: 'RF Measurement failed to start due to a problem resulting'
-               ' from OS or the MD8475A system.',
-    554762246: 'RF Measurement is not started or is already terminated.',
-    554762247: 'There is a version mismatch between RF Measurement and CAL.',
-    554827777: 'The specified value for RF Measurement is abnormal.',
-    554827778: 'GPIB command error has occurred in RF Measurement.',
-    554827779: 'Invalid file path was specified to RF Measurement.',
-    554827780: 'RF Measurement argument is NULL pointer.',
-    555810817: 'RF Measurement is now performing the measurement.',
-    555810818: 'RF Measurement is now not performing the measurement.',
-    555810819: 'RF Measurement is not measured yet. (There is no result '
-               'information since measurement is not performed.)',
-    555810820: 'An error has occurred when RF Measurement'
-               ' starts the measurement.',
-    555810821: 'Simulation has stopped when RF Measurement is '
-               'performing the measurement.',
-    555810822: 'An error has been retrieved from the Platform when '
-               'RF Measurement is performing the measurement.',
-    555810823: 'Measurement has been started in the system state where RF '
-               'Measurement is invalid.',
-    556859393: 'RF Measurement is now saving a file.',
-    556859394: 'There is insufficient disk space when saving'
-               'a Measure Result file of RF Measurement.',
-    556859395: 'An internal error has occurred or USB cable has been'
-               ' disconnected when saving a Measure Result'
-               ' file of RF Measurement.',
-    556859396: 'A write-protected file was specified as the save destination'
-               ' when saving a Measure Result file of RF Measurement.',
-    568328193: 'An internal error has occurred in RF Measurement.',
-    687865857: 'Calibration Measure DSP is now being measured.',
-    687865858: 'Calibration measurement failed.',
-    687865859: 'Calibration slot is empty or its system does not apply.',
-    687865860: 'Unexpected command is received from Calibration HWC.',
-    687865861: 'Failed to receive the Calibration measurement result.',
-    687865862: 'Failed to open the correction value file on the'
-               ' Calibration HDD.',
-    687865863: 'Failed to move the pointer on the Calibration correction'
-               ' value table.',
-    687865864: 'Failed to write the correction value to the Calibration'
-               ' correction value file on the Calibration HDD.',
-    687865865: 'Failed to load the correction value from the Calibration HDD.',
-    687865866: 'Failed to create a directory to which the correction value '
-               'file on the Calibration HDD is saved.',
-    687865867: 'Correction data has not been written in the'
-               ' Calibration-specified correction table.',
-    687865868: 'Data received from Calibration HWC does not exist.',
-    687865869: 'Data has not been written to the Flash ROM'
-               ' of Calibration BASE UNIT.',
-    687865870: 'Correction data has not been written to the'
-               ' Calibration-specified sector.',
-    687866111: 'An calibration error other than described above occurred.',
-}
-
-
-def _error_code_tostring(error_code):
-    ''' returns the description of the error from the error code
-    returned by anritsu MD8475A '''
-    try:
-        error_string = ANRITSU_ERROR_CODES[error_code]
-    except KeyError:
-        error_string = "Error : {} ".format(error_code)
-
-    return error_string
-
-
-class AnritsuUtils(object):
-    def gsm_encode(text):
-        '''To encode text string with GSM 7-bit alphabet for common symbols'''
-        table = {' ': '%20', '!': '%21', '\"': '%22', '#': '%23', '$': '%24',
-                 '/': '%2F', '%': '%25', '&': '%26', '\'': '%27', '(': '%28',
-                 ')': '%29', '*': '%2A', '+': '%2B', ',': '%2C', ':': '%3A',
-                 ';': '%3B', '<': '%3C', '=': '%3D', '>': '%3E', '?': '%3F',
-                 '@': '%40', '[': '%5B', ']': '%5D', '_': '%5F', 'é': '%C3%A9'}
-        coded_str = ""
-        for char in text:
-            if char in table:
-                coded_str += table[char]
-            else:
-                coded_str += char
-        return coded_str
-
-    def gsm_decode(text):
-        '''To decode text string with GSM 7-bit alphabet for common symbols'''
-        table = {'%20': ' ', '%21': '!', '%22': '\"', '%23': '#', '%24': '$',
-                 '%2F': '/', '%25': '%', '%26': '&', '%27': '\'', '%28': '(',
-                 '%29': ')', '%2A': '*', '%2B': '+', '%2C': ',', '%3A': ':',
-                 '%3B': ';', '%3C': '<', '%3D': '=', '%3E': '>', '%3F': '?',
-                 '%40': '@', '%5B': '[', '%5D': ']', '%5F': '_', '%C3%A9': 'é'}
-        coded_str = text
-        for char in table:
-            if char in text:
-                coded_str = coded_str.replace(char, table[char])
-        return coded_str
-
-    def cdma_encode(text):
-        '''To encode text string with GSM 7-bit alphabet for common symbols'''
-        table = {' ': '%20', '!': '%21', '\"': '%22', '#': '%23', '$': '%24',
-                 '/': '%2F', '%': '%25', '&': '%26', '\'': '%27', '(': '%28',
-                 ')': '%29', '*': '%2A', '+': '%2B', ',': '%2C', ':': '%3A',
-                 ';': '%3B', '<': '%3C', '=': '%3D', '>': '%3E', '?': '%3F',
-                 '@': '%40', '[': '%5B', ']': '%5D', '_': '%5F'}
-        coded_str = ""
-        for char in text:
-            if char in table:
-                coded_str += table[char]
-            else:
-                coded_str += char
-        return coded_str
-
-class AnritsuError(Exception):
-    '''Exception for errors related to Anritsu.'''
-    def __init__(self, error, command=None):
-        self._error_code = error
-        self._error_message = _error_code_tostring(self._error_code)
-        if command is not None:
-            self._error_message = "Command {} returned the error: '{}'".format(
-                                  command, self._error_message)
-
-    def __str__(self):
-        return self._error_message
-# yapf: enable
diff --git a/src/antlion/controllers/anritsu_lib/band_constants.py b/src/antlion/controllers/anritsu_lib/band_constants.py
deleted file mode 100644
index 18dd5bc..0000000
--- a/src/antlion/controllers/anritsu_lib/band_constants.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# GSM BAND constants
-GSM_BAND_GSM450 = "GSM450"
-GSM_BAND_GSM480 = "GSM480"
-GSM_BAND_GSM850 = "GSM850"
-GSM_BAND_PGSM900 = "P-GSM900"
-GSM_BAND_EGSM900 = "E-GSM900"
-GSM_BAND_RGSM900 = "R-GSM900"
-GSM_BAND_DCS1800 = "DCS1800"
-GSM_BAND_PCS1900 = "PCS1900"
-
-LTE_BAND_2 = 2
-LTE_BAND_4 = 4
-LTE_BAND_12 = 12
-WCDMA_BAND_1 = 1
-WCDMA_BAND_2 = 2
diff --git a/src/antlion/controllers/anritsu_lib/cell_configurations.py b/src/antlion/controllers/anritsu_lib/cell_configurations.py
deleted file mode 100644
index 83773e0..0000000
--- a/src/antlion/controllers/anritsu_lib/cell_configurations.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Sanity tests for voice tests in telephony
-"""
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_PCS1900
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_GSM850
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_2
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_4
-from antlion.controllers.anritsu_lib.band_constants import LTE_BAND_12
-from antlion.controllers.anritsu_lib.band_constants import WCDMA_BAND_1
-from antlion.controllers.anritsu_lib.band_constants import WCDMA_BAND_2
-from antlion.controllers.anritsu_lib.md8475a import BtsBandwidth
-
-# Different Cell configurations
-# TMO bands
-lte_band4_ch2000_fr2115_pcid1_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 11,
-    'cid': 1,
-    'pcid': 1,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid2_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 12,
-    'cid': 2,
-    'pcid': 2,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid3_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 13,
-    'cid': 3,
-    'pcid': 3,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid4_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 14,
-    'cid': 4,
-    'pcid': 4,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid5_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 15,
-    'cid': 5,
-    'pcid': 5,
-    'channel': 2000
-}
-
-lte_band4_ch2000_fr2115_pcid6_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 16,
-    'cid': 6,
-    'pcid': 6,
-    'channel': 2000
-}
-
-lte_band4_ch2050_fr2120_pcid7_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 17,
-    'cid': 7,
-    'pcid': 7,
-    'channel': 2050
-}
-
-lte_band4_ch2250_fr2140_pcid8_cell = {
-    'band': LTE_BAND_4,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 18,
-    'cid': 8,
-    'pcid': 8,
-    'channel': 2250
-}
-
-lte_band2_ch900_fr1960_pcid9_cell = {
-    'band': LTE_BAND_2,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 19,
-    'cid': 9,
-    'pcid': 9,
-    'channel': 900
-}
-
-lte_band12_ch5095_fr737_pcid10_cell = {
-    'band': LTE_BAND_12,
-    'bandwidth': BtsBandwidth.LTE_BANDWIDTH_10MHz,
-    'mcc': '001',
-    'mnc': '01',
-    'tac': 20,
-    'cid': 10,
-    'pcid': 10,
-    'channel': 5095
-}
-
-wcdma_band1_ch10700_fr2140_cid31_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 31,
-    'rac': 31,
-    'cid': 31,
-    'channel': 10700,
-    'psc': 31
-}
-
-wcdma_band1_ch10700_fr2140_cid32_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 32,
-    'rac': 32,
-    'cid': 32,
-    'channel': 10700,
-    'psc': 32
-}
-
-wcdma_band1_ch10700_fr2140_cid33_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 33,
-    'rac': 33,
-    'cid': 33,
-    'channel': 10700,
-    'psc': 33
-}
-
-wcdma_band1_ch10700_fr2140_cid34_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 34,
-    'rac': 34,
-    'cid': 34,
-    'channel': 10700,
-    'psc': 34
-}
-
-wcdma_band1_ch10700_fr2140_cid35_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 35,
-    'rac': 35,
-    'cid': 35,
-    'channel': 10700,
-    'psc': 35
-}
-
-wcdma_band1_ch10575_fr2115_cid36_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 36,
-    'rac': 36,
-    'cid': 36,
-    'channel': 10575,
-    'psc': 36
-}
-
-wcdma_band1_ch10800_fr2160_cid37_cell = {
-    'band': WCDMA_BAND_1,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 37,
-    'rac': 37,
-    'cid': 37,
-    'channel': 10800,
-    'psc': 37
-}
-
-wcdma_band2_ch9800_fr1960_cid38_cell = {
-    'band': WCDMA_BAND_2,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 38,
-    'rac': 38,
-    'cid': 38,
-    'channel': 9800,
-    'psc': 38
-}
-
-wcdma_band2_ch9900_fr1980_cid39_cell = {
-    'band': WCDMA_BAND_2,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 39,
-    'rac': 39,
-    'cid': 39,
-    'channel': 9900,
-    'psc': 39
-}
-
-gsm_band1900_ch512_fr1930_cid51_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 51,
-    'rac': 51,
-    'cid': 51,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid52_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 52,
-    'rac': 52,
-    'cid': 52,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid53_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 53,
-    'rac': 53,
-    'cid': 53,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid54_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 54,
-    'rac': 54,
-    'cid': 54,
-    'channel': 512,
-}
-
-gsm_band1900_ch512_fr1930_cid55_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 55,
-    'rac': 55,
-    'cid': 55,
-    'channel': 512,
-}
-
-gsm_band1900_ch640_fr1955_cid56_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 56,
-    'rac': 56,
-    'cid': 56,
-    'channel': 640,
-}
-
-gsm_band1900_ch750_fr1977_cid57_cell = {
-    'band': GSM_BAND_PCS1900,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 57,
-    'rac': 57,
-    'cid': 57,
-    'channel': 750,
-}
-
-gsm_band850_ch128_fr869_cid58_cell = {
-    'band': GSM_BAND_GSM850,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 58,
-    'rac': 58,
-    'cid': 58,
-    'channel': 128,
-}
-
-gsm_band850_ch251_fr893_cid59_cell = {
-    'band': GSM_BAND_GSM850,
-    'mcc': '001',
-    'mnc': '01',
-    'lac': 59,
-    'rac': 59,
-    'cid': 59,
-    'channel': 251,
-}
diff --git a/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py b/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py
deleted file mode 100644
index 55a89e9..0000000
--- a/src/antlion/controllers/anritsu_lib/md8475_cellular_simulator.py
+++ /dev/null
@@ -1,732 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import math
-import ntpath
-import time
-import antlion.controllers.cellular_simulator as cc
-from antlion.controllers.cellular_lib import LteSimulation
-from antlion.controllers.anritsu_lib import md8475a
-from antlion.controllers.anritsu_lib import _anritsu_utils as anritsu
-
-
-class MD8475CellularSimulator(cc.AbstractCellularSimulator):
-
-    MD8475_VERSION = 'A'
-
-    # Indicates if it is able to use 256 QAM as the downlink modulation for LTE
-    LTE_SUPPORTS_DL_256QAM = False
-
-    # Indicates if it is able to use 64 QAM as the uplink modulation for LTE
-    LTE_SUPPORTS_UL_64QAM = False
-
-    # Indicates if 4x4 MIMO is supported for LTE
-    LTE_SUPPORTS_4X4_MIMO = False
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = 2
-
-    # The maximum power that the equipment is able to transmit
-    MAX_DL_POWER = -10
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-    LTE_BASIC_SIM_FILE = 'SIM_default_LTE.wnssp'
-    LTE_BASIC_CELL_FILE = 'CELL_LTE_config.wnscp'
-    LTE_CA_BASIC_SIM_FILE = 'SIM_LTE_CA.wnssp'
-    LTE_CA_BASIC_CELL_FILE = 'CELL_LTE_CA_config.wnscp'
-
-    # Filepath to the config files stored in the Anritsu callbox. Needs to be
-    # formatted to replace {} with either A or B depending on the model.
-    CALLBOX_CONFIG_PATH = 'C:\\Users\\MD8475A\\Documents\\DAN_configs\\'
-
-    def __init__(self, ip_address):
-        """ Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of the MD8475 instrument
-        """
-        super().__init__()
-
-        try:
-            self.anritsu = md8475a.MD8475A(ip_address,
-                                           md8475_version=self.MD8475_VERSION)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('Could not connect to MD8475.')
-
-        self.bts = None
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        self.anritsu.stop_simulation()
-        self.anritsu.disconnect()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        cell_file_name = self.LTE_BASIC_CELL_FILE
-        sim_file_name = self.LTE_BASIC_SIM_FILE
-
-        cell_file_path = ntpath.join(self.CALLBOX_CONFIG_PATH, cell_file_name)
-        sim_file_path = ntpath.join(self.CALLBOX_CONFIG_PATH, sim_file_name)
-
-        self.anritsu.load_simulation_paramfile(sim_file_path)
-        self.anritsu.load_cell_paramfile(cell_file_path)
-
-        # MD4875A supports only 2 carriers. The MD4875B class adds other cells.
-        self.bts = [
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS1),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS2)
-        ]
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated band combination.
-
-        The reason why this is implemented in a separate method and not calling
-        LteSimulation.BtsConfig for each separate band is that configuring each
-        ssc cannot be done separately, as it is necessary to know which
-        carriers are on the same band in order to decide which RF outputs can
-        be shared in the test equipment.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        self.num_carriers = len(bands)
-
-        # Validate the number of carriers.
-        if self.num_carriers > self.LTE_MAX_CARRIERS:
-            raise cc.CellularSimulatorError('The test equipment supports up '
-                                            'to {} carriers.'.format(
-                                                self.LTE_MAX_CARRIERS))
-
-        # Initialize the base stations in the test equipment
-        self.anritsu.set_simulation_model(
-            *[md8475a.BtsTechnology.LTE for _ in range(self.num_carriers)],
-            reset=False)
-
-        # If base stations use different bands, make sure that the RF cards are
-        # not being shared by setting the right maximum MIMO modes
-        if self.num_carriers == 2:
-            # RF cards are never shared when doing 2CA so 4X4 can be done in
-            # both base stations.
-            self.bts[0].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-        elif self.num_carriers == 3:
-            # 4X4 can only be done in the second base station if it is shared
-            # with the primary. If the RF cards cannot be shared, then at most
-            # 2X2 can be done.
-            self.bts[0].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            if bands[0] == bands[1]:
-                self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_4X4
-            else:
-                self.bts[1].mimo_support = md8475a.LteMimoMode.MIMO_2X2
-            self.bts[2].mimo_support = md8475a.LteMimoMode.MIMO_2X2
-        elif self.num_carriers > 3:
-            raise NotImplementedError('The controller doesn\'t implement more '
-                                      'than 3 carriers for MD8475B yet.')
-
-        # Enable carrier aggregation if there is more than one carrier
-        if self.num_carriers > 1:
-            self.anritsu.set_carrier_aggregation_enabled()
-
-        # Restart the simulation as changing the simulation model will stop it.
-        self.anritsu.start_simulation()
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        nrb_ul = int(self.bts[bts_index].nrb_ul)
-        max_nrb_ul = self.bts[bts_index].max_nrb_ul
-        input_level = str(
-            round(input_power - 10 * math.log10(nrb_ul / max_nrb_ul), 1))
-        if nrb_ul < max_nrb_ul:
-            self.log.info('Number of UL RBs ({}) is less than the maximum RB '
-                          'allocation ({}). Increasing UL reference power to '
-                          '{} dbm to compensate'.format(
-                              nrb_ul, max_nrb_ul, input_level))
-        self.bts[bts_index].input_level = input_level
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        self.bts[bts_index].output_level = output_power
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number
-        """
-        # Temporarily adding this line to workaround a bug in the
-        # Anritsu callbox in which the channel number needs to be set
-        # to a different value before setting it to the final one.
-        self.bts[bts_index].dl_channel = str(int(channel_number + 1))
-        time.sleep(8)
-        self.bts[bts_index].dl_channel = str(int(channel_number))
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        if enabled and not self.LTE_SUPPORTS_DL_256QAM:
-            raise RuntimeError('256 QAM is not supported')
-        self.bts[bts_index].lte_dl_modulation_order = \
-            md8475a.ModulationType.Q256 if enabled else md8475a.ModulationType.Q64
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        self.bts[bts_index].lte_ul_modulation_order = \
-            md8475a.ModulationType.Q64 if enabled else md8475a.ModulationType.Q16
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        if mac_padding:
-            self.bts[bts_index].tbs_pattern = 'FULLALLOCATION'
-        else:
-            self.bts[bts_index].tbs_pattern = 'OFF'
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        self.anritsu.set_lte_rrc_status_change(enabled)
-        if enabled:
-            self.anritsu.set_lte_rrc_status_change_timer(time)
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        self.bts[bts_index].cfi = cfi
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        # TODO (b/146068532): implement.
-        self.bts[bts_index].paging_duration = cycle_duration
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        self.bts[bts_index].phich_resource = phich
-
-    def set_drx_connected_mode(self, bts_index, active):
-        """ Sets the DRX connected mode
-
-        Args:
-            bts_index: the base station number
-            active: Boolean indicating whether cDRX mode
-                is active
-        """
-        mode = 'MANUAL' if active else 'OFF'
-        self.bts[bts_index].drx_connected_mode = mode
-
-    def set_drx_on_duration_timer(self, bts_index, timer):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to wait and check for user data
-                after waking from the DRX cycle
-        """
-        self.bts[bts_index].drx_on_duration_timer = timer
-
-    def set_drx_inactivity_timer(self, bts_index, timer):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            bts_index: the base station number
-            timer: The time interval to wait before entering DRX mode
-        """
-        self.bts[bts_index].drx_inactivity_timer = timer
-
-    def set_drx_retransmission_timer(self, bts_index, timer):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to remain active
-
-        """
-        self.bts[bts_index].drx_retransmission_timer = timer
-
-    def set_drx_long_cycle(self, bts_index, cycle):
-        """ Sets the amount of subframes representing a DRX long cycle.
-
-        Args:
-            bts_index: the base station number
-            cycle: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-        """
-        self.bts[bts_index].drx_long_cycle = cycle
-
-    def set_drx_long_cycle_offset(self, bts_index, offset):
-        """ Sets the offset used to determine the subframe number
-        to begin the long drx cycle
-
-        Args:
-            bts_index: the base station number
-            offset: Number in range 0 to (long cycle - 1)
-        """
-        self.bts[bts_index].drx_long_cycle_offset = offset
-
-    def set_band(self, bts_index, band):
-        """ Sets the right duplex mode before switching to a new band.
-
-        Args:
-            bts_index: the base station number
-            band: desired band
-        """
-        bts = self.bts[bts_index]
-
-        # The callbox won't restore the band-dependent default values if the
-        # request is to switch to the same band as the one the base station is
-        # currently using. To ensure that default values are restored, go to a
-        # different band before switching.
-        if int(bts.band) == band:
-            # Using bands 1 and 2 but it could be any others
-            bts.band = '1' if band != 1 else '2'
-            # Switching to config.band will be handled by the parent class
-            # implementation of this method.
-
-        bts.duplex_mode = self.get_duplex_mode(band).value
-        bts.band = band
-        time.sleep(5)  # It takes some time to propagate the new band
-
-    def get_duplex_mode(self, band):
-        """ Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number
-        Returns:
-            an variable of class DuplexMode indicating if band is FDD or TDD
-        """
-
-        if 33 <= int(band) <= 46:
-            return LteSimulation.DuplexMode.TDD
-        else:
-            return LteSimulation.DuplexMode.FDD
-
-    def set_tdd_config(self, bts_index, config):
-        """ Sets the frame structure for TDD bands.
-
-        Args:
-            bts_index: the base station number
-            config: the desired frame structure. An int between 0 and 6.
-        """
-
-        if not 0 <= config <= 6:
-            raise ValueError("The frame structure configuration has to be a "
-                             "number between 0 and 6")
-
-        self.bts[bts_index].uldl_configuration = config
-
-        # Wait for the setting to propagate
-        time.sleep(5)
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number
-        """
-        # Cast to int in case it was passed as a string
-        ssf_config = int(ssf_config)
-
-        if not 0 <= ssf_config <= 9:
-            raise ValueError('The Special Sub-Frame configuration has to be a '
-                             'number between 0 and 9.')
-
-        self.bts[bts_index].tdd_special_subframe = ssf_config
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the LTE channel bandwidth (MHz)
-
-        Args:
-            bts_index: the base station number
-            bandwidth: desired bandwidth (MHz)
-        """
-        bts = self.bts[bts_index]
-
-        if bandwidth == 20:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_20MHz
-        elif bandwidth == 15:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_15MHz
-        elif bandwidth == 10:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_10MHz
-        elif bandwidth == 5:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_5MHz
-        elif bandwidth == 3:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_3MHz
-        elif bandwidth == 1.4:
-            bts.bandwidth = md8475a.BtsBandwidth.LTE_BANDWIDTH_1dot4MHz
-        else:
-            msg = "Bandwidth = {} MHz is not valid for LTE".format(bandwidth)
-            self.log.error(msg)
-            raise ValueError(msg)
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def set_mimo_mode(self, bts_index, mimo):
-        """ Sets the number of DL antennas for the desired MIMO mode.
-
-        Args:
-            bts_index: the base station number
-            mimo: object of class MimoMode
-        """
-
-        bts = self.bts[bts_index]
-
-        # If the requested mimo mode is not compatible with the current TM,
-        # warn the user before changing the value.
-
-        if mimo == LteSimulation.MimoMode.MIMO_1x1:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM1,
-                    LteSimulation.TransmissionMode.TM7
-            ]:
-                self.log.warning(
-                    "Using only 1 DL antennas is not allowed with "
-                    "the current transmission mode. Changing the "
-                    "number of DL antennas will override this "
-                    "setting.")
-            bts.dl_antenna = 1
-        elif mimo == LteSimulation.MimoMode.MIMO_2x2:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM2,
-                    LteSimulation.TransmissionMode.TM3,
-                    LteSimulation.TransmissionMode.TM4,
-                    LteSimulation.TransmissionMode.TM8,
-                    LteSimulation.TransmissionMode.TM9
-            ]:
-                self.log.warning("Using two DL antennas is not allowed with "
-                                 "the current transmission mode. Changing the "
-                                 "number of DL antennas will override this "
-                                 "setting.")
-            bts.dl_antenna = 2
-        elif mimo == LteSimulation.MimoMode.MIMO_4x4 and \
-            self.LTE_SUPPORTS_4X4_MIMO:
-            if bts.transmode not in [
-                    LteSimulation.TransmissionMode.TM2,
-                    LteSimulation.TransmissionMode.TM3,
-                    LteSimulation.TransmissionMode.TM4,
-                    LteSimulation.TransmissionMode.TM9
-            ]:
-                self.log.warning("Using four DL antennas is not allowed with "
-                                 "the current transmission mode. Changing the "
-                                 "number of DL antennas will override this "
-                                 "setting.")
-
-            bts.dl_antenna = 4
-        else:
-            RuntimeError("The requested MIMO mode is not supported.")
-
-    def set_scheduling_mode(self, bts_index, scheduling, mcs_dl, mcs_ul,
-                            nrb_dl, nrb_ul):
-        """ Sets the scheduling mode for LTE
-
-        Args:
-            bts_index: the base station number
-            scheduling: DYNAMIC or STATIC scheduling (Enum list)
-            mcs_dl: Downlink MCS (only for STATIC scheduling)
-            mcs_ul: Uplink MCS (only for STATIC scheduling)
-            nrb_dl: Number of RBs for downlink (only for STATIC scheduling)
-            nrb_ul: Number of RBs for uplink (only for STATIC scheduling)
-        """
-
-        bts = self.bts[bts_index]
-        bts.lte_scheduling_mode = scheduling.value
-
-        if scheduling == LteSimulation.SchedulingMode.STATIC:
-
-            if not all([nrb_dl, nrb_ul, mcs_dl, mcs_ul]):
-                raise ValueError('When the scheduling mode is set to manual, '
-                                 'the RB and MCS parameters are required.')
-
-            bts.packet_rate = md8475a.BtsPacketRate.LTE_MANUAL
-            bts.lte_mcs_dl = mcs_dl
-            bts.lte_mcs_ul = mcs_ul
-            bts.nrb_dl = nrb_dl
-            bts.nrb_ul = nrb_ul
-
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-
-        # Trigger UE capability enquiry from network to get
-        # UE supported CA band combinations. Here freq_bands is a hex string.
-        self.anritsu.trigger_ue_capability_enquiry(ue_capability_enquiry)
-
-        testcase = self.anritsu.get_AnritsuTestCases()
-        # A bug in the instrument's software (b/139547391) requires the test
-        # procedure to be set to whatever was the previous configuration before
-        # setting it to MULTICELL.
-        testcase.procedure = md8475a.TestProcedure(testcase.procedure)
-        testcase.procedure = md8475a.TestProcedure.PROCEDURE_MULTICELL
-        testcase.power_control = md8475a.TestPowerControl.POWER_CONTROL_DISABLE
-        testcase.measurement_LTE = md8475a.TestMeasurement.MEASUREMENT_DISABLE
-
-        # Enable the secondary carrier base stations for CA
-        for bts_index in range(1, self.num_carriers):
-            self.bts[bts_index].dl_cc_enabled = True
-
-        self.anritsu.start_testcase()
-
-        retry_counter = 0
-        self.log.info("Waiting for the test case to start...")
-        time.sleep(5)
-
-        while self.anritsu.get_testcase_status() == "0":
-            retry_counter += 1
-            if retry_counter == 3:
-                raise RuntimeError(
-                    "The test case failed to start after {} "
-                    "retries. The connection between the phone "
-                    "and the base station might be unstable.".format(
-                        retry_counter))
-            time.sleep(10)
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """ Sets the transmission mode for the LTE basetation
-
-        Args:
-            bts_index: the base station number
-            tmode: Enum list from class 'TransmissionModeLTE'
-        """
-
-        bts = self.bts[bts_index]
-
-        # If the selected transmission mode does not support the number of DL
-        # antennas, throw an exception.
-        if (tmode in [
-                LteSimulation.TransmissionMode.TM1,
-                LteSimulation.TransmissionMode.TM7
-        ] and bts.dl_antenna != '1'):
-            # TM1 and TM7 only support 1 DL antenna
-            raise ValueError("{} allows only one DL antenna. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.".format(tmode.value))
-        elif (tmode == LteSimulation.TransmissionMode.TM8
-              and bts.dl_antenna != '2'):
-            # TM8 requires 2 DL antennas
-            raise ValueError("TM2 requires two DL antennas. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.")
-        elif (tmode in [
-                LteSimulation.TransmissionMode.TM2,
-                LteSimulation.TransmissionMode.TM3,
-                LteSimulation.TransmissionMode.TM4,
-                LteSimulation.TransmissionMode.TM9
-        ] and bts.dl_antenna == '1'):
-            # TM2, TM3, TM4 and TM9 require 2 or 4 DL antennas
-            raise ValueError("{} requires at least two DL atennas. Change the "
-                             "number of DL antennas before setting the "
-                             "transmission mode.".format(tmode.value))
-
-        # The TM mode is allowed for the current number of DL antennas, so it
-        # is safe to change this setting now
-        bts.transmode = tmode.value
-
-        time.sleep(5)  # It takes some time to propagate the new settings
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_registration_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone did not attach before '
-                                            'the timeout period ended.')
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_communication_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'Communication state before '
-                                            'the timeout period ended.')
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.anritsu.wait_for_idle_state(time_to_wait=timeout)
-        except anritsu.AnritsuError:
-            raise cc.CellularSimulatorError('The phone was not in Idle state '
-                                            'before the time the timeout '
-                                            'period ended.')
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        if self.anritsu.get_smartstudio_status() == \
-            md8475a.ProcessingStatus.PROCESS_STATUS_NOTRUN.value:
-            self.log.info('Device cannot be detached because simulation is '
-                          'not running.')
-            return
-        self.anritsu.set_simulation_state_to_poweroff()
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        self.anritsu.stop_simulation()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        try:
-            self.anritsu.start_ip_traffic()
-        except md8475a.AnritsuError as inst:
-            # This typically happens when traffic is already running.
-            # TODO (b/141962691): continue only if traffic is running
-            self.log.warning(str(inst))
-        time.sleep(4)
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        try:
-            self.anritsu.stop_ip_traffic()
-        except md8475a.AnritsuError as inst:
-            # This typically happens when traffic has already been stopped
-            # TODO (b/141962691): continue only if traffic is stopped
-            self.log.warning(str(inst))
-        time.sleep(2)
-
-    def get_measured_pusch_power(self):
-        """ Queries PUSCH power measured at the callbox.
-
-        Returns:
-            The PUSCH power in the primary input port.
-        """
-        # Try three times before raising an exception. This is needed because
-        # the callbox sometimes reports an active chain as 'DEACTIVE'.
-        retries_left = 3
-
-        while retries_left > 0:
-
-            ul_pusch = self.anritsu.get_measured_pusch_power().split(',')[0]
-
-            if ul_pusch != 'DEACTIVE':
-                return float(ul_pusch)
-
-            time.sleep(3)
-            retries_left -= 1
-            self.log.info('Chain shows as inactive. %d retries left.' %
-                          retries_left)
-
-        raise cc.CellularSimulatorError('Could not get measured PUSCH power.')
-
-
-class MD8475BCellularSimulator(MD8475CellularSimulator):
-
-    MD8475_VERSION = 'B'
-
-    # Indicates if it is able to use 256 QAM as the downlink modulation for LTE
-    LTE_SUPPORTS_DL_256QAM = True
-
-    # Indicates if it is able to use 64 QAM as the uplink modulation for LTE
-    LTE_SUPPORTS_UL_64QAM = True
-
-    # Indicates if 4x4 MIMO is supported for LTE
-    LTE_SUPPORTS_4X4_MIMO = True
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = 4
-
-    # The maximum power that the equipment is able to transmit
-    MAX_DL_POWER = -30
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-    LTE_BASIC_SIM_FILE = 'SIM_default_LTE.wnssp2'
-    LTE_BASIC_CELL_FILE = 'CELL_LTE_config.wnscp2'
-    LTE_CA_BASIC_SIM_FILE = 'SIM_LTE_CA.wnssp2'
-    LTE_CA_BASIC_CELL_FILE = 'CELL_LTE_CA_config.wnscp2'
-
-    # Filepath to the config files stored in the Anritsu callbox. Needs to be
-    # formatted to replace {} with either A or B depending on the model.
-    CALLBOX_CONFIG_PATH = 'C:\\Users\\MD8475B\\Documents\\DAN_configs\\'
-
-    def setup_lte_scenario(self):
-        """ The B model can support up to five carriers. """
-
-        super().setup_lte_scenario()
-
-        self.bts.extend([
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS3),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS4),
-            self.anritsu.get_BTS(md8475a.BtsNumber.BTS5)
-        ])
diff --git a/src/antlion/controllers/anritsu_lib/md8475a.py b/src/antlion/controllers/anritsu_lib/md8475a.py
deleted file mode 100644
index ac67229..0000000
--- a/src/antlion/controllers/anritsu_lib/md8475a.py
+++ /dev/null
@@ -1,5066 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Controller interface for Anritsu Signalling Tester MD8475A.
-"""
-
-import logging
-import time
-import socket
-from enum import Enum
-from enum import IntEnum
-
-from antlion.controllers.anritsu_lib._anritsu_utils import AnritsuError
-from antlion.controllers.anritsu_lib._anritsu_utils import AnritsuUtils
-from antlion.controllers.anritsu_lib._anritsu_utils import NO_ERROR
-from antlion.controllers.anritsu_lib._anritsu_utils import OPERATION_COMPLETE
-
-from antlion import tracelogger
-
-TERMINATOR = "\0"
-
-# The following wait times (except COMMUNICATION_STATE_WAIT_TIME) are actually
-# the times for socket to time out. Increasing them is to make sure there is
-# enough time for MD8475A operation to be completed in some cases.
-# It won't increase test execution time.
-SMARTSTUDIO_LAUNCH_WAIT_TIME = 300  # was 90
-SMARTSTUDIO_SIMULATION_START_WAIT_TIME = 300  # was 120
-REGISTRATION_STATE_WAIT_TIME = 240
-LOAD_SIMULATION_PARAM_FILE_WAIT_TIME = 30
-COMMUNICATION_STATE_WAIT_TIME = 240
-ANRITSU_SOCKET_BUFFER_SIZE = 8192
-COMMAND_COMPLETE_WAIT_TIME = 180  # was 90
-SETTLING_TIME = 1
-WAIT_TIME_IDENTITY_RESPONSE = 5
-IDLE_STATE_WAIT_TIME = 240
-
-IMSI_READ_USERDATA_WCDMA = "081501"
-IMEI_READ_USERDATA_WCDMA = "081502"
-IMEISV_READ_USERDATA_WCDMA = "081503"
-IMSI_READ_USERDATA_LTE = "075501"
-IMEI_READ_USERDATA_LTE = "075502"
-IMEISV_READ_USERDATA_LTE = "075503"
-IMSI_READ_USERDATA_GSM = "081501"
-IMEI_READ_USERDATA_GSM = "081502"
-IMEISV_READ_USERDATA_GSM = "081503"
-IDENTITY_REQ_DATA_LEN = 24
-SEQ_LOG_MESSAGE_START_INDEX = 60
-
-WCDMA_BANDS = {
-    "I": "1",
-    "II": "2",
-    "III": "3",
-    "IV": "4",
-    "V": "5",
-    "VI": "6",
-    "VII": "7",
-    "VIII": "8",
-    "IX": "9",
-    "X": "10",
-    "XI": "11",
-    "XII": "12",
-    "XIII": "13",
-    "XIV": "14"
-}
-
-
-def create(configs):
-    objs = []
-    for c in configs:
-        ip_address = c["ip_address"]
-        objs.append(MD8475A(ip_address))
-    return objs
-
-
-def destroy(objs):
-    return
-
-
-class ProcessingStatus(Enum):
-    ''' MD8475A processing status for UE,Packet,Voice,Video,SMS,
-        PPP, PWS '''
-    PROCESS_STATUS_NONE = "NONE"
-    PROCESS_STATUS_NOTRUN = "NOTRUN"
-    PROCESS_STATUS_POWEROFF = "POWEROFF"
-    PROCESS_STATUS_REGISTRATION = "REGISTRATION"
-    PROCESS_STATUS_DETACH = "DETACH"
-    PROCESS_STATUS_IDLE = "IDLE"
-    PROCESS_STATUS_ORIGINATION = "ORIGINATION"
-    PROCESS_STATUS_HANDOVER = "HANDOVER"
-    PROCESS_STATUS_UPDATING = "UPDATING"
-    PROCESS_STATUS_TERMINATION = "TERMINATION"
-    PROCESS_STATUS_COMMUNICATION = "COMMUNICATION"
-    PROCESS_STATUS_UERELEASE = "UERELEASE"
-    PROCESS_STATUS_NWRELEASE = "NWRELEASE"
-
-
-class BtsNumber(Enum):
-    '''ID number for MD8475A supported BTS '''
-    BTS1 = "BTS1"
-    BTS2 = "BTS2"
-    BTS3 = "BTS3"
-    BTS4 = "BTS4"
-    BTS5 = "BTS5"
-
-
-class BtsTechnology(Enum):
-    ''' BTS system technology'''
-    LTE = "LTE"
-    WCDMA = "WCDMA"
-    TDSCDMA = "TDSCDMA"
-    GSM = "GSM"
-    CDMA1X = "CDMA1X"
-    EVDO = "EVDO"
-
-
-class BtsBandwidth(Enum):
-    ''' Values for Cell Bandwidth '''
-    LTE_BANDWIDTH_1dot4MHz = "1.4MHz"
-    LTE_BANDWIDTH_3MHz = "3MHz"
-    LTE_BANDWIDTH_5MHz = "5MHz"
-    LTE_BANDWIDTH_10MHz = "10MHz"
-    LTE_BANDWIDTH_15MHz = "15MHz"
-    LTE_BANDWIDTH_20MHz = "20MHz"
-
-    def get_float_value(bts_bandwidth):
-        """ Returns a float representing the bandwidth in MHz.
-
-        Args:
-            bts_bandwidth: a BtsBandwidth enum or a string matching one of the
-            values in the BtsBandwidth enum.
-        """
-
-        if isinstance(bts_bandwidth, BtsBandwidth):
-            bandwidth_str = bts_bandwidth.value
-        elif isinstance(bts_bandwidth, str):
-            bandwidth_str = bts_bandwidth
-        else:
-            raise TypeError('bts_bandwidth should be an instance of string or '
-                            'BtsBandwidth. ')
-
-        if bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_20MHz.value:
-            return 20
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_15MHz.value:
-            return 15
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_10MHz.value:
-            return 10
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_5MHz.value:
-            return 5
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_3MHz.value:
-            return 3
-        elif bandwidth_str == BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value:
-            return 1.4
-        else:
-            raise ValueError(
-                'Could not map {} to a bandwidth value.'.format(bandwidth_str))
-
-
-MAX_NRB_FOR_BANDWIDTH = {
-    BtsBandwidth.LTE_BANDWIDTH_1dot4MHz.value: 6,
-    BtsBandwidth.LTE_BANDWIDTH_3MHz.value: 15,
-    BtsBandwidth.LTE_BANDWIDTH_5MHz.value: 25,
-    BtsBandwidth.LTE_BANDWIDTH_10MHz.value: 50,
-    BtsBandwidth.LTE_BANDWIDTH_15MHz.value: 75,
-    BtsBandwidth.LTE_BANDWIDTH_20MHz.value: 100
-}
-
-
-class LteMimoMode(Enum):
-    """ Values for LTE MIMO modes. """
-    NONE = "MIMONOT"
-    MIMO_2X2 = "MIMO2X2"
-    MIMO_4X4 = "MIMO4X4"
-
-
-class BtsGprsMode(Enum):
-    ''' Values for Gprs Modes '''
-    NO_GPRS = "NO_GPRS"
-    GPRS = "GPRS"
-    EGPRS = "EGPRS"
-
-
-class BtsPacketRate(Enum):
-    ''' Values for Cell Packet rate '''
-    LTE_MANUAL = "MANUAL"
-    LTE_BESTEFFORT = "BESTEFFORT"
-    WCDMA_DL384K_UL64K = "DL384K_UL64K"
-    WCDMA_DLHSAUTO_REL7_UL384K = "DLHSAUTO_REL7_UL384K"
-    WCDMA_DL18_0M_UL384K = "DL18_0M_UL384K"
-    WCDMA_DL21_6M_UL384K = "DL21_6M_UL384K"
-    WCDMA_DLHSAUTO_REL7_ULHSAUTO = "DLHSAUTO_REL7_ULHSAUTO"
-    WCDMA_DL18_0M_UL1_46M = "DL18_0M_UL1_46M"
-    WCDMA_DL18_0M_UL2_0M = "DL18_0M_UL2_0M"
-    WCDMA_DL18_0M_UL5_76M = "DL18_0M_UL5_76M"
-    WCDMA_DL21_6M_UL1_46M = "DL21_6M_UL1_46M"
-    WCDMA_DL21_6M_UL2_0M = "DL21_6M_UL2_0M"
-    WCDMA_DL21_6M_UL5_76M = "DL21_6M_UL5_76M"
-    WCDMA_DLHSAUTO_REL8_UL384K = "DLHSAUTO_REL8_UL384K"
-    WCDMA_DL23_4M_UL384K = "DL23_4M_UL384K"
-    WCDMA_DL28_0M_UL384K = "DL28_0M_UL384K"
-    WCDMA_DL36_0M_UL384K = "DL36_0M_UL384K"
-    WCDMA_DL43_2M_UL384K = "DL43_2M_UL384K"
-    WCDMA_DLHSAUTO_REL8_ULHSAUTO = "DLHSAUTO_REL8_ULHSAUTO"
-    WCDMA_DL23_4M_UL1_46M = "DL23_4M_UL1_46M"
-    WCDMA_DL23_4M_UL2_0M = "DL23_4M_UL2_0M"
-    WCDMA_DL23_4M_UL5_76M = "DL23_4M_UL5_76M"
-    WCDMA_DL28_0M_UL1_46M = "DL28_0M_UL1_46M"
-    WCDMA_DL28_0M_UL2_0M = "DL28_0M_UL2_0M"
-    WCDMA_DL28_0M_UL5_76M = "L28_0M_UL5_76M"
-    WCDMA_DL36_0M_UL1_46M = "DL36_0M_UL1_46M"
-    WCDMA_DL36_0M_UL2_0M = "DL36_0M_UL2_0M"
-    WCDMA_DL36_0M_UL5_76M = "DL36_0M_UL5_76M"
-    WCDMA_DL43_2M_UL1_46M = "DL43_2M_UL1_46M"
-    WCDMA_DL43_2M_UL2_0M = "DL43_2M_UL2_0M"
-    WCDMA_DL43_2M_UL5_76M = "DL43_2M_UL5_76M"
-
-
-class BtsPacketWindowSize(Enum):
-    ''' Values for Cell Packet window size '''
-    WINDOW_SIZE_1 = 1
-    WINDOW_SIZE_8 = 8
-    WINDOW_SIZE_16 = 16
-    WINDOW_SIZE_32 = 32
-    WINDOW_SIZE_64 = 64
-    WINDOW_SIZE_128 = 128
-    WINDOW_SIZE_256 = 256
-    WINDOW_SIZE_512 = 512
-    WINDOW_SIZE_768 = 768
-    WINDOW_SIZE_1024 = 1024
-    WINDOW_SIZE_1536 = 1536
-    WINDOW_SIZE_2047 = 2047
-
-
-class BtsServiceState(Enum):
-    ''' Values for BTS service state '''
-    SERVICE_STATE_IN = "IN"
-    SERVICE_STATE_OUT = "OUT"
-
-
-class BtsCellBarred(Enum):
-    ''' Values for Cell barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    BARRED = "BARRED"
-
-
-class BtsAccessClassBarred(Enum):
-    ''' Values for Access class barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    EMERGENCY = "EMERGENCY"
-    BARRED = "BARRED"
-    USERSPECIFIC = "USERSPECIFIC"
-
-
-class BtsLteEmergencyAccessClassBarred(Enum):
-    ''' Values for Lte emergency access class barred parameter '''
-    NOTBARRED = "NOTBARRED"
-    BARRED = "BARRED"
-
-
-class BtsNwNameEnable(Enum):
-    ''' Values for BT network name enable parameter '''
-    NAME_ENABLE = "ON"
-    NAME_DISABLE = "OFF"
-
-
-class IPAddressType(Enum):
-    ''' Values for IP address type '''
-    IPV4 = "IPV4"
-    IPV6 = "IPV6"
-    IPV4V6 = "IPV4V6"
-
-
-class TriggerMessageIDs(Enum):
-    ''' ID for Trigger messages  '''
-    RRC_CONNECTION_REQ = 111101
-    RRC_CONN_REESTABLISH_REQ = 111100
-    ATTACH_REQ = 141141
-    DETACH_REQ = 141145
-    MM_LOC_UPDATE_REQ = 221108
-    GMM_ATTACH_REQ = 241101
-    GMM_RA_UPDATE_REQ = 241108
-    IDENTITY_REQUEST_LTE = 141155
-    IDENTITY_REQUEST_WCDMA = 241115
-    IDENTITY_REQUEST_GSM = 641115
-    UE_CAPABILITY_ENQUIRY = 111167
-
-
-class TriggerMessageReply(Enum):
-    ''' Values for Trigger message reply parameter '''
-    ACCEPT = "ACCEPT"
-    REJECT = "REJECT"
-    IGNORE = "IGNORE"
-    NONE = "NONE"
-    ILLEGAL = "ILLEGAL"
-
-
-class TestProcedure(Enum):
-    ''' Values for different Test procedures in MD8475A '''
-    PROCEDURE_BL = "BL"
-    PROCEDURE_SELECTION = "SELECTION"
-    PROCEDURE_RESELECTION = "RESELECTION"
-    PROCEDURE_REDIRECTION = "REDIRECTION"
-    PROCEDURE_HO = "HO"
-    PROCEDURE_HHO = "HHO"
-    PROCEDURE_SHO = "SHO"
-    PROCEDURE_MEASUREMENT = "MEASUREMENT"
-    PROCEDURE_CELLCHANGE = "CELLCHANGE"
-    PROCEDURE_MULTICELL = "MULTICELL"
-
-
-class TestPowerControl(Enum):
-    ''' Values for power control in test procedure '''
-    POWER_CONTROL_ENABLE = "ENABLE"
-    POWER_CONTROL_DISABLE = "DISABLE"
-
-
-class TestMeasurement(Enum):
-    ''' Values for mesaurement in test procedure '''
-    MEASUREMENT_ENABLE = "ENABLE"
-    MEASUREMENT_DISABLE = "DISABLE"
-
-
-'''MD8475A processing states'''
-_PROCESS_STATES = {
-    "NONE": ProcessingStatus.PROCESS_STATUS_NONE,
-    "NOTRUN": ProcessingStatus.PROCESS_STATUS_NOTRUN,
-    "POWEROFF": ProcessingStatus.PROCESS_STATUS_POWEROFF,
-    "REGISTRATION": ProcessingStatus.PROCESS_STATUS_REGISTRATION,
-    "DETACH": ProcessingStatus.PROCESS_STATUS_DETACH,
-    "IDLE": ProcessingStatus.PROCESS_STATUS_IDLE,
-    "ORIGINATION": ProcessingStatus.PROCESS_STATUS_ORIGINATION,
-    "HANDOVER": ProcessingStatus.PROCESS_STATUS_HANDOVER,
-    "UPDATING": ProcessingStatus.PROCESS_STATUS_UPDATING,
-    "TERMINATION": ProcessingStatus.PROCESS_STATUS_TERMINATION,
-    "COMMUNICATION": ProcessingStatus.PROCESS_STATUS_COMMUNICATION,
-    "UERELEASE": ProcessingStatus.PROCESS_STATUS_UERELEASE,
-    "NWRELEASE": ProcessingStatus.PROCESS_STATUS_NWRELEASE,
-}
-
-
-class ImsCscfStatus(Enum):
-    """ MD8475A ims cscf status for UE
-    """
-    OFF = "OFF"
-    SIPIDLE = "SIPIDLE"
-    CONNECTED = "CONNECTED"
-    CALLING = "CALLING"
-    RINGING = "RINGING"
-    UNKNOWN = "UNKNOWN"
-
-
-class ImsCscfCall(Enum):
-    """ MD8475A ims cscf call action
-    """
-    MAKE = "MAKE"
-    END = "END"
-    MAKEVIDEO = "MAKEVIDEO"
-    MAKE2ND = "MAKE2ND"
-    END2ND = "END2ND"
-    ANSWER = "ANSWER"
-    HOLD = "HOLD"
-    RESUME = "RESUME"
-
-
-class VirtualPhoneStatus(IntEnum):
-    ''' MD8475A virtual phone status for UE voice and UE video
-        PPP, PWS '''
-    STATUS_IDLE = 0
-    STATUS_VOICECALL_ORIGINATION = 1
-    STATUS_VOICECALL_INCOMING = 2
-    STATUS_VOICECALL_INPROGRESS = 3
-    STATUS_VOICECALL_DISCONNECTING = 4
-    STATUS_VOICECALL_DISCONNECTED = 5
-    STATUS_VIDEOCALL_ORIGINATION = 6
-    STATUS_VIDEOCALL_INCOMING = 7
-    STATUS_VIDEOCALL_INPROGRESS = 8
-    STATUS_VIDEOCALL_DISCONNECTING = 9
-    STATUS_VIDEOCALL_DISCONNECTED = 10
-
-
-'''Virtual Phone Status '''
-_VP_STATUS = {
-    "0": VirtualPhoneStatus.STATUS_IDLE,
-    "1": VirtualPhoneStatus.STATUS_VOICECALL_ORIGINATION,
-    "2": VirtualPhoneStatus.STATUS_VOICECALL_INCOMING,
-    "3": VirtualPhoneStatus.STATUS_VOICECALL_INPROGRESS,
-    "4": VirtualPhoneStatus.STATUS_VOICECALL_DISCONNECTING,
-    "5": VirtualPhoneStatus.STATUS_VOICECALL_DISCONNECTED,
-    "6": VirtualPhoneStatus.STATUS_VIDEOCALL_ORIGINATION,
-    "7": VirtualPhoneStatus.STATUS_VIDEOCALL_INCOMING,
-    "8": VirtualPhoneStatus.STATUS_VIDEOCALL_INPROGRESS,
-    "9": VirtualPhoneStatus.STATUS_VIDEOCALL_DISCONNECTING,
-    "10": VirtualPhoneStatus.STATUS_VIDEOCALL_DISCONNECTED,
-}
-
-
-class VirtualPhoneAutoAnswer(Enum):
-    ''' Virtual phone auto answer enable values'''
-    ON = "ON"
-    OFF = "OFF"
-
-
-class CsfbType(Enum):
-    ''' CSFB Type values'''
-    CSFB_TYPE_REDIRECTION = "REDIRECTION"
-    CSFB_TYPE_HANDOVER = "HO"
-
-
-class ReturnToEUTRAN(Enum):
-    '''Return to EUTRAN setting values '''
-    RETEUTRAN_ENABLE = "ENABLE"
-    RETEUTRAN_DISABLE = "DISABLE"
-
-
-class CTCHSetup(Enum):
-    '''CTCH setting values '''
-    CTCH_ENABLE = "ENABLE"
-    CTCH_DISABLE = "DISABLE"
-
-
-class UEIdentityType(Enum):
-    '''UE Identity type values '''
-    IMSI = "IMSI"
-    IMEI = "IMEI"
-    IMEISV = "IMEISV"
-
-
-class CBCHSetup(Enum):
-    '''CBCH setting values '''
-    CBCH_ENABLE = "ENABLE"
-    CBCH_DISABLE = "DISABLE"
-
-
-class Switch(Enum):
-    ''' Values for ENABLE or DISABLE '''
-    ENABLE = "ENABLE"
-    DISABLE = "DISABLE"
-
-
-class ModulationType(Enum):
-    """Supported Modulation Types."""
-    Q16 = '16QAM'
-    Q64 = '64QAM'
-    Q256 = '256QAM'
-
-
-class MD8475A(object):
-    """Class to communicate with Anritsu MD8475A Signalling Tester.
-       This uses GPIB command to interface with Anritsu MD8475A """
-    def __init__(self, ip_address, wlan=False, md8475_version="A"):
-        self._error_reporting = True
-        self._ipaddr = ip_address
-        self.log = tracelogger.TraceLogger(logging.getLogger())
-        self._wlan = wlan
-        port_number = 28002
-        self._md8475_version = md8475_version
-        if md8475_version == "B":
-            global TERMINATOR
-            TERMINATOR = "\n"
-            port_number = 5025
-
-        # Open socket connection to Signaling Tester
-        self.log.info("Opening Socket Connection with "
-                      "Signaling Tester ({}) ".format(self._ipaddr))
-        try:
-            self._sock = socket.create_connection((self._ipaddr, port_number),
-                                                  timeout=120)
-            self.send_query("*IDN?", 60)
-            self.log.info("Communication with Signaling Tester OK.")
-            self.log.info("Opened Socket connection to ({})"
-                          "with handle ({})".format(self._ipaddr, self._sock))
-            # launching Smart Studio Application needed for the simulation
-            ret = self.launch_smartstudio()
-        except socket.timeout:
-            raise AnritsuError("Timeout happened while conencting to"
-                               " Anritsu MD8475A")
-        except socket.error:
-            raise AnritsuError("Socket creation error")
-
-    def get_BTS(self, btsnumber):
-        """ Returns the BTS object based on the BTS number provided
-
-        Args:
-            btsnumber: BTS number (BTS1, BTS2)
-
-        Returns:
-            BTS object
-        """
-        return _BaseTransceiverStation(self, btsnumber)
-
-    def get_AnritsuTestCases(self):
-        """ Returns the Anritsu Test Case Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Test Case Module Object
-        """
-        return _AnritsuTestCases(self)
-
-    def get_VirtualPhone(self):
-        """ Returns the Anritsu Virtual Phone Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Virtual Phone Module Object
-        """
-        return _VirtualPhone(self)
-
-    def get_PDN(self, pdn_number):
-        """ Returns the PDN Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu PDN Module Object
-        """
-        return _PacketDataNetwork(self, pdn_number)
-
-    def get_TriggerMessage(self):
-        """ Returns the Anritsu Trigger Message Module Object
-
-        Args:
-            None
-
-        Returns:
-            Anritsu Trigger Message Module Object
-        """
-        return _TriggerMessage(self)
-
-    def get_IMS(self, vnid):
-        """ Returns the IMS Module Object with VNID
-
-        Args:
-            vnid: Virtual Network ID
-
-        Returns:
-            Anritsu IMS VNID Module Object
-        """
-        return _IMS_Services(self, vnid)
-
-    def get_ims_cscf_status(self, virtual_network_id):
-        """ Get the IMS CSCF Status of virtual network
-
-        Args:
-            virtual_network_id: virtual network id
-
-        Returns:
-            IMS CSCF status
-        """
-        cmd = "IMSCSCFSTAT? {}".format(virtual_network_id)
-        return self.send_query(cmd)
-
-    def ims_cscf_call_action(self, virtual_network_id, action):
-        """ IMS CSCF Call action
-
-        Args:
-            virtual_network_id: virtual network id
-            action: action to make
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFCALL {},{}".format(virtual_network_id, action)
-        self.send_command(cmd)
-
-    def send_query(self, query, sock_timeout=120):
-        """ Sends a Query message to Anritsu and return response
-
-        Args:
-            query - Query string
-
-        Returns:
-            query response
-        """
-        self.log.info("--> {}".format(query))
-        querytoSend = (query + TERMINATOR).encode('utf-8')
-        self._sock.settimeout(sock_timeout)
-        try:
-            self._sock.send(querytoSend)
-            result = self._sock.recv(ANRITSU_SOCKET_BUFFER_SIZE).rstrip(
-                TERMINATOR.encode('utf-8'))
-            response = result.decode('utf-8')
-            self.log.info('<-- {}'.format(response))
-            return response
-        except socket.timeout:
-            raise AnritsuError("Timeout: Response from Anritsu")
-        except socket.error:
-            raise AnritsuError("Socket Error")
-
-    def send_command(self, command, sock_timeout=120):
-        """ Sends a Command message to Anritsu
-
-        Args:
-            command - command string
-
-        Returns:
-            None
-        """
-        self.log.info("--> {}".format(command))
-        if self._error_reporting:
-            cmdToSend = (command + ";ERROR?" + TERMINATOR).encode('utf-8')
-            self._sock.settimeout(sock_timeout)
-            try:
-                self._sock.send(cmdToSend)
-                err = self._sock.recv(ANRITSU_SOCKET_BUFFER_SIZE).rstrip(
-                    TERMINATOR.encode('utf-8'))
-                error = int(err.decode('utf-8'))
-                if error != NO_ERROR:
-                    raise AnritsuError(error, command)
-            except socket.timeout:
-                raise AnritsuError("Timeout for Command Response from Anritsu")
-            except socket.error:
-                raise AnritsuError("Socket Error for Anritsu command")
-            except Exception as e:
-                raise AnritsuError(e, command)
-        else:
-            cmdToSend = (command + TERMINATOR).encode('utf-8')
-            try:
-                self._sock.send(cmdToSend)
-            except socket.error:
-                raise AnritsuError("Socket Error", command)
-            return
-
-    def launch_smartstudio(self):
-        """ launch the Smart studio application
-            This should be done before stating simulation
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        # check the Smart Studio status . If Smart Studio doesn't exist ,
-        # start it.if it is running, stop it. Smart Studio should be in
-        # NOTRUN (Simulation Stopped) state to start new simulation
-        stat = self.send_query("STAT?", 30)
-        if stat == "NOTEXIST":
-            self.log.info("Launching Smart Studio Application,"
-                          "it takes about a minute.")
-            time_to_wait = SMARTSTUDIO_LAUNCH_WAIT_TIME
-            sleep_interval = 15
-            waiting_time = 0
-
-            err = self.send_command("RUN", SMARTSTUDIO_LAUNCH_WAIT_TIME)
-            stat = self.send_query("STAT?")
-            while stat != "NOTRUN":
-                time.sleep(sleep_interval)
-                waiting_time = waiting_time + sleep_interval
-                if waiting_time <= time_to_wait:
-                    stat = self.send_query("STAT?")
-                else:
-                    raise AnritsuError("Timeout: Smart Studio launch")
-        elif stat == "RUNNING":
-            # Stop simulation if necessary
-            self.send_command("STOP", 60)
-            stat = self.send_query("STAT?")
-
-        # The state of the Smart Studio should be NOTRUN at this point
-        # after the one of the steps from above
-        if stat != "NOTRUN":
-            self.log.info(
-                "Can not launch Smart Studio, "
-                "please shut down all the Smart Studio SW components")
-            raise AnritsuError("Could not run SmartStudio")
-
-    def close_smartstudio(self):
-        """ Closes the Smart studio application
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        self.send_command("EXIT", 60)
-
-    def get_smartstudio_status(self):
-        """ Gets the Smart studio status
-
-        Args:
-            None
-
-        Returns:
-            Smart studio status
-        """
-        return self.send_query("STAT?")
-
-    def start_simulation(self):
-        """ Starting the simulation of the network model.
-            simulation model or simulation parameter file
-            should be set before starting the simulation
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        time_to_wait = SMARTSTUDIO_SIMULATION_START_WAIT_TIME
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.send_command("START", SMARTSTUDIO_SIMULATION_START_WAIT_TIME)
-
-        self.log.info("Waiting for CALLSTAT=POWEROFF")
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[0] != "POWEROFF":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("Timeout: Starting simulation")
-
-    def stop_simulation(self):
-        """ Stop simulation operation
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        # Stop virtual network (IMS) #1 if still running
-        # this is needed before Sync command is supported in 6.40a
-        if self.send_query("IMSVNSTAT? 1") == "RUNNING":
-            self.send_command("IMSSTOPVN 1")
-        if self.send_query("IMSVNSTAT? 2") == "RUNNING":
-            self.send_command("IMSSTOPVN 2")
-        stat = self.send_query("STAT?")
-        # Stop simulation if its is RUNNING
-        if stat == "RUNNING":
-            self.send_command("STOP", 60)
-            stat = self.send_query("STAT?")
-            if stat != "NOTRUN":
-                self.log.info("Failed to stop simulation")
-                raise AnritsuError("Failed to stop simulation")
-
-    def reset(self):
-        """ reset simulation parameters
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("*RST", COMMAND_COMPLETE_WAIT_TIME)
-
-    def load_simulation_paramfile(self, filepath):
-        """ loads simulation model parameter file
-        Args:
-          filepath : simulation model parameter file path
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        cmd = "LOADSIMPARAM \"" + filepath + '\";ERROR?'
-        self.send_query(cmd, LOAD_SIMULATION_PARAM_FILE_WAIT_TIME)
-
-    def load_cell_paramfile(self, filepath):
-        """ loads cell model parameter file
-
-        Args:
-          filepath : cell model parameter file path
-
-        Returns:
-            None
-        """
-        self.stop_simulation()
-        cmd = "LOADCELLPARAM \"" + filepath + '\";ERROR?'
-        status = int(self.send_query(cmd))
-        if status != NO_ERROR:
-            raise AnritsuError(status, cmd)
-
-    def _set_simulation_model(self, sim_model, reset=True):
-        """ Set simulation model and valid the configuration
-
-        Args:
-            sim_model: simulation model
-            reset: if True, reset the simulation after setting the new
-            simulation model
-        Returns:
-            True/False
-        """
-        error = int(
-            self.send_query("SIMMODEL %s;ERROR?" % sim_model,
-                            COMMAND_COMPLETE_WAIT_TIME))
-        if error:  # Try again if first set SIMMODEL fails
-            time.sleep(3)
-            if "WLAN" in sim_model:
-                new_sim_model = sim_model[:-5]
-                error = int(
-                    self.send_query("SIMMODEL %s;ERROR?" % new_sim_model,
-                                    COMMAND_COMPLETE_WAIT_TIME))
-                time.sleep(3)
-            error = int(
-                self.send_query("SIMMODEL %s;ERROR?" % sim_model,
-                                COMMAND_COMPLETE_WAIT_TIME))
-            if error:
-                return False
-        if reset:
-            # Reset might be necessary because SIMMODEL will load
-            # some of the contents from previous parameter files.
-            self.reset()
-        return True
-
-    def set_simulation_model(self, *bts_rats, reset=True):
-        """ Stops the simulation and then sets the simulation model.
-
-        Args:
-            *bts_rats: base station rats for BTS 1 to 5.
-            reset: if True, reset the simulation after setting the new
-            simulation model
-        Returns:
-            True or False
-        """
-        self.stop_simulation()
-        if len(bts_rats) not in range(1, 6):
-            raise ValueError(
-                "set_simulation_model requires 1 to 5 BTS values.")
-        simmodel = ",".join(bts_rat.value for bts_rat in bts_rats)
-        if self._wlan:
-            simmodel = simmodel + "," + "WLAN"
-        return self._set_simulation_model(simmodel, reset)
-
-    def get_simulation_model(self):
-        """ Gets the simulation model
-
-        Args:
-            None
-
-        Returns:
-            Current simulation model
-        """
-        cmd = "SIMMODEL?"
-        return self.send_query(cmd)
-
-    def get_lte_rrc_status_change(self):
-        """ Gets the LTE RRC status change function state
-
-        Returns:
-            Boolean: True is Enabled / False is Disabled
-        """
-        cmd = "L_RRCSTAT?"
-        return self.send_query(cmd) == "ENABLE"
-
-    def set_lte_rrc_status_change(self, status_change):
-        """ Enables or Disables the LTE RRC status change function
-
-        Returns:
-            None
-        """
-        cmd = "L_RRCSTAT "
-        if status_change:
-            cmd += "ENABLE"
-        else:
-            cmd += "DISABLE"
-        self.send_command(cmd)
-
-    def get_lte_rrc_status_change_timer(self):
-        """ Gets the LTE RRC Status Change Timer
-
-        Returns:
-            returns a status change timer integer value
-        """
-        cmd = "L_STATTMR?"
-        return self.send_query(cmd)
-
-    def set_lte_rrc_status_change_timer(self, time):
-        """ Sets the LTE RRC Status Change Timer parameter
-
-        Returns:
-            None
-        """
-        cmd = "L_STATTMR %s" % time
-        self.send_command(cmd)
-
-    def set_umts_rrc_status_change(self, status_change):
-        """ Enables or Disables the UMTS RRC status change function
-
-        Returns:
-            None
-        """
-        cmd = "W_RRCSTAT "
-        if status_change:
-            cmd += "ENABLE"
-        else:
-            cmd += "DISABLE"
-        self.send_command(cmd)
-
-    def get_umts_rrc_status_change(self):
-        """ Gets the UMTS RRC Status Change
-
-        Returns:
-            Boolean: True is Enabled / False is Disabled
-        """
-        cmd = "W_RRCSTAT?"
-        return self.send_query(cmd)
-
-    def set_umts_dch_stat_timer(self, timer_seconds):
-        """ Sets the UMTS RRC DCH timer
-
-        Returns:
-            None
-        """
-        cmd = "W_STATTMRDCH %s" % timer_seconds
-        self.send_command(cmd)
-
-    def set_simulation_state_to_poweroff(self):
-        """ Sets the simulation state to POWER OFF
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("RESETSIMULATION POWEROFF")
-        time_to_wait = 30
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=POWEROFF")
-        callstat = self.send_query("CALLSTAT?").split(",")
-        while callstat[0] != "POWEROFF":
-            time.sleep(sleep_interval)
-            waiting_time = waiting_time + sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT?").split(",")
-            else:
-                break
-
-    def set_simulation_state_to_idle(self, btsnumber):
-        """ Sets the simulation state to IDLE
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        if not isinstance(btsnumber, BtsNumber):
-            raise ValueError(' The parameter should be of type "BtsNumber" ')
-        cmd = "RESETSIMULATION IDLE," + btsnumber.value
-        self.send_command(cmd)
-        time_to_wait = 30
-        sleep_interval = 2
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=IDLE")
-        callstat = self.send_query("CALLSTAT?").split(",")
-        while callstat[0] != "IDLE":
-            time.sleep(sleep_interval)
-            waiting_time = waiting_time + sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT?").split(",")
-            else:
-                break
-
-    def set_trigger_message_mode(self, msg_id):
-        """ Sets the Message Mode of the trigger
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        cmd = "TMMESSAGEMODE {},USERDATA".format(msg_id)
-        self.send_command(cmd)
-
-    def set_data_of_trigger_message(self, msg_id, user_data):
-        """ Sets the User Data of the trigger message
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-            user_data: Hex data
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        data_len = len(user_data) * 4
-
-        cmd = "TMUSERDATA {}, {}, {}".format(msg_id, user_data, data_len)
-        self.send_command(cmd)
-
-    def send_trigger_message(self, msg_id):
-        """ Sends the User Data of the trigger information
-
-        Args:
-            msg_id: The hex value of the identity of an RRC/NAS message.
-
-        Returns:
-            None
-        """
-
-        if isinstance(msg_id, TriggerMessageIDs):
-            msg_id = msg_id.value
-
-        cmd = "TMSENDUSERMSG {}".format(msg_id)
-        self.send_command(cmd)
-
-    def wait_for_registration_state(self,
-                                    bts=1,
-                                    time_to_wait=REGISTRATION_STATE_WAIT_TIME):
-        """ Waits for UE registration state on Anritsu
-
-        Args:
-          bts: index of MD8475A BTS, eg 1, 2
-          time_to_wait: time to wait for the phone to get to registration state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for IDLE/COMMUNICATION state on anritsu.")
-
-        sleep_interval = 1
-        sim_model = (self.get_simulation_model()).split(",")
-        # wait 1 more round for GSM because of PS attach
-        registration_check_iterations = 2 if sim_model[bts - 1] == "GSM" else 1
-        for _ in range(registration_check_iterations):
-            waiting_time = 0
-            while waiting_time <= time_to_wait:
-                callstat = self.send_query(
-                    "CALLSTAT? BTS{}".format(bts)).split(",")
-                if callstat[0] == "IDLE" or callstat[1] == "COMMUNICATION":
-                    break
-                time.sleep(sleep_interval)
-                waiting_time += sleep_interval
-            else:
-                raise AnritsuError(
-                    "UE failed to register in {} seconds".format(time_to_wait))
-            time.sleep(sleep_interval)
-
-    def wait_for_communication_state(
-            self, time_to_wait=COMMUNICATION_STATE_WAIT_TIME):
-        """ Waits for UE communication state on Anritsu
-
-        Args:
-          time_to_wait: time to wait for the phone to get to communication state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for COMMUNICATION state on anritsu")
-        sleep_interval = 1
-        waiting_time = 0
-
-        self.log.info("Waiting for CALLSTAT=COMMUNICATION")
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[1] != "COMMUNICATION":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("UE failed to register on network")
-
-    def wait_for_idle_state(self, time_to_wait=IDLE_STATE_WAIT_TIME):
-        """ Waits for UE idle state on Anritsu
-
-        Args:
-          time_to_wait: time to wait for the phone to get to idle state
-
-        Returns:
-            None
-        """
-        self.log.info("wait for IDLE state on anritsu.")
-
-        sleep_interval = 1
-        waiting_time = 0
-
-        callstat = self.send_query("CALLSTAT? BTS1").split(",")
-        while callstat[0] != "IDLE":
-            time.sleep(sleep_interval)
-            waiting_time += sleep_interval
-            if waiting_time <= time_to_wait:
-                callstat = self.send_query("CALLSTAT? BTS1").split(",")
-            else:
-                raise AnritsuError("UE failed to go on idle state")
-
-    def get_camping_cell(self):
-        """ Gets the current camping cell information
-
-        Args:
-          None
-
-        Returns:
-            returns a tuple (BTS number, RAT Technology) '
-        """
-        bts_number, rat_info = self.send_query("CAMPINGCELL?").split(",")
-        return bts_number, rat_info
-
-    def get_supported_bands(self, rat):
-        """ Gets the supported bands from UE capability information
-
-        Args:
-          rat: LTE or WCDMA
-
-        Returns:
-            returns a list of bnads
-        """
-        cmd = "UEINFO? "
-        if rat == "LTE":
-            cmd += "L"
-        elif rat == "WCDMA":
-            cmd += "W"
-        else:
-            raise ValueError('The rat argument needs to be "LTE" or "WCDMA"')
-        cmd += "_SupportedBand"
-        result = self.send_query(cmd).split(",")
-        if result == "NONE":
-            return None
-        if rat == "WCDMA":
-            bands = []
-            for band in result:
-                bands.append(WCDMA_BANDS[band])
-            return bands
-        else:
-            return result
-
-    def start_testcase(self):
-        """ Starts a test case on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("STARTTEST")
-
-    def get_testcase_status(self):
-        """ Gets the current test case status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current test case status
-        """
-        return self.send_query("TESTSTAT?")
-
-    def start_ip_traffic(self, pdn='1'):
-        """ Starts IP data traffic with the selected PDN.
-
-        Args:
-            pdn: the pdn to be used for data traffic. Defaults to '1'.
-        """
-        self.send_command('OPERATEIPTRAFFIC START,' + pdn)
-
-    def stop_ip_traffic(self, pdn='1'):
-        """ Stops IP data traffic with the selected PDN.
-
-         Args:
-            pdn: pdn for which data traffic has to be stopped. Defaults to '1'.
-        """
-        self.send_command('OPERATEIPTRAFFIC STOP,' + pdn)
-
-    def set_carrier_aggregation_enabled(self, enabled=True):
-        """ Enables or disables de carrier aggregation option.
-
-        Args:
-            enabled: enables CA if True and disables CA if False.
-        """
-        cmd = 'CA ' + 'ENABLE' if enabled else 'DISABLE'
-        self.send_command(cmd)
-
-    # Common Default Gateway:
-    @property
-    def gateway_ipv4addr(self):
-        """ Gets the IPv4 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        return self.send_query("DGIPV4?")
-
-    @gateway_ipv4addr.setter
-    def gateway_ipv4addr(self, ipv4_addr):
-        """ sets the IPv4 address of the default gateway
-        Args:
-            ipv4_addr: IPv4 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "DGIPV4 " + ipv4_addr
-        self.send_command(cmd)
-
-    @property
-    def gateway_ipv6addr(self):
-        """ Gets the IPv6 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        return self.send_query("DGIPV6?")
-
-    @gateway_ipv6addr.setter
-    def gateway_ipv6addr(self, ipv6_addr):
-        """ sets the IPv6 address of the default gateway
-        Args:
-            ipv6_addr: IPv6 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "DGIPV6 " + ipv6_addr
-        self.send_command(cmd)
-
-    @property
-    def usim_key(self):
-        """ Gets the USIM Security Key
-
-        Args:
-          None
-
-        Returns:
-            USIM Security Key
-        """
-        return self.send_query("USIMK?")
-
-    @usim_key.setter
-    def usim_key(self, usimk):
-        """ sets the USIM Security Key
-        Args:
-            usimk: USIM Security Key, eg "000102030405060708090A0B0C0D0E0F"
-
-        Returns:
-            None
-        """
-        cmd = "USIMK " + usimk
-        self.send_command(cmd)
-
-    def get_ue_status(self):
-        """ Gets the current UE status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        UE_STATUS_INDEX = 0
-        ue_status = self.send_query("CALLSTAT?").split(",")[UE_STATUS_INDEX]
-        return _PROCESS_STATES[ue_status]
-
-    def get_packet_status(self):
-        """ Gets the current Packet status on Anritsu
-
-        Args:
-          None
-
-        Returns:
-            current Packet status
-        """
-        PACKET_STATUS_INDEX = 1
-        packet_status = self.send_query("CALLSTAT?").split(
-            ",")[PACKET_STATUS_INDEX]
-        return _PROCESS_STATES[packet_status]
-
-    def disconnect(self):
-        """ Disconnect the Anritsu box from test PC
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        # no need to # exit smart studio application
-        # self.close_smartstudio()
-        self._sock.close()
-
-    def machine_reboot(self):
-        """ Reboots the Anritsu Machine
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("REBOOT")
-
-    def save_sequence_log(self, fileName):
-        """ Saves the Anritsu Sequence logs to file
-
-        Args:
-          fileName: log file name
-
-        Returns:
-            None
-        """
-        cmd = 'SAVESEQLOG "{}"'.format(fileName)
-        self.send_command(cmd)
-
-    def clear_sequence_log(self):
-        """ Clears the Anritsu Sequence logs
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("CLEARSEQLOG")
-
-    def save_message_log(self, fileName):
-        """ Saves the Anritsu Message logs to file
-
-        Args:
-          fileName: log file name
-
-        Returns:
-            None
-        """
-        cmd = 'SAVEMSGLOG "{}"'.format(fileName)
-        self.send_command(cmd)
-
-    def clear_message_log(self):
-        """ Clears the Anritsu Message logs
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command("CLEARMSGLOG")
-
-    def save_trace_log(self, fileName, fileType, overwrite, start, end):
-        """ Saves the Anritsu Trace logs
-
-        Args:
-          fileName: log file name
-          fileType: file type (BINARY, TEXT, H245,PACKET, CPLABE)
-          overwrite: whether to over write
-          start: starting trace number
-          end: ending trace number
-
-        Returns:
-            None
-        """
-        cmd = 'SAVETRACELOG "{}",{},{},{},{}'.format(fileName, fileType,
-                                                     overwrite, start, end)
-        self.send_command(cmd)
-
-    def send_cmas_lte_wcdma(self, serialNo, messageID, warningMessage):
-        """ Sends a CMAS message
-
-        Args:
-          serialNo: serial number of CMAS message
-          messageID: CMAS message ID
-          warningMessage:  CMAS Warning message
-
-        Returns:
-            None
-        """
-        cmd = ('PWSSENDWM 3GPP,"BtsNo=1&WarningSystem=CMAS&SerialNo={}'
-               '&MessageID={}&wm={}"').format(serialNo, messageID,
-                                              warningMessage)
-        self.send_command(cmd)
-
-    def send_etws_lte_wcdma(self, serialNo, messageID, warningType,
-                            warningMessage, userAlertenable, popUpEnable):
-        """ Sends a ETWS message
-
-        Args:
-          serialNo: serial number of CMAS message
-          messageID: CMAS message ID
-          warningMessage:  CMAS Warning message
-
-        Returns:
-            None
-        """
-        cmd = (
-            'PWSSENDWM 3GPP,"BtsNo=1&WarningSystem=ETWS&SerialNo={}&'
-            'Primary=ON&PrimaryMessageID={}&Secondary=ON&SecondaryMessageID={}'
-            '&WarningType={}&wm={}&UserAlert={}&Popup={}&dcs=0x10&LanguageCode=en"'
-        ).format(serialNo, messageID, messageID, warningType, warningMessage,
-                 userAlertenable, popUpEnable)
-        self.send_command(cmd)
-
-    def send_cmas_etws_cdma1x(self, message_id, service_category, alert_ext,
-                              response_type, severity, urgency, certainty):
-        """ Sends a CMAS/ETWS message on CDMA 1X
-
-        Args:
-          serviceCategory: service category of alert
-          messageID: message ID
-          alertText: Warning message
-
-        Returns:
-            None
-        """
-        cmd = (
-            'PWSSENDWM 3GPP2,"BtsNo=1&ServiceCategory={}&MessageID={}&AlertText={}&'
-            'CharSet=ASCII&ResponseType={}&Severity={}&Urgency={}&Certainty={}"'
-        ).format(service_category, message_id, alert_ext, response_type,
-                 severity, urgency, certainty)
-        self.send_command(cmd)
-
-    @property
-    def csfb_type(self):
-        """ Gets the current CSFB type
-
-        Args:
-            None
-
-        Returns:
-            current CSFB type
-        """
-        return self.send_query("SIMMODELEX? CSFB")
-
-    @csfb_type.setter
-    def csfb_type(self, csfb_type):
-        """ sets the CSFB type
-        Args:
-            csfb_type: CSFB type
-
-        Returns:
-            None
-        """
-        if not isinstance(csfb_type, CsfbType):
-            raise ValueError('The parameter should be of type "CsfbType" ')
-        cmd = "SIMMODELEX CSFB," + csfb_type.value
-        self.send_command(cmd)
-
-    @property
-    def csfb_return_to_eutran(self):
-        """ Gets the current return to EUTRAN status
-
-        Args:
-            None
-
-        Returns:
-            current return to EUTRAN status
-        """
-        return self.send_query("SIMMODELEX? RETEUTRAN")
-
-    @csfb_return_to_eutran.setter
-    def csfb_return_to_eutran(self, enable):
-        """ sets the return to EUTRAN feature
-        Args:
-            enable: enable/disable return to EUTRAN feature
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, ReturnToEUTRAN):
-            raise ValueError(
-                'The parameter should be of type "ReturnToEUTRAN"')
-        cmd = "SIMMODELEX RETEUTRAN," + enable.value
-        self.send_command(cmd)
-
-    def set_packet_preservation(self):
-        """ Set packet state to Preservation
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEPACKET PRESERVATION"
-        self.send_command(cmd)
-
-    def set_packet_dormant(self):
-        """ Set packet state to Dormant
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEPACKET DORMANT"
-        self.send_command(cmd)
-
-    def get_ue_identity(self, identity_type):
-        """ Get the UE identity IMSI, IMEI, IMEISV
-
-        Args:
-            identity_type : IMSI/IMEI/IMEISV
-
-        Returns:
-            IMSI/IMEI/IMEISV value
-        """
-        bts, rat = self.get_camping_cell()
-        if rat == BtsTechnology.LTE.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_LTE.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_LTE
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_LTE
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_LTE
-            else:
-                return None
-        elif rat == BtsTechnology.WCDMA.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_WCDMA.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_WCDMA
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_WCDMA
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_WCDMA
-            else:
-                return None
-        elif rat == BtsTechnology.GSM.value:
-            identity_request = TriggerMessageIDs.IDENTITY_REQUEST_GSM.value
-            if identity_type == UEIdentityType.IMSI:
-                userdata = IMSI_READ_USERDATA_GSM
-            elif identity_type == UEIdentityType.IMEI:
-                userdata = IMEI_READ_USERDATA_GSM
-            elif identity_type == UEIdentityType.IMEISV:
-                userdata = IMEISV_READ_USERDATA_GSM
-            else:
-                return None
-        else:
-            return None
-
-        self.send_command("TMMESSAGEMODE {},USERDATA".format(identity_request))
-        time.sleep(SETTLING_TIME)
-        self.send_command("TMUSERDATA {}, {}, {}".format(
-            identity_request, userdata, IDENTITY_REQ_DATA_LEN))
-        time.sleep(SETTLING_TIME)
-        self.send_command("TMSENDUSERMSG {}".format(identity_request))
-        time.sleep(WAIT_TIME_IDENTITY_RESPONSE)
-        # Go through sequence log and find the identity response message
-        target = '"{}"'.format(identity_type.value)
-        seqlog = self.send_query("SEQLOG?").split(",")
-        while (target not in seqlog):
-            index = int(seqlog[0]) - 1
-            if index < SEQ_LOG_MESSAGE_START_INDEX:
-                self.log.error("Can not find " + target)
-                return None
-            seqlog = self.send_query("SEQLOG? %d" % index).split(",")
-        return (seqlog[-1])
-
-    def trigger_ue_capability_enquiry(self, requested_bands):
-        """ Triggers LTE RRC UE capability enquiry from callbox.
-
-        Args:
-            requested_bands: User data in hex format
-        """
-        self.set_trigger_message_mode(TriggerMessageIDs.UE_CAPABILITY_ENQUIRY)
-        time.sleep(SETTLING_TIME)
-        self.set_data_of_trigger_message(
-            TriggerMessageIDs.UE_CAPABILITY_ENQUIRY, requested_bands)
-        time.sleep(SETTLING_TIME)
-        self.send_trigger_message(TriggerMessageIDs.UE_CAPABILITY_ENQUIRY)
-        time.sleep(SETTLING_TIME)
-
-    def get_measured_pusch_power(self):
-        """ Queries the PUSCH power.
-
-        Returns:
-            A string indicating PUSCH power in each input port.
-        """
-        return self.send_query("MONITOR? UL_PUSCH")
-
-    def select_usim(self, usim):
-        """ Select pre-defined Anritsu USIM models
-
-        Args:
-            usim: any of P0035Bx, P0135Ax, P0250Ax, P0260Ax
-
-        Returns:
-            None
-        """
-        cmd = "SELECTUSIM {}".format(usim)
-        self.send_command(cmd)
-
-
-class _AnritsuTestCases(object):
-    '''Class to interact with the MD8475 supported test procedures '''
-    def __init__(self, anritsu):
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def procedure(self):
-        """ Gets the current Test Procedure type
-
-        Args:
-            None
-
-        Returns:
-            One of TestProcedure type values
-        """
-        return self._anritsu.send_query("TESTPROCEDURE?")
-
-    @procedure.setter
-    def procedure(self, procedure):
-        """ sets the Test Procedure type
-        Args:
-            procedure: One of TestProcedure type values
-
-        Returns:
-            None
-        """
-        if not isinstance(procedure, TestProcedure):
-            raise ValueError(
-                'The parameter should be of type "TestProcedure" ')
-        cmd = "TESTPROCEDURE " + procedure.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def bts_direction(self):
-        """ Gets the current Test direction
-
-         Args:
-            None
-
-        Returns:
-            Current Test direction eg:BTS2,BTS1
-        """
-        return self._anritsu.send_query("TESTBTSDIRECTION?")
-
-    @bts_direction.setter
-    def bts_direction(self, direction):
-        """ sets the Test direction  eg: BTS1 to BTS2 '''
-
-        Args:
-            direction: tuple (from-bts,to_bts) of type BtsNumber
-
-        Returns:
-            None
-        """
-        if not isinstance(direction, tuple) or len(direction) != 2:
-            raise ValueError("Pass a tuple with two items")
-        from_bts, to_bts = direction
-        if (isinstance(from_bts, BtsNumber) and isinstance(to_bts, BtsNumber)):
-            cmd = "TESTBTSDIRECTION {},{}".format(from_bts.value, to_bts.value)
-            self._anritsu.send_command(cmd)
-        else:
-            raise ValueError(' The parameters should be of type "BtsNumber" ')
-
-    @property
-    def registration_timeout(self):
-        """ Gets the current Test registration timeout
-
-        Args:
-            None
-
-        Returns:
-            Current test registration timeout value
-        """
-        return self._anritsu.send_query("TESTREGISTRATIONTIMEOUT?")
-
-    @registration_timeout.setter
-    def registration_timeout(self, timeout_value):
-        """ sets the Test registration timeout value
-        Args:
-            timeout_value: test registration timeout value
-
-        Returns:
-            None
-        """
-        cmd = "TESTREGISTRATIONTIMEOUT " + str(timeout_value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def power_control(self):
-        """ Gets the power control enabled/disabled status for test case
-
-        Args:
-            None
-
-        Returns:
-            current power control enabled/disabled status
-        """
-        return self._anritsu.send_query("TESTPOWERCONTROL?")
-
-    @power_control.setter
-    def power_control(self, enable):
-        """ Sets the power control enabled/disabled status for test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestPowerControl):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestPowerControl" ')
-        cmd = "TESTPOWERCONTROL " + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_LTE(self):
-        """ Checks measurement status for LTE test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? LTE")
-
-    @measurement_LTE.setter
-    def measurement_LTE(self, enable):
-        """ Sets the measurement enabled/disabled status for LTE test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT LTE," + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_WCDMA(self):
-        """ Checks measurement status for WCDMA test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? WCDMA")
-
-    @measurement_WCDMA.setter
-    def measurement_WCDMA(self, enable):
-        """ Sets the measurement enabled/disabled status for WCDMA test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT WCDMA," + enable.value
-        self._anritsu.send_command(cmd)
-
-    @property
-    def measurement_TDSCDMA(self):
-        """ Checks measurement status for TDSCDMA test case
-
-        Args:
-            None
-
-        Returns:
-            Enabled/Disabled
-        """
-        return self._anritsu.send_query("TESTMEASUREMENT? TDSCDMA")
-
-    @measurement_TDSCDMA.setter
-    def measurement_WCDMA(self, enable):
-        """ Sets the measurement enabled/disabled status for TDSCDMA test case
-
-        Args:
-            enable:  enabled/disabled
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, TestMeasurement):
-            raise ValueError(' The parameter should be of type'
-                             ' "TestMeasurement" ')
-        cmd = "TESTMEASUREMENT TDSCDMA," + enable.value
-        self._anritsu.send_command(cmd)
-
-    def set_pdn_targeteps(self, pdn_order, pdn_number=1):
-        """ Sets PDN to connect as a target when performing the
-           test case for packet handover
-
-        Args:
-            pdn_order:  PRIORITY/USER
-            pdn_number: Target PDN number
-
-        Returns:
-            None
-        """
-        cmd = "TESTPDNTARGETEPS " + pdn_order
-        if pdn_order == "USER":
-            cmd = cmd + "," + str(pdn_number)
-        self._anritsu.send_command(cmd)
-
-
-class _BaseTransceiverStation(object):
-    '''Class to interact different BTS supported by MD8475 '''
-    def __init__(self, anritsu, btsnumber):
-        if not isinstance(btsnumber, BtsNumber):
-            raise ValueError(' The parameter should be of type "BtsNumber" ')
-        self._bts_number = btsnumber.value
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def output_level(self):
-        """ Gets the Downlink power of the cell
-
-        Args:
-            None
-
-        Returns:
-            DL Power level
-        """
-        cmd = "OLVL? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @output_level.setter
-    def output_level(self, level):
-        """ Sets the Downlink power of the cell
-
-        Args:
-            level: Power level
-
-        Returns:
-            None
-        """
-        counter = 1
-        while float(level) != float(self.output_level):
-            if counter > 3:
-                raise AnritsuError("Fail to set output level in 3 tries!")
-            cmd = "OLVL {},{}".format(level, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
-    @property
-    def input_level(self):
-        """ Gets the reference power of the cell
-
-        Args:
-            None
-
-        Returns:
-            Reference Power level
-        """
-        cmd = "RFLVL? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @input_level.setter
-    def input_level(self, level):
-        """ Sets the reference power of the cell
-
-        Args:
-            level: Power level
-
-        Returns:
-            None
-        """
-        counter = 1
-        while float(level) != float(self.input_level):
-            if counter > 3:
-                raise AnritsuError("Fail to set intput level in 3 tries!")
-            cmd = "RFLVL {},{}".format(level, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
-    @property
-    def band(self):
-        """ Gets the Band of the cell
-
-        Args:
-            None
-
-        Returns:
-            Cell band
-        """
-        cmd = "BAND? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @band.setter
-    def band(self, band):
-        """ Sets the Band of the cell
-
-        Args:
-            band: Band of the cell
-
-        Returns:
-            None
-        """
-        cmd = "BAND {},{}".format(band, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def transmode(self):
-        """ Gets the Transmission Mode of the cell
-
-        Args:
-            None
-
-        Returns:
-            Transmission mode
-        """
-        cmd = "TRANSMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @transmode.setter
-    def transmode(self, tm_mode):
-        """ Sets the TM of the cell
-
-        Args:
-            TM: TM of the cell
-
-        Returns:
-            None
-        """
-        cmd = "TRANSMODE {},{}".format(tm_mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def duplex_mode(self):
-        """ Gets the Duplex Mode of the cell
-
-        Args:
-            None
-
-        Returns:
-            Duplex mode
-        """
-        cmd = "DUPLEXMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @duplex_mode.setter
-    def duplex_mode(self, mode):
-        """ Sets the duplex mode for the cell
-
-        Args:
-            mode: string indicating FDD or TDD
-
-        Returns:
-            None
-        """
-        cmd = "DUPLEXMODE {},{}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def uldl_configuration(self):
-        """ Gets the UL/DL pattern configuration for TDD bands
-
-        Args:
-            None
-
-        Returns:
-            Configuration number
-        """
-        cmd = "ULDLCONFIGURATION? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @uldl_configuration.setter
-    def uldl_configuration(self, configuration):
-        """ Sets the UL/DL pattern configuration for TDD bands
-
-        Args:
-            configuration: configuration number, [ 0, 6 ] inclusive
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: Frame structure has to be [ 0, 6 ] inclusive
-        """
-        if configuration not in range(0, 7):
-            raise ValueError("The frame structure configuration has to be a "
-                             "number between 0 and 6 inclusive")
-
-        cmd = "ULDLCONFIGURATION {},{}".format(configuration, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cfi(self):
-        """ Gets the Control Format Indicator for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The CFI number.
-        """
-        cmd = "CFI? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cfi.setter
-    def cfi(self, cfi):
-        """ Sets the Control Format Indicator for this base station.
-
-        Args:
-            cfi: one of BESTEFFORT, AUTO, 1, 2 or 3.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if cfi's value is invalid
-        """
-
-        cfi = str(cfi)
-
-        valid_values = {'BESTEFFORT', 'AUTO', '1', '2', '3'}
-        if cfi not in valid_values:
-            raise ValueError('Valid values for CFI are %r' % valid_values)
-
-        cmd = "CFI {},{}".format(cfi, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def paging_duration(self):
-        """ Gets the paging cycle duration for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The paging cycle duration in milliseconds.
-        """
-        cmd = "PCYCLE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @paging_duration.setter
-    def paging_duration(self, duration):
-        """ Sets the paging cycle duration for this base station.
-
-        Args:
-            duration: the paging cycle duration in milliseconds.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if duration's value is invalid
-        """
-
-        duration = int(duration)
-
-        valid_values = {320, 640, 1280, 2560}
-        if duration not in valid_values:
-            raise ValueError('Valid values for the paging cycle duration are '
-                             '%r.' % valid_values)
-
-        cmd = "PCYCLE {},{}".format(duration, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def phich_resource(self):
-        """ Gets the PHICH Resource setting for this base station.
-
-        Args:
-            None
-
-        Returns:
-            The PHICH Resource setting.
-        """
-        cmd = "PHICHRESOURCE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @phich_resource.setter
-    def phich_resource(self, phich):
-        """ Sets the PHICH Resource setting for this base station.
-
-        Args:
-            phich: one of 1/6, 1/2, 1, 2.
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: if phich's value is invalid
-        """
-
-        phich = str(phich)
-
-        valid_values = ['1/6', '1/2', '1', '2']
-        if phich not in valid_values:
-            raise ValueError('Valid values for PHICH Resource are %r' %
-                             valid_values)
-
-        cmd = "PHICHRESOURCE {},{}".format(phich, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def tdd_special_subframe(self):
-        """ Gets SPECIALSUBFRAME of cell.
-
-        Args:
-            None
-
-        Returns:
-            tdd_special_subframe: integer between 0,9 inclusive
-        """
-        cmd = "SPECIALSUBFRAME? " + self._bts_number
-        tdd_special_subframe = int(self._anritsu.send_query(cmd))
-        return tdd_special_subframe
-
-    @tdd_special_subframe.setter
-    def tdd_special_subframe(self, tdd_special_subframe):
-        """ Sets SPECIALSUBFRAME of cell.
-
-        Args:
-            tdd_special_subframe: int between 0,9 inclusive
-
-        Returns:
-            None
-
-        Raises:
-            ValueError: tdd_special_subframe has to be between 0,9 inclusive
-        """
-        if tdd_special_subframe not in range(0, 10):
-            raise ValueError("The special subframe config is not [0,9]")
-        cmd = "SPECIALSUBFRAME {},{}".format(tdd_special_subframe,
-                                             self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_antenna(self):
-        """ Gets the DL ANTENNA count of the cell
-
-        Args:
-            None
-
-        Returns:
-            No of DL Antenna
-        """
-        cmd = "ANTENNAS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_antenna.setter
-    def dl_antenna(self, num_antenna):
-        """ Sets the DL ANTENNA of the cell
-
-        Args:
-            c: DL ANTENNA of the cell
-
-        Returns:
-            None
-        """
-        cmd = "ANTENNAS {},{}".format(num_antenna, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def bandwidth(self):
-        """ Gets the channel bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            channel bandwidth
-        """
-        cmd = "BANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @bandwidth.setter
-    def bandwidth(self, bandwidth):
-        """ Sets the channel bandwidth of the cell
-
-        Args:
-            bandwidth: channel bandwidth  of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(' The parameter should be of type "BtsBandwidth"')
-        cmd = "BANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_bandwidth(self):
-        """ Gets the downlink bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            downlink bandwidth
-        """
-        cmd = "DLBANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_bandwidth.setter
-    def dl_bandwidth(self, bandwidth):
-        """ Sets the downlink bandwidth of the cell
-
-        Args:
-            bandwidth: downlink bandwidth of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(' The parameter should be of type "BtsBandwidth"')
-        cmd = "DLBANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ul_bandwidth(self):
-        """ Gets the uplink bandwidth of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink bandwidth
-        """
-        cmd = "ULBANDWIDTH? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @ul_bandwidth.setter
-    def ul_bandwidth(self, bandwidth):
-        """ Sets the uplink bandwidth of the cell
-
-        Args:
-            bandwidth: uplink bandwidth of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(bandwidth, BtsBandwidth):
-            raise ValueError(
-                ' The parameter should be of type "BtsBandwidth" ')
-        cmd = "ULBANDWIDTH {},{}".format(bandwidth.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def packet_rate(self):
-        """ Gets the packet rate of the cell
-
-        Args:
-            None
-
-        Returns:
-            packet rate
-        """
-        cmd = "PACKETRATE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @packet_rate.setter
-    def packet_rate(self, packetrate):
-        """ Sets the packet rate of the cell
-
-        Args:
-            packetrate: packet rate of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(packetrate, BtsPacketRate):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketRate" ')
-        cmd = "PACKETRATE {},{}".format(packetrate.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ul_windowsize(self):
-        """ Gets the uplink window size of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink window size
-        """
-        cmd = "ULWINSIZE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @ul_windowsize.setter
-    def ul_windowsize(self, windowsize):
-        """ Sets the uplink window size of the cell
-
-        Args:
-            windowsize: uplink window size of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(windowsize, BtsPacketWindowSize):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketWindowSize" ')
-        cmd = "ULWINSIZE {},{}".format(windowsize.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_windowsize(self):
-        """ Gets the downlink window size of the cell
-
-        Args:
-            None
-
-        Returns:
-            downlink window size
-        """
-        cmd = "DLWINSIZE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_windowsize.setter
-    def dl_windowsize(self, windowsize):
-        """ Sets the downlink window size of the cell
-
-        Args:
-            windowsize: downlink window size of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(windowsize, BtsPacketWindowSize):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsPacketWindowSize" ')
-        cmd = "DLWINSIZE {},{}".format(windowsize.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def service_state(self):
-        """ Gets the service state of BTS
-
-        Args:
-            None
-
-        Returns:
-            service state IN/OUT
-        """
-        cmd = "OUTOFSERVICE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @service_state.setter
-    def service_state(self, service_state):
-        """ Sets the service state of BTS
-
-        Args:
-            service_state: service state of BTS , IN/OUT
-
-        Returns:
-            None
-        """
-        if not isinstance(service_state, BtsServiceState):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsServiceState" ')
-        cmd = "OUTOFSERVICE {},{}".format(service_state.value,
-                                          self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell_barred(self):
-        """ Gets the Cell Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsCellBarred value
-        """
-        cmd = "CELLBARRED?" + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cell_barred.setter
-    def cell_barred(self, barred_option):
-        """ Sets the Cell Barred state of the cell
-
-        Args:
-            barred_option: Cell Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsCellBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsCellBarred" ')
-        cmd = "CELLBARRED {},{}".format(barred_option.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def accessclass_barred(self):
-        """ Gets the Access Class Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsAccessClassBarred value
-        """
-        cmd = "ACBARRED? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @accessclass_barred.setter
-    def accessclass_barred(self, barred_option):
-        """ Sets the Access Class Barred state of the cell
-
-        Args:
-            barred_option: Access Class Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsAccessClassBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsAccessClassBarred" ')
-        cmd = "ACBARRED {},{}".format(barred_option.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lteemergency_ac_barred(self):
-        """ Gets the LTE emergency Access Class Barred state of the cell
-
-        Args:
-            None
-
-        Returns:
-            one of BtsLteEmergencyAccessClassBarred value
-        """
-        cmd = "LTEEMERGENCYACBARRED? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lteemergency_ac_barred.setter
-    def lteemergency_ac_barred(self, barred_option):
-        """ Sets the LTE emergency Access Class Barred state of the cell
-
-        Args:
-            barred_option: Access Class Barred state of the cell
-
-        Returns:
-            None
-        """
-        if not isinstance(barred_option, BtsLteEmergencyAccessClassBarred):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsLteEmergencyAccessClassBarred" ')
-        cmd = "LTEEMERGENCYACBARRED {},{}".format(barred_option.value,
-                                                  self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def mcc(self):
-        """ Gets the MCC of the cell
-
-        Args:
-            None
-
-        Returns:
-            MCC of the cell
-        """
-        cmd = "MCC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mcc.setter
-    def mcc(self, mcc_code):
-        """ Sets the MCC of the cell
-
-        Args:
-            mcc_code: MCC of the cell
-
-        Returns:
-            None
-        """
-        cmd = "MCC {},{}".format(mcc_code, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def mnc(self):
-        """ Gets the MNC of the cell
-
-        Args:
-            None
-
-        Returns:
-            MNC of the cell
-        """
-        cmd = "MNC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mnc.setter
-    def mnc(self, mnc_code):
-        """ Sets the MNC of the cell
-
-        Args:
-            mnc_code: MNC of the cell
-
-        Returns:
-            None
-        """
-        cmd = "MNC {},{}".format(mnc_code, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_fullname_enable(self):
-        """ Gets the network full name enable status
-
-        Args:
-            None
-
-        Returns:
-            one of BtsNwNameEnable value
-        """
-        cmd = "NWFNAMEON? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_fullname_enable.setter
-    def nw_fullname_enable(self, enable):
-        """ Sets the network full name enable status
-
-        Args:
-            enable: network full name enable status
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, BtsNwNameEnable):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsNwNameEnable" ')
-        cmd = "NWFNAMEON {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_fullname(self):
-        """ Gets the network full name
-
-        Args:
-            None
-
-        Returns:
-            Network fulll name
-        """
-        cmd = "NWFNAME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_fullname.setter
-    def nw_fullname(self, fullname):
-        """ Sets the network full name
-
-        Args:
-            fullname: network full name
-
-        Returns:
-            None
-        """
-        cmd = "NWFNAME {},{}".format(fullname, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_shortname_enable(self):
-        """ Gets the network short name enable status
-
-        Args:
-            None
-
-        Returns:
-            one of BtsNwNameEnable value
-        """
-        cmd = "NWSNAMEON? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_shortname_enable.setter
-    def nw_shortname_enable(self, enable):
-        """ Sets the network short name enable status
-
-        Args:
-            enable: network short name enable status
-
-        Returns:
-            None
-        """
-        if not isinstance(enable, BtsNwNameEnable):
-            raise ValueError(' The parameter should be of type'
-                             ' "BtsNwNameEnable" ')
-        cmd = "NWSNAMEON {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nw_shortname(self):
-        """ Gets the network short name
-
-        Args:
-            None
-
-        Returns:
-            Network short name
-        """
-        cmd = "NWSNAME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nw_shortname.setter
-    def nw_shortname(self, shortname):
-        """ Sets the network short name
-
-        Args:
-            shortname: network short name
-
-        Returns:
-            None
-        """
-        cmd = "NWSNAME {},{}".format(shortname, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def apply_parameter_changes(self):
-        """ apply the parameter changes at run time
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "APPLYPARAM"
-        self._anritsu.send_command(cmd)
-
-    @property
-    def wcdma_ctch(self):
-        """ Gets the WCDMA CTCH enable/disable status
-
-        Args:
-            None
-
-        Returns:
-            one of CTCHSetup values
-        """
-        cmd = "CTCHPARAMSETUP? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @wcdma_ctch.setter
-    def wcdma_ctch(self, enable):
-        """ Sets the WCDMA CTCH enable/disable status
-
-        Args:
-            enable: WCDMA CTCH enable/disable status
-
-        Returns:
-            None
-        """
-        cmd = "CTCHPARAMSETUP {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lac(self):
-        """ Gets the Location Area Code of the cell
-
-        Args:
-            None
-
-        Returns:
-            LAC value
-        """
-        cmd = "LAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lac.setter
-    def lac(self, lac):
-        """ Sets the Location Area Code of the cell
-
-        Args:
-            lac: Location Area Code of the cell
-
-        Returns:
-            None
-        """
-        cmd = "LAC {},{}".format(lac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def rac(self):
-        """ Gets the Routing Area Code of the cell
-
-        Args:
-            None
-
-        Returns:
-            RAC value
-        """
-        cmd = "RAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @rac.setter
-    def rac(self, rac):
-        """ Sets the Routing Area Code of the cell
-
-        Args:
-            rac: Routing Area Code of the cell
-
-        Returns:
-            None
-        """
-        cmd = "RAC {},{}".format(rac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_channel(self):
-        """ Gets the downlink channel number of the cell
-
-        Args:
-            None
-
-        Returns:
-            RAC value
-        """
-        cmd = "DLCHAN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @dl_channel.setter
-    def dl_channel(self, channel):
-        """ Sets the downlink channel number of the cell
-
-        Args:
-            channel: downlink channel number of the cell
-
-        Returns:
-            None
-        """
-        cmd = "DLCHAN {},{}".format(channel, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dl_cc_enabled(self):
-        """ Checks if component carrier is enabled or disabled
-
-        Args:
-            None
-
-        Returns:
-            True if enabled, False if disabled
-        """
-        return (self._anritsu.send_query("TESTDLCC?" +
-                                         self._bts_number) == "ENABLE")
-
-    @dl_cc_enabled.setter
-    def dl_cc_enabled(self, enabled):
-        """ Enables or disables the component carrier
-
-        Args:
-            enabled: True if it should be enabled, False if disabled
-
-        Returns:
-            None
-        """
-        cmd = "TESTDLCC {},{}".format("ENABLE" if enabled else "DISABLE",
-                                      self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_mcc(self):
-        """ Gets the sector 1 MCC of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 mcc
-        """
-        cmd = "S1MCC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_mcc.setter
-    def sector1_mcc(self, mcc):
-        """ Sets the sector 1 MCC of the CDMA cell
-
-        Args:
-            mcc: sector 1 MCC of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1MCC {},{}".format(mcc, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_sid(self):
-        """ Gets the sector 1 system ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 system Id
-        """
-        cmd = "S1SID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_sid.setter
-    def sector1_sid(self, sid):
-        """ Sets the sector 1 system ID of the CDMA cell
-
-        Args:
-            sid: sector 1 system ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1SID {},{}".format(sid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_nid(self):
-        """ Gets the sector 1 network ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 network Id
-        """
-        cmd = "S1NID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_nid.setter
-    def sector1_nid(self, nid):
-        """ Sets the sector 1 network ID of the CDMA cell
-
-        Args:
-            nid: sector 1 network ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1NID {},{}".format(nid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_baseid(self):
-        """ Gets the sector 1 Base ID of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 Base Id
-        """
-        cmd = "S1BASEID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_baseid.setter
-    def sector1_baseid(self, baseid):
-        """ Sets the sector 1 Base ID of the CDMA cell
-
-        Args:
-            baseid: sector 1 Base ID of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1BASEID {},{}".format(baseid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_latitude(self):
-        """ Gets the sector 1 latitude of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 latitude
-        """
-        cmd = "S1LATITUDE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_latitude.setter
-    def sector1_latitude(self, latitude):
-        """ Sets the sector 1 latitude of the CDMA cell
-
-        Args:
-            latitude: sector 1 latitude of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1LATITUDE {},{}".format(latitude, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def sector1_longitude(self):
-        """ Gets the sector 1 longitude of the CDMA cell
-
-        Args:
-            None
-
-        Returns:
-            sector 1 longitude
-        """
-        cmd = "S1LONGITUDE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @sector1_longitude.setter
-    def sector1_longitude(self, longitude):
-        """ Sets the sector 1 longitude of the CDMA cell
-
-        Args:
-            longitude: sector 1 longitude of the CDMA cell
-
-        Returns:
-            None
-        """
-        cmd = "S1LONGITUDE {},{}".format(longitude, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def evdo_sid(self):
-        """ Gets the Sector ID of the EVDO cell
-
-        Args:
-            None
-
-        Returns:
-            Sector Id
-        """
-        cmd = "S1SECTORID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @evdo_sid.setter
-    def evdo_sid(self, sid):
-        """ Sets the Sector ID of the EVDO cell
-
-        Args:
-            sid: Sector ID of the EVDO cell
-
-        Returns:
-            None
-        """
-        cmd = "S1SECTORID {},{}".format(sid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell_id(self):
-        """ Gets the cell identity of the cell
-
-        Args:
-            None
-
-        Returns:
-            cell identity
-        """
-        cmd = "CELLID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @cell_id.setter
-    def cell_id(self, cell_id):
-        """ Sets the cell identity of the cell
-
-        Args:
-            cell_id: cell identity of the cell
-
-        Returns:
-            None
-        """
-        cmd = "CELLID {},{}".format(cell_id, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def physical_cellid(self):
-        """ Gets the physical cell id of the cell
-
-        Args:
-            None
-
-        Returns:
-            physical cell id
-        """
-        cmd = "PHYCELLID? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @physical_cellid.setter
-    def physical_cellid(self, physical_cellid):
-        """ Sets the physical cell id of the cell
-
-        Args:
-            physical_cellid: physical cell id of the cell
-
-        Returns:
-            None
-        """
-        cmd = "PHYCELLID {},{}".format(physical_cellid, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_mcs_dl(self):
-        """ Gets the Modulation and Coding scheme (DL) of the GSM cell
-
-        Args:
-            None
-
-        Returns:
-            DL MCS
-        """
-        cmd = "DLMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_mcs_dl.setter
-    def gsm_mcs_dl(self, mcs_dl):
-        """ Sets the Modulation and Coding scheme (DL) of the GSM cell
-
-        Args:
-            mcs_dl: Modulation and Coding scheme (DL) of the GSM cell
-
-        Returns:
-            None
-        """
-        cmd = "DLMCS {},{}".format(mcs_dl, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_mcs_ul(self):
-        """ Gets the Modulation and Coding scheme (UL) of the GSM cell
-
-        Args:
-            None
-
-        Returns:
-            UL MCS
-        """
-        cmd = "ULMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_mcs_ul.setter
-    def gsm_mcs_ul(self, mcs_ul):
-        """ Sets the Modulation and Coding scheme (UL) of the GSM cell
-
-        Args:
-            mcs_ul:Modulation and Coding scheme (UL) of the GSM cell
-
-        Returns:
-            None
-        """
-        cmd = "ULMCS {},{}".format(mcs_ul, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_scheduling_mode(self):
-        """ Gets the Scheduling mode of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            Scheduling mode
-        """
-        cmd = "SCHEDULEMODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_scheduling_mode.setter
-    def lte_scheduling_mode(self, mode):
-        """ Sets the Scheduling mode of the LTE cell
-
-        Args:
-            mode: STATIC (default) or DYNAMIC
-
-        Returns:
-            None
-        """
-        counter = 1
-        while mode != self.lte_scheduling_mode:
-            if counter > 3:
-                raise AnritsuError("Fail to set scheduling mode in 3 tries!")
-            cmd = "SCHEDULEMODE {},{}".format(mode, self._bts_number)
-            self._anritsu.send_command(cmd)
-            counter += 1
-            time.sleep(1)
-
-    @property
-    def tbs_pattern(self):
-        """ Gets the TBS Pattern setting for the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            TBS Pattern setting
-        """
-        cmd = "TBSPATTERN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @tbs_pattern.setter
-    def tbs_pattern(self, pattern):
-        """ Sets the TBS Pattern setting for the LTE cell
-
-        Args:
-            mode: "FULLALLOCATION" or "OFF"
-
-        Returns:
-            None
-        """
-        cmd = "TBSPATTERN {}, {}".format(pattern, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_connected_mode(self):
-        """ Gets the Connected DRX LTE cell parameter
-
-        Args:
-            None
-
-        Returns:
-            DRX connected mode (OFF, AUTO, MANUAL)
-        """
-        cmd = "DRXCONN? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_connected_mode.setter
-    def drx_connected_mode(self, mode):
-        """  Sets the Connected DRX LTE cell parameter
-
-        Args:
-            mode: OFF, AUTO, MANUAL
-
-        Returns:
-            None
-        """
-        cmd = "DRXCONN {}, {}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_on_duration_timer(self):
-        """ Gets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            None
-
-        Returns:
-            DRX mode duration timer
-        """
-        cmd = "DRXDURATIONTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_on_duration_timer.setter
-    def drx_on_duration_timer(self, time):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            timer: Amount of PDCCH subframes to wait for user data
-                to be transmitted
-
-        Returns:
-            None
-        """
-        cmd = "DRXDURATIONTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_inactivity_timer(self):
-        """ Gets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            None
-
-        Returns:
-            DRX mode inactivity timer
-        """
-        cmd = "DRXINACTIVITYTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_inactivity_timer.setter
-    def drx_inactivity_timer(self, time):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            timer: Length of the interval to wait
-
-        Returns:
-            None
-        """
-        cmd = "DRXINACTIVITYTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_retransmission_timer(self):
-        """ Gets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            None
-
-        Returns:
-            Number of PDCCH subframes to wait for retransmission
-        """
-        cmd = "DRXRETRANSTIME? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_retransmission_timer.setter
-    def drx_retransmission_timer(self, time):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            time: Number of PDCCH subframes to wait
-            for retransmission
-
-        Returns:
-            None
-        """
-        cmd = "DRXRETRANSTIME PSF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_long_cycle(self):
-        """ Gets the amount of subframes representing a DRX long cycle
-
-        Args:
-            None
-
-        Returns:
-            The amount of subframes representing one long DRX cycle.
-            One cycle consists of DRX sleep + DRX on duration
-        """
-        cmd = "DRXLONGCYCLE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_long_cycle.setter
-    def drx_long_cycle(self, time):
-        """ Sets the amount of subframes representing a DRX long cycle
-
-        Args:
-            long_cycle: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-
-        Returns:
-            None
-        """
-        cmd = "DRXLONGCYCLE SF{}, {}".format(time, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def drx_long_cycle_offset(self):
-        """ Gets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            None
-
-        Returns:
-            Long cycle offset
-        """
-        cmd = "DRXLONGCYCLESTARTOFFSET? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @drx_long_cycle_offset.setter
-    def drx_long_cycle_offset(self, offset):
-        """ Sets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            offset: Number in range 0...(long cycle - 1)
-        """
-        cmd = "DRXLONGCYCLESTARTOFFSET {}, {}".format(offset, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_mcs_dl(self):
-        """ Gets the Modulation and Coding scheme (DL) of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            DL MCS
-        """
-        cmd = "DLIMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_mcs_dl.setter
-    def lte_mcs_dl(self, mcs_dl):
-        """ Sets the Modulation and Coding scheme (DL) of the LTE cell
-
-        Args:
-            mcs_dl: Modulation and Coding scheme (DL) of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "DLIMCS {},{}".format(mcs_dl, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_mcs_ul(self):
-        """ Gets the Modulation and Coding scheme (UL) of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            UL MCS
-        """
-        cmd = "ULIMCS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_mcs_ul.setter
-    def lte_mcs_ul(self, mcs_ul):
-        """ Sets the Modulation and Coding scheme (UL) of the LTE cell
-
-        Args:
-            mcs_ul: Modulation and Coding scheme (UL) of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "ULIMCS {},{}".format(mcs_ul, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_dl_modulation_order(self):
-        """ Gets the DL modulation order of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            The DL modulation order
-        """
-        cmd = "DLRMC_MOD? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_dl_modulation_order.setter
-    def lte_dl_modulation_order(self, order):
-        """ Sets the DL modulation order of the LTE cell
-
-        Args:
-            order: the DL modulation order of the LTE cell
-
-        Returns:
-            None
-        """
-        if isinstance(order, ModulationType):
-            order = order.value
-        cmd = "DLRMC_MOD {},{}".format(order, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def lte_ul_modulation_order(self):
-        """ Gets the UL modulation order of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            The UL modulation order
-        """
-        cmd = "ULRMC_MOD? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @lte_ul_modulation_order.setter
-    def lte_ul_modulation_order(self, order):
-        """ Sets the UL modulation order of the LTE cell
-
-        Args:
-            order: the UL modulation order of the LTE cell
-
-        Returns:
-            None
-        """
-        if isinstance(order, ModulationType):
-            order = order.value
-        cmd = "ULRMC_MOD {},{}".format(order, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nrb_dl(self):
-        """ Gets the Downlink N Resource Block of the cell
-
-        Args:
-            None
-
-        Returns:
-            Downlink NRB
-        """
-        cmd = "DLNRB? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nrb_dl.setter
-    def nrb_dl(self, blocks):
-        """ Sets the Downlink N Resource Block of the cell
-
-        Args:
-            blocks: Downlink N Resource Block of the cell
-
-        Returns:
-            None
-        """
-        cmd = "DLNRB {},{}".format(blocks, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def nrb_ul(self):
-        """ Gets the uplink N Resource Block of the cell
-
-        Args:
-            None
-
-        Returns:
-            uplink NRB
-        """
-        cmd = "ULNRB? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @nrb_ul.setter
-    def nrb_ul(self, blocks):
-        """ Sets the uplink N Resource Block of the cell
-
-        Args:
-            blocks: uplink N Resource Block of the cell
-
-        Returns:
-            None
-        """
-        cmd = "ULNRB {},{}".format(blocks, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def max_nrb_ul(self):
-        ul_bandwidth = self.ul_bandwidth
-        if ul_bandwidth == 'SAMEASDL':
-            ul_bandwidth = self.dl_bandwidth
-        max_nrb = MAX_NRB_FOR_BANDWIDTH.get(ul_bandwidth, None)
-        if not max_nrb:
-            raise ValueError('Could not get maximum RB allocation'
-                             'for bandwidth: {}'.format(ul_bandwidth))
-        return max_nrb
-
-    @property
-    def mimo_support(self):
-        """ Gets the maximum supported MIMO mode for the LTE bases tation.
-
-        Returns:
-            the MIMO mode as a string
-        """
-        cmd = "LTEMIMO? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @mimo_support.setter
-    def mimo_support(self, mode):
-        """ Sets the maximum supported MIMO mode for the LTE base station.
-
-        Args:
-            mode: a string or an object of the LteMimoMode class.
-        """
-
-        if isinstance(mode, LteMimoMode):
-            mode = mode.value
-
-        cmd = "LTEMIMO {},{}".format(self._bts_number, mode)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def neighbor_cell_mode(self):
-        """ Gets the neighbor cell mode
-
-        Args:
-            None
-
-        Returns:
-            current neighbor cell mode
-        """
-        cmd = "NCLIST? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @neighbor_cell_mode.setter
-    def neighbor_cell_mode(self, mode):
-        """ Sets the neighbor cell mode
-
-        Args:
-            mode: neighbor cell mode , DEFAULT/ USERDATA
-
-        Returns:
-            None
-        """
-        cmd = "NCLIST {},{}".format(mode, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def get_neighbor_cell_type(self, system, index):
-        """ Gets the neighbor cell type
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell type
-        """
-        cmd = "NCTYPE? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def set_neighbor_cell_type(self, system, index, cell_type):
-        """ Sets the neighbor cell type
-
-        Args:
-            system: simulation model of neighbor cell
-                   LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-            cell_type: cell type
-                BTS1, BTS2, BTS3, BTS4,CELLNAME, DISABLE
-
-        Returns:
-            None
-        """
-        cmd = "NCTYPE {},{},{},{}".format(system, index, cell_type,
-                                          self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def get_neighbor_cell_name(self, system, index):
-        """ Gets the neighbor cell name
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell name
-        """
-        cmd = "NCCELLNAME? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def set_neighbor_cell_name(self, system, index, name):
-        """ Sets the neighbor cell name
-
-        Args:
-            system: simulation model of neighbor cell
-                   LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-            name: cell name
-
-        Returns:
-            None
-        """
-        cmd = "NCCELLNAME {},{},{},{}".format(system, index, name,
-                                              self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    def get_neighbor_cell_mcc(self, system, index):
-        """ Gets the neighbor cell mcc
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell mcc
-        """
-        cmd = "NCMCC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_mnc(self, system, index):
-        """ Gets the neighbor cell mnc
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell mnc
-        """
-        cmd = "NCMNC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_id(self, system, index):
-        """ Gets the neighbor cell id
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell id
-        """
-        cmd = "NCCELLID? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_tac(self, system, index):
-        """ Gets the neighbor cell tracking area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell tracking area code
-        """
-        cmd = "NCTAC? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_dl_channel(self, system, index):
-        """ Gets the neighbor cell downlink channel
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell tracking downlink channel
-        """
-        cmd = "NCDLCHAN? {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_dl_bandwidth(self, system, index):
-        """ Gets the neighbor cell downlink bandwidth
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell tracking downlink bandwidth
-        """
-        cmd = "NCDLBANDWIDTH {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_pcid(self, system, index):
-        """ Gets the neighbor cell physical cell id
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell physical cell id
-        """
-        cmd = "NCPHYCELLID {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_lac(self, system, index):
-        """ Gets the neighbor cell location area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell location area code
-        """
-        cmd = "NCLAC {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    def get_neighbor_cell_rac(self, system, index):
-        """ Gets the neighbor cell routing area code
-
-        Args:
-            system: simulation model of neighbor cell
-                    LTE, WCDMA, TDSCDMA, GSM, CDMA1X,EVDO
-            index: Index of neighbor cell
-
-        Returns:
-            neighbor cell routing area code
-        """
-        cmd = "NCRAC {},{},{}".format(system, index, self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    @property
-    def primary_scrambling_code(self):
-        """ Gets the primary scrambling code for WCDMA cell
-
-        Args:
-            None
-
-        Returns:
-            primary scrambling code
-        """
-        cmd = "PRISCRCODE? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @primary_scrambling_code.setter
-    def primary_scrambling_code(self, psc):
-        """ Sets the primary scrambling code for WCDMA cell
-
-        Args:
-            psc: primary scrambling code
-
-        Returns:
-            None
-        """
-        cmd = "PRISCRCODE {},{}".format(psc, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def tac(self):
-        """ Gets the Tracking Area Code of the LTE cell
-
-        Args:
-            None
-
-        Returns:
-            Tracking Area Code of the LTE cell
-        """
-        cmd = "TAC? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @tac.setter
-    def tac(self, tac):
-        """ Sets the Tracking Area Code of the LTE cell
-
-        Args:
-            tac: Tracking Area Code of the LTE cell
-
-        Returns:
-            None
-        """
-        cmd = "TAC {},{}".format(tac, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cell(self):
-        """ Gets the current cell for BTS
-
-        Args:
-            None
-
-        Returns:
-            current cell for BTS
-        """
-        cmd = "CELLSEL? {}".format(self._bts_number)
-        return self._anritsu.send_query(cmd)
-
-    @cell.setter
-    def cell(self, cell_name):
-        """ sets the  cell for BTS
-        Args:
-            cell_name: cell name
-
-        Returns:
-            None
-        """
-        cmd = "CELLSEL {},{}".format(self._bts_number, cell_name)
-        return self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_cbch(self):
-        """ Gets the GSM CBCH enable/disable status
-
-        Args:
-            None
-
-        Returns:
-            one of CBCHSetup values
-        """
-        cmd = "CBCHPARAMSETUP? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_cbch.setter
-    def gsm_cbch(self, enable):
-        """ Sets the GSM CBCH enable/disable status
-
-        Args:
-            enable: GSM CBCH enable/disable status
-
-        Returns:
-            None
-        """
-        cmd = "CBCHPARAMSETUP {},{}".format(enable.value, self._bts_number)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_gprs_mode(self):
-        """ Gets the GSM connection mode
-
-        Args:
-            None
-
-        Returns:
-            A string indicating if connection is EGPRS, GPRS or non-GPRS
-        """
-        cmd = "GPRS? " + self._bts_number
-        return self._anritsu.send_query(cmd)
-
-    @gsm_gprs_mode.setter
-    def gsm_gprs_mode(self, mode):
-        """ Sets the GPRS connection mode
-
-        Args:
-            mode: GPRS connection mode
-
-        Returns:
-            None
-        """
-
-        if not isinstance(mode, BtsGprsMode):
-            raise ValueError(' The parameter should be of type "BtsGprsMode"')
-        cmd = "GPRS {},{}".format(mode.value, self._bts_number)
-
-        self._anritsu.send_command(cmd)
-
-    @property
-    def gsm_slots(self):
-        """ Gets the GSM slot assignment
-
-        Args:
-            None
-
-        Returns:
-            A tuple indicating DL and UL slots.
-        """
-
-        cmd = "MLTSLTCFG? " + self._bts_number
-
-        response = self._anritsu.send_query(cmd)
-        split_response = response.split(',')
-
-        if not len(split_response) == 2:
-            raise ValueError(response)
-
-        return response[0], response[1]
-
-    @gsm_slots.setter
-    def gsm_slots(self, slots):
-        """ Sets the number of downlink / uplink slots for GSM
-
-        Args:
-            slots: a tuple containing two ints indicating (DL,UL)
-
-        Returns:
-            None
-        """
-
-        try:
-            dl, ul = slots
-            dl = int(dl)
-            ul = int(ul)
-        except:
-            raise ValueError(
-                'The parameter slot has to be a tuple containing two ints '
-                'indicating (dl,ul) slots.')
-
-        # Validate
-        if dl < 1 or ul < 1 or dl + ul > 5:
-            raise ValueError(
-                'DL and UL slots have to be >= 1 and the sum <= 5.')
-
-        cmd = "MLTSLTCFG {},{},{}".format(dl, ul, self._bts_number)
-
-        self._anritsu.send_command(cmd)
-
-
-class _VirtualPhone(object):
-    '''Class to interact with virtual phone supported by MD8475 '''
-    def __init__(self, anritsu):
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def id(self):
-        """ Gets the virtual phone ID
-
-        Args:
-            None
-
-        Returns:
-            virtual phone ID
-        """
-        cmd = "VPID? "
-        return self._anritsu.send_query(cmd)
-
-    @id.setter
-    def id(self, phonenumber):
-        """ Sets the virtual phone ID
-
-        Args:
-            phonenumber: virtual phone ID
-
-        Returns:
-            None
-        """
-        cmd = "VPID {}".format(phonenumber)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def id_c2k(self):
-        """ Gets the virtual phone ID for CDMA 1x
-
-        Args:
-            None
-
-        Returns:
-            virtual phone ID
-        """
-        cmd = "VPIDC2K? "
-        return self._anritsu.send_query(cmd)
-
-    @id_c2k.setter
-    def id_c2k(self, phonenumber):
-        """ Sets the virtual phone ID for CDMA 1x
-
-        Args:
-            phonenumber: virtual phone ID
-
-        Returns:
-            None
-        """
-        cmd = "VPIDC2K {}".format(phonenumber)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def auto_answer(self):
-        """ Gets the auto answer status of virtual phone
-
-        Args:
-            None
-
-        Returns:
-            auto answer status, ON/OFF
-        """
-        cmd = "VPAUTOANSWER? "
-        return self._anritsu.send_query(cmd)
-
-    @auto_answer.setter
-    def auto_answer(self, option):
-        """ Sets the auto answer feature
-
-        Args:
-            option: tuple with two items for turning on Auto Answer
-                    (OFF or (ON, timetowait))
-
-        Returns:
-            None
-        """
-        enable = "OFF"
-        time = 5
-
-        try:
-            enable, time = option
-        except ValueError:
-            if enable != "OFF":
-                raise ValueError("Pass a tuple with two items for"
-                                 " Turning on Auto Answer")
-        cmd = "VPAUTOANSWER {},{}".format(enable.value, time)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def calling_mode(self):
-        """ Gets the calling mode of virtual phone
-
-        Args:
-            None
-
-        Returns:
-            calling mode of virtual phone
-        """
-        cmd = "VPCALLINGMODE? "
-        return self._anritsu.send_query(cmd)
-
-    @calling_mode.setter
-    def calling_mode(self, calling_mode):
-        """ Sets the calling mode of virtual phone
-
-        Args:
-            calling_mode: calling mode of virtual phone
-
-        Returns:
-            None
-        """
-        cmd = "VPCALLINGMODE {}".format(calling_mode)
-        self._anritsu.send_command(cmd)
-
-    def set_voice_off_hook(self):
-        """ Set the virtual phone operating mode to Voice Off Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 0"
-        return self._anritsu.send_command(cmd)
-
-    def set_voice_on_hook(self):
-        """ Set the virtual phone operating mode to Voice On Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 1"
-        return self._anritsu.send_command(cmd)
-
-    def set_video_off_hook(self):
-        """ Set the virtual phone operating mode to Video Off Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 2"
-        return self._anritsu.send_command(cmd)
-
-    def set_video_on_hook(self):
-        """ Set the virtual phone operating mode to Video On Hook
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 3"
-        return self._anritsu.send_command(cmd)
-
-    def set_call_waiting(self):
-        """ Set the virtual phone operating mode to Call waiting
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "OPERATEVPHONE 4"
-        return self._anritsu.send_command(cmd)
-
-    @property
-    def status(self):
-        """ Gets the virtual phone status
-
-        Args:
-            None
-
-        Returns:
-            virtual phone status
-        """
-        cmd = "VPSTAT?"
-        status = self._anritsu.send_query(cmd)
-        return _VP_STATUS[status]
-
-    def sendSms(self, phoneNumber, message):
-        """ Sends the SMS data from Anritsu to UE
-
-        Args:
-            phoneNumber: sender of SMS
-            message: message text
-
-        Returns:
-            None
-        """
-        cmd = ("SENDSMS /?PhoneNumber=001122334455&Sender={}&Text={}"
-               "&DCS=00").format(phoneNumber, AnritsuUtils.gsm_encode(message))
-        return self._anritsu.send_command(cmd)
-
-    def sendSms_c2k(self, phoneNumber, message):
-        """ Sends the SMS data from Anritsu to UE (in CDMA)
-
-        Args:
-            phoneNumber: sender of SMS
-            message: message text
-
-        Returns:
-            None
-        """
-        cmd = ("C2KSENDSMS System=CDMA\&Originating_Address={}\&UserData={}"
-               ).format(phoneNumber, AnritsuUtils.cdma_encode(message))
-        return self._anritsu.send_command(cmd)
-
-    def receiveSms(self):
-        """ Receives SMS messages sent by the UE in an external application
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        return self._anritsu.send_query("RECEIVESMS?")
-
-    def receiveSms_c2k(self):
-        """ Receives SMS messages sent by the UE(in CDMA) in an external application
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        return self._anritsu.send_query("C2KRECEIVESMS?")
-
-    def setSmsStatusReport(self, status):
-        """ Set the Status Report value of the SMS
-
-        Args:
-            status: status code
-
-        Returns:
-            None
-        """
-        cmd = "SMSSTATUSREPORT {}".format(status)
-        return self._anritsu.send_command(cmd)
-
-
-class _PacketDataNetwork(object):
-    '''Class to configure PDN parameters'''
-    def __init__(self, anritsu, pdnnumber):
-        self._pdn_number = pdnnumber
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    # Default Gateway Selection
-    @property
-    def pdn_DG_selection(self):
-        """ Gets the default gateway for the PDN
-
-        Args:
-          None
-
-        Returns:
-          Current UE status
-        """
-        cmd = "PDNDEFAULTGATEWAY? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_DG_selection.setter
-    def pdn_DG_selection(self, selection):
-        """ Sets the default gateway selection for the PDN
-
-        Args:
-          Selection: COMMON or USER
-
-        Returns:
-          None
-        """
-        cmd = "PDNDEFAULTGATEWAY {},{}".format(self._pdn_number, selection)
-        self._anritsu.send_command(cmd)
-
-    # PDN specific Default Gateway:
-    @property
-    def pdn_gateway_ipv4addr(self):
-        """ Gets the IPv4 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        cmd = "PDNDGIPV4? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_gateway_ipv4addr.setter
-    def pdn_gateway_ipv4addr(self, ipv4_addr):
-        """ sets the IPv4 address of the default gateway
-
-        Args:
-            ipv4_addr: IPv4 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "PDNDGIPV4 {},{}".format(self._pdn_number, ipv4_addr)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_gateway_ipv6addr(self):
-        """ Gets the IPv6 address of the default gateway
-
-        Args:
-          None
-
-        Returns:
-            current UE status
-        """
-        cmd = "PDNDGIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_gateway_ipv6addr.setter
-    def pdn_gateway_ipv6addr(self, ipv6_addr):
-        """ sets the IPv6 address of the default gateway
-
-        Args:
-            ipv6_addr: IPv6 address of the default gateway
-
-        Returns:
-            None
-        """
-        cmd = "PDNDGIPV6 {},{}".format(self._pdn_number, ipv6_addr)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ue_address_iptype(self):
-        """ Gets IP type of UE for particular PDN
-
-        Args:
-            None
-
-        Returns:
-            IP type of UE for particular PDN
-        """
-        cmd = "PDNIPTYPE? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @ue_address_iptype.setter
-    def ue_address_iptype(self, ip_type):
-        """ Set IP type of UE for particular PDN
-
-        Args:
-            ip_type: IP type of UE
-
-        Returns:
-            None
-        """
-        if not isinstance(ip_type, IPAddressType):
-            raise ValueError(
-                ' The parameter should be of type "IPAddressType"')
-        cmd = "PDNIPTYPE {},{}".format(self._pdn_number, ip_type.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ue_address_ipv4(self):
-        """ Gets UE IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            UE IPv4 address
-        """
-        cmd = "PDNIPV4? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @ue_address_ipv4.setter
-    def ue_address_ipv4(self, ip_address):
-        """ Set UE IPv4 address
-
-        Args:
-            ip_address: UE IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNIPV4 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ue_address_ipv6(self):
-        """ Gets UE IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            UE IPv6 address
-        """
-        cmd = "PDNIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @ue_address_ipv6.setter
-    def ue_address_ipv6(self, ip_address):
-        """ Set UE IPv6 address
-
-        Args:
-            ip_address: UE IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNIPV6 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def primary_dns_address_ipv4(self):
-        """ Gets Primary DNS server IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            Primary DNS server IPv4 address
-        """
-        cmd = "PDNDNSIPV4PRI? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @primary_dns_address_ipv4.setter
-    def primary_dns_address_ipv4(self, ip_address):
-        """ Set Primary DNS server IPv4 address
-
-        Args:
-            ip_address: Primary DNS server IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNDNSIPV4PRI {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def secondary_dns_address_ipv4(self):
-        """ Gets secondary DNS server IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            secondary DNS server IPv4 address
-        """
-        cmd = "PDNDNSIPV4SEC? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @secondary_dns_address_ipv4.setter
-    def secondary_dns_address_ipv4(self, ip_address):
-        """ Set secondary DNS server IPv4 address
-
-        Args:
-            ip_address: secondary DNS server IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNDNSIPV4SEC {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dns_address_ipv6(self):
-        """ Gets DNS server IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            DNS server IPv6 address
-        """
-        cmd = "PDNDNSIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @dns_address_ipv6.setter
-    def dns_address_ipv6(self, ip_address):
-        """ Set DNS server IPv6 address
-
-        Args:
-            ip_address: DNS server IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNDNSIPV6 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv4(self):
-        """ Gets Secondary P-CSCF IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            Secondary P-CSCF IPv4 address
-        """
-        cmd = "PDNPCSCFIPV4? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv4.setter
-    def cscf_address_ipv4(self, ip_address):
-        """ Set Secondary P-CSCF IPv4 address
-
-        Args:
-            ip_address: Secondary P-CSCF IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNPCSCFIPV4 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv6(self):
-        """ Gets P-CSCF IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            P-CSCF IPv6 address
-        """
-        cmd = "PDNPCSCFIPV6? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv6.setter
-    def cscf_address_ipv6(self, ip_address):
-        """ Set P-CSCF IPv6 address
-
-        Args:
-            ip_address: P-CSCF IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "PDNPCSCFIPV6 {},{}".format(self._pdn_number, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_ims(self):
-        """ Get PDN IMS VNID binding status
-
-        Args:
-            None
-
-        Returns:
-            PDN IMS VNID binding status
-        """
-        cmd = "PDNIMS? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_ims.setter
-    def pdn_ims(self, switch):
-        """ Set PDN IMS VNID binding Enable/Disable
-
-        Args:
-            switch: "ENABLE/DISABLE"
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type'
-                             ' "Switch", ie, ENABLE or DISABLE ')
-        cmd = "PDNIMS {},{}".format(self._pdn_number, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_vnid(self):
-        """ Get PDN IMS VNID
-
-        Args:
-            None
-
-        Returns:
-            PDN IMS VNID
-        """
-        cmd = "PDNVNID? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_vnid.setter
-    def pdn_vnid(self, vnid):
-        """ Set PDN IMS VNID
-
-        Args:
-            vnid: 1~99
-
-        Returns:
-            None
-        """
-        cmd = "PDNVNID {},{}".format(self._pdn_number, vnid)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_apn_name(self):
-        """ Get PDN APN NAME
-
-        Args:
-            None
-
-        Returns:
-            PDN APN NAME
-        """
-        cmd = "PDNCHECKAPN? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_apn_name.setter
-    def pdn_apn_name(self, name):
-        """ Set PDN APN NAME
-
-        Args:
-            name: fast.t-mobile.com, ims
-
-        Returns:
-            None
-        """
-        cmd = "PDNCHECKAPN {},{}".format(self._pdn_number, name)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def pdn_qci(self):
-        """ Get PDN QCI Value
-
-        Args:
-            None
-
-        Returns:
-            PDN QCI Value
-        """
-        cmd = "PDNQCIDEFAULT? " + self._pdn_number
-        return self._anritsu.send_query(cmd)
-
-    @pdn_qci.setter
-    def pdn_qci(self, qci_value):
-        """ Set PDN QCI Value
-
-        Args:
-            qci_value: 5, 9
-
-        Returns:
-            None
-        """
-        cmd = "PDNQCIDEFAULT {},{}".format(self._pdn_number, qci_value)
-        self._anritsu.send_command(cmd)
-
-
-class _TriggerMessage(object):
-    '''Class to interact with trigger message handling supported by MD8475 '''
-    def __init__(self, anritsu):
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    def set_reply_type(self, message_id, reply_type):
-        """ Sets the reply type of the trigger information
-
-        Args:
-            message_id: trigger information message Id
-            reply_type: reply type of the trigger information
-
-        Returns:
-            None
-        """
-        if not isinstance(message_id, TriggerMessageIDs):
-            raise ValueError(' The parameter should be of type'
-                             ' "TriggerMessageIDs"')
-        if not isinstance(reply_type, TriggerMessageReply):
-            raise ValueError(' The parameter should be of type'
-                             ' "TriggerMessageReply"')
-
-        cmd = "REJECTTYPE {},{}".format(message_id.value, reply_type.value)
-        self._anritsu.send_command(cmd)
-
-    def set_reject_cause(self, message_id, cause):
-        """ Sets the reject cause of the trigger information
-
-        Args:
-            message_id: trigger information message Id
-            cause: cause for reject
-
-        Returns:
-            None
-        """
-        if not isinstance(message_id, TriggerMessageIDs):
-            raise ValueError(' The parameter should be of type'
-                             ' "TriggerMessageIDs"')
-
-        cmd = "REJECTCAUSE {},{}".format(message_id.value, cause)
-        self._anritsu.send_command(cmd)
-
-
-class _IMS_Services(object):
-    '''Class to configure and operate IMS Services'''
-    def __init__(self, anritsu, vnid):
-        self._vnid = vnid
-        self._anritsu = anritsu
-        self.log = anritsu.log
-
-    @property
-    def sync(self):
-        """ Gets Sync Enable status
-
-        Args:
-            None
-
-        Returns:
-            VNID Sync Enable status
-        """
-        cmd = "IMSSYNCENABLE? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @sync.setter
-    def sync(self, switch):
-        """ Set Sync Enable or Disable
-
-        Args:
-            sync: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSSYNCENABLE {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv4(self):
-        """ Gets CSCF IPv4 address
-
-        Args:
-            None
-
-        Returns:
-            CSCF IPv4 address
-        """
-        cmd = "IMSCSCFIPV4? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv4.setter
-    def cscf_address_ipv4(self, ip_address):
-        """ Set CSCF IPv4 address
-
-        Args:
-            ip_address: CSCF IPv4 address
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFIPV4 {},{}".format(self._vnid, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_address_ipv6(self):
-        """ Gets CSCF IPv6 address
-
-        Args:
-            None
-
-        Returns:
-            CSCF IPv6 address
-        """
-        cmd = "IMSCSCFIPV6? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_address_ipv6.setter
-    def cscf_address_ipv6(self, ip_address):
-        """ Set CSCF IPv6 address
-
-        Args:
-            ip_address: CSCF IPv6 address
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFIPV6 {},{}".format(self._vnid, ip_address)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def imscscf_iptype(self):
-        """ Gets CSCF IP Type
-
-        Args:
-            None
-
-        Returns:
-            CSCF IP Type
-        """
-        cmd = "IMSCSCFIPTYPE? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @imscscf_iptype.setter
-    def imscscf_iptype(self, iptype):
-        """ Set CSCF IP Type
-
-        Args:
-            iptype: IPV4, IPV6, IPV4V6
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFIPTYPE {},{}".format(self._vnid, iptype)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_monitoring_ua(self):
-        """ Get CSCF Monitoring UA URI
-
-        Args:
-            None
-
-        Returns:
-            CSCF Monitoring UA URI
-        """
-        cmd = "IMSCSCFUAURI? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_monitoring_ua.setter
-    def cscf_monitoring_ua(self, ua_uri):
-        """ Set CSCF Monitoring UA URI
-
-        Args:
-            ua_uri: CSCF Monitoring UA URI
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUAURI {},{}".format(self._vnid, ua_uri)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_host_name(self):
-        """ Get CSCF Host Name
-
-        Args:
-            None
-
-        Returns:
-            CSCF Host Name
-        """
-        cmd = "IMSCSCFNAME? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_host_name.setter
-    def cscf_host_name(self, host_name):
-        """ Set CSCF Host Name
-
-        Args:
-            host_name: CSCF Host Name
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFNAME {},{}".format(self._vnid, host_name)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_ims_authentication(self):
-        """ Get CSCF IMS Auth Value
-
-        Args:
-            None
-
-        Returns:
-            CSCF IMS Auth
-        """
-        cmd = "IMSCSCFAUTH? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_ims_authentication.setter
-    def cscf_ims_authentication(self, on_off):
-        """ Set CSCF IMS Auth Value
-
-        Args:
-            on_off: CSCF IMS Auth ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFAUTH {},{}".format(self._vnid, on_off)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_precondition(self):
-        """ Get CSCF IMS Precondition
-
-        Args:
-            None
-
-        Returns:
-            CSCF IMS Precondition
-        """
-        cmd = "IMSCSCFPRECONDITION? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_precondition.setter
-    def cscf_precondition(self, on_off):
-        """ Set CSCF IMS Precondition
-
-        Args:
-            on_off: CSCF IMS Precondition ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFPRECONDITION {},{}".format(self._vnid, on_off)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def cscf_virtual_ua(self):
-        """ Get CSCF Virtual UA URI
-
-        Args:
-            None
-
-        Returns:
-            CSCF Virtual UA URI
-        """
-        cmd = "IMSCSCFVUAURI? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @cscf_virtual_ua.setter
-    def cscf_virtual_ua(self, ua_uri):
-        """ Set CSCF Virtual UA URI
-
-        Args:
-            ua_uri: CSCF Virtual UA URI
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFVUAURI {},{}".format(self._vnid, ua_uri)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def tmo_cscf_userslist_add(self):
-        """ Get CSCF USERLIST
-
-        Args:
-            None
-
-        Returns:
-            CSCF USERLIST
-        """
-        cmd = "IMSCSCFUSERSLIST? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @tmo_cscf_userslist_add.setter
-    def tmo_cscf_userslist_add(self, username):
-        """ Set CSCF USER to USERLIST
-            This is needed if IMS AUTH is enabled
-
-        Args:
-            username: CSCF Username
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUSERSLISTADD {},{},00112233445566778899AABBCCDDEEFF,TS34108,AKAV1_MD5,\
-        OPC,00000000000000000000000000000000,8000,TRUE,FALSE,0123456789ABCDEF0123456789ABCDEF,\
-        54CDFEAB9889000001326754CDFEAB98,6754CDFEAB9889BAEFDC457623100132,\
-        326754CDFEAB9889BAEFDC4576231001,TRUE,TRUE,TRUE".format(
-            self._vnid, username)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def fi_cscf_userslist_add(self):
-        """ Get CSCF USERLIST
-
-        Args:
-            None
-
-        Returns:
-            CSCF USERLIST
-        """
-        cmd = "IMSCSCFUSERSLIST? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @fi_cscf_userslist_add.setter
-    def fi_cscf_userslist_add(self, username):
-        """ Set CSCF USER to USERLIST
-            This is needed if IMS AUTH is enabled
-
-        Args:
-            username: CSCF Username
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUSERSLISTADD {},{},00112233445566778899AABBCCDDEEFF,TS34108,AKAV1_MD5,\
-        OPC,00000000000000000000000000000000,8000,TRUE,FALSE,0123456789ABCDEF0123456789ABCDEF,\
-        54CDFEAB9889000001326754CDFEAB98,6754CDFEAB9889BAEFDC457623100132,\
-        326754CDFEAB9889BAEFDC4576231001,TRUE,TRUE,TRUE".format(
-            self._vnid, username)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def vzw_cscf_userslist_add(self):
-        """ Get CSCF USERLIST
-
-        Args:
-            None
-
-        Returns:
-            CSCF USERLIST
-        """
-        cmd = "IMSCSCFUSERSLIST? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @vzw_cscf_userslist_add.setter
-    def vzw_cscf_userslist_add(self, username):
-        """ Set CSCF USER to USERLIST
-            This is needed if IMS AUTH is enabled
-
-        Args:
-            username: CSCF Username
-
-        Returns:
-            None
-        """
-        cmd = "IMSCSCFUSERSLISTADD {},{},465B5CE8B199B49FAA5F0A2EE238A6BC,MILENAGE,AKAV1_MD5,\
-        OP,5F1D289C5D354D0A140C2548F5F3E3BA,8000,TRUE,FALSE,0123456789ABCDEF0123456789ABCDEF,\
-        54CDFEAB9889000001326754CDFEAB98,6754CDFEAB9889BAEFDC457623100132,\
-        326754CDFEAB9889BAEFDC4576231001,TRUE,TRUE,TRUE".format(
-            self._vnid, username)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def dns(self):
-        """ Gets DNS Enable status
-
-        Args:
-            None
-
-        Returns:
-            VNID DNS Enable status
-        """
-        cmd = "IMSDNS? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @dns.setter
-    def dns(self, switch):
-        """ Set DNS Enable or Disable
-
-        Args:
-            sync: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSDNS {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ndp_nic(self):
-        """ Gets NDP Network Interface name
-
-        Args:
-            None
-
-        Returns:
-            NDP NIC name
-        """
-        cmd = "IMSNDPNIC? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @ndp_nic.setter
-    def ndp_nic(self, nic_name):
-        """ Set NDP Network Interface name
-
-        Args:
-            nic_name: NDP Network Interface name
-
-        Returns:
-            None
-        """
-        cmd = "IMSNDPNIC {},{}".format(self._vnid, nic_name)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def ndp_prefix(self):
-        """ Gets NDP IPv6 Prefix
-
-        Args:
-            None
-
-        Returns:
-            NDP IPv6 Prefix
-        """
-        cmd = "IMSNDPPREFIX? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @ndp_prefix.setter
-    def ndp_prefix(self, prefix_addr):
-        """ Set NDP IPv6 Prefix
-
-        Args:
-            prefix_addr: NDP IPV6 Prefix Addr
-
-        Returns:
-            None
-        """
-        cmd = "IMSNDPPREFIX {},{},64".format(self._vnid, prefix_addr)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def psap(self):
-        """ Gets PSAP Enable status
-
-        Args:
-            None
-
-        Returns:
-            VNID PSAP Enable status
-        """
-        cmd = "IMSPSAP? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @psap.setter
-    def psap(self, switch):
-        """ Set PSAP Enable or Disable
-
-        Args:
-            switch: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSPSAP {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    @property
-    def psap_auto_answer(self):
-        """ Gets PSAP Auto Answer status
-
-        Args:
-            None
-
-        Returns:
-            VNID PSAP Auto Answer status
-        """
-        cmd = "IMSPSAPAUTOANSWER? " + self._vnid
-        return self._anritsu.send_query(cmd)
-
-    @psap_auto_answer.setter
-    def psap_auto_answer(self, switch):
-        """ Set PSAP Auto Answer Enable or Disable
-
-        Args:
-            switch: ENABLE/DISABLE
-
-        Returns:
-            None
-        """
-        if not isinstance(switch, Switch):
-            raise ValueError(' The parameter should be of type "Switch"')
-        cmd = "IMSPSAPAUTOANSWER {},{}".format(self._vnid, switch.value)
-        self._anritsu.send_command(cmd)
-
-    def start_virtual_network(self):
-        """ Start the specified Virtual Network (IMS service)
-
-        Args:
-            None
-
-        Returns:
-            None
-        """
-        cmd = "IMSSTARTVN " + self._vnid
-        return self._anritsu.send_command(cmd)
diff --git a/src/antlion/controllers/anritsu_lib/mg3710a.py b/src/antlion/controllers/anritsu_lib/mg3710a.py
deleted file mode 100644
index 9d6c559..0000000
--- a/src/antlion/controllers/anritsu_lib/mg3710a.py
+++ /dev/null
@@ -1,715 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Controller interface for Anritsu Signal Generator MG3710A.
-"""
-
-import logging
-import socket
-
-from antlion.controllers.anritsu_lib._anritsu_utils import AnritsuError
-from antlion.controllers.anritsu_lib._anritsu_utils import NO_ERROR
-from antlion.controllers.anritsu_lib._anritsu_utils import OPERATION_COMPLETE
-
-from antlion import tracelogger
-
-TERMINATOR = "\n"
-
-
-def create(configs):
-    objs = []
-    for c in configs:
-        ip_address = c["ip_address"]
-        objs.append(MG3710A(ip_address))
-    return objs
-
-
-def destroy(objs):
-    return
-
-
-class MG3710A(object):
-    """Class to communicate with Anritsu Signal Generator MG3710A.
-       This uses GPIB command to interface with Anritsu MG3710A """
-
-    def __init__(self, ip_address):
-        self._ipaddr = ip_address
-        self.log = tracelogger.TraceLogger(logging.getLogger())
-
-        # Open socket connection to Signaling Tester
-        self.log.info("Opening Socket Connection with "
-                      "Signal Generator MG3710A ({}) ".format(self._ipaddr))
-        try:
-            self._sock = socket.create_connection((self._ipaddr, 49158),
-                                                  timeout=30)
-            self.send_query("*IDN?", 60)
-            self.log.info("Communication Signal Generator MG3710A OK.")
-            self.log.info("Opened Socket connection to ({})"
-                          "with handle ({})".format(self._ipaddr, self._sock))
-        except socket.timeout:
-            raise AnritsuError("Timeout happened while conencting to"
-                               " Anritsu MG3710A")
-        except socket.error:
-            raise AnritsuError("Socket creation error")
-
-    def disconnect(self):
-        """ Disconnect Signal Generator MG3710A
-
-        Args:
-          None
-
-        Returns:
-            None
-        """
-        self.send_command(":SYST:COMM:GTL", opc=False)
-        self._sock.close()
-
-    def send_query(self, query, sock_timeout=10):
-        """ Sends a Query message to Anritsu MG3710A and return response
-
-        Args:
-            query - Query string
-
-        Returns:
-            query response
-        """
-        self.log.info("--> {}".format(query))
-        querytoSend = (query + TERMINATOR).encode('utf-8')
-        self._sock.settimeout(sock_timeout)
-        try:
-            self._sock.send(querytoSend)
-            result = self._sock.recv(256).rstrip(TERMINATOR.encode('utf-8'))
-            response = result.decode('utf-8')
-            self.log.info('<-- {}'.format(response))
-            return response
-        except socket.timeout:
-            raise AnritsuError("Timeout: Response from Anritsu")
-        except socket.error:
-            raise AnritsuError("Socket Error")
-
-    def send_command(self, command, sock_timeout=30, opc=True):
-        """ Sends a Command message to Anritsu MG3710A
-
-        Args:
-            command - command string
-
-        Returns:
-            None
-        """
-        self.log.info("--> {}".format(command))
-        cmdToSend = (command + TERMINATOR).encode('utf-8')
-        self._sock.settimeout(sock_timeout)
-        try:
-            self._sock.send(cmdToSend)
-            if opc:
-                # check operation status
-                status = self.send_query("*OPC?")
-                if int(status) != OPERATION_COMPLETE:
-                    raise AnritsuError("Operation not completed")
-        except socket.timeout:
-            raise AnritsuError("Timeout for Command Response from Anritsu")
-        except socket.error:
-            raise AnritsuError("Socket Error for Anritsu command")
-        return
-
-    @property
-    def sg(self):
-        """ Gets current selected signal generator(SG)
-
-        Args:
-            None
-
-        Returns:
-            selected signal generatr number
-        """
-        return self.send_query("PORT?")
-
-    @sg.setter
-    def sg(self, sg_number):
-        """ Selects the signal generator to be controlled
-
-        Args:
-            sg_number: sg number 1 | 2
-
-        Returns:
-            None
-        """
-        cmd = "PORT {}".format(sg_number)
-        self.send_command(cmd)
-
-    def get_modulation_state(self, sg=1):
-        """ Gets the RF signal modulation state (ON/OFF) of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            modulation state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("OUTP{}:MOD?".format(sg))
-
-    def set_modulation_state(self, state, sg=1):
-        """ Sets the RF signal modulation state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "OUTP{}:MOD {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_rf_output_state(self, sg=1):
-        """ Gets RF signal output state (ON/OFF) of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            RF signal output state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("OUTP{}?".format(sg))
-
-    def set_rf_output_state(self, state, sg=1):
-        """ Sets the RF signal output state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "OUTP{} {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_frequency(self, sg=1):
-        """ Gets the selected frequency of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            selected frequency
-        """
-        return self.send_query("SOUR{}:FREQ?".format(sg))
-
-    def set_frequency(self, freq, sg=1):
-        """ Sets the frequency of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            freq : frequency
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ {}".format(sg, freq)
-        self.send_command(cmd)
-
-    def get_frequency_offset_state(self, sg=1):
-        """ Gets the Frequency Offset enable state (ON/OFF) of signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            Frequency Offset enable state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:FREQ:OFFS:STAT?".format(sg))
-
-    def set_frequency_offset_state(self, state, sg=1):
-        """ Sets the Frequency Offset enable state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : enable state, ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:OFFS:STAT {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_frequency_offset(self, sg=1):
-        """ Gets the current frequency offset value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current frequency offset value
-        """
-        return self.send_query("SOUR{}:FREQ:OFFS?".format(sg))
-
-    def set_frequency_offset(self, offset, sg=1):
-        """ Sets the frequency offset value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            offset : frequency offset value
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:OFFS {}".format(sg, offset)
-        self.send_command(cmd)
-
-    def get_frequency_offset_multiplier_state(self, sg=1):
-        """ Gets the Frequency Offset multiplier enable state (ON/OFF) of
-            signal generator
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            Frequency Offset  multiplier enable state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:FREQ:MULT:STAT?".format(sg))
-
-    def set_frequency_offset_multiplier_state(self, state, sg=1):
-        """ Sets the  Frequency Offset multiplier enable state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : enable state, ON/OFF
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:MULT:STAT {}".format(sg, state)
-        self.send_command(cmd)
-
-    def get_frequency_offset_multiplier(self, sg=1):
-        """ Gets the current frequency offset multiplier value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            frequency offset multiplier value
-        """
-        return self.send_query("SOUR{}:FREQ:MULT?".format(sg))
-
-    def set_frequency_offset_multiplier(self, multiplier, sg=1):
-        """ Sets the frequency offset multiplier value
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            multiplier : frequency offset multiplier value
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:MULT {}".format(sg, multiplier)
-        self.send_command(cmd)
-
-    def get_channel(self, sg=1):
-        """ Gets the current channel number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current channel number
-        """
-        return self.send_query("SOUR{}:FREQ:CHAN:NUMB?".format(sg))
-
-    def set_channel(self, channel, sg=1):
-        """ Sets the channel number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            channel : channel number
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:CHAN:NUMB {}".format(sg, channel)
-        self.send_command(cmd)
-
-    def get_channel_group(self, sg=1):
-        """ Gets the current channel group number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current channel group number
-        """
-        return self.send_query("SOUR{}:FREQ:CHAN:GRO?".format(sg))
-
-    def set_channel_group(self, group, sg=1):
-        """ Sets the channel group number
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            group : channel group number
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:CHAN:GRO {}".format(sg, group)
-        self.send_command(cmd)
-
-    def get_rf_output_level(self, sg=1):
-        """ Gets the current RF output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current RF output level
-        """
-        return self.send_query("SOUR{}:POW:CURR?".format(sg))
-
-    def get_output_level_unit(self, sg=1):
-        """ Gets the current RF output level unit
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current RF output level unit
-        """
-        return self.send_query("UNIT{}:POW?".format(sg))
-
-    def set_output_level_unit(self, unit, sg=1):
-        """ Sets the RF output level unit
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            unit : Output level unit
-
-        Returns:
-            None
-        """
-        cmd = "UNIT{}:POW {}".format(sg, unit)
-        self.send_command(cmd)
-
-    def get_output_level(self, sg=1):
-        """ Gets the Output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            Output level
-        """
-        return self.send_query("SOUR{}:POW?".format(sg))
-
-    def set_output_level(self, level, sg=1):
-        """ Sets the Output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            level : Output level
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:POW {}".format(sg, level)
-        self.send_command(cmd)
-
-    def get_arb_state(self, sg=1):
-        """ Gets the ARB function state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            ARB function state . 0 (OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:RAD:ARB?".format(sg))
-
-    def set_arb_state(self, state, sg=1):
-        """ Sets the ARB function state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            state : enable state (ON/OFF)
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB {}".format(sg, state)
-        self.send_command(cmd)
-
-    def restart_arb_waveform_pattern(self, sg=1):
-        """ playback the waveform pattern from the beginning.
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WAV:REST".format(sg)
-        self.send_command(cmd)
-
-    def load_waveform(self, package_name, pattern_name, memory, sg=1):
-        """ loads the waveform from HDD to specified memory
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            package_name : Package name of signal
-            pattern_name : Pattern name of signal
-            memory: memory for the signal - "A" or "B"
-
-        Returns:
-            None
-        """
-        cmd = "MMEM{}:LOAD:WAV:WM{} '{}','{}'".format(sg, memory, package_name,
-                                                      pattern_name)
-        self.send_command(cmd)
-
-    def select_waveform(self, package_name, pattern_name, memory, sg=1):
-        """ Selects the waveform to output on specified memory
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            package_name : Package name of signal
-            pattern_name : Pattern name of signal
-            memory: memory for the signal - "A" or "B"
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:WAV '{}','{}'".format(
-            sg, memory, package_name, pattern_name)
-        self.send_command(cmd)
-
-    def get_freq_relative_display_status(self, sg=1):
-        """ Gets the frequency relative display status
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            frequency relative display status.   0 (OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:FREQ:REF:STAT?".format(sg))
-
-    def set_freq_relative_display_status(self, enable, sg=1):
-        """ Sets frequency relative display status
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            enable : enable type (ON/OFF)
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:REF:STAT {}".format(sg, enable)
-        self.send_command(cmd)
-
-    def get_freq_channel_display_type(self, sg=1):
-        """ Gets the selected type(frequency/channel) for input display
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            selected type(frequecy/channel) for input display
-        """
-        return self.send_query("SOUR{}:FREQ:TYPE?".format(sg))
-
-    def set_freq_channel_display_type(self, freq_channel, sg=1):
-        """ Sets thes type(frequency/channel) for input display
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            freq_channel : display type (frequency/channel)
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:FREQ:TYPE {}".format(sg, freq_channel)
-        self.send_command(cmd)
-
-    def get_arb_combination_mode(self, sg=1):
-        """ Gets the current mode to generate the pattern
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            current mode to generate the pattern
-        """
-        return self.send_query("SOUR{}:RAD:ARB:PCOM?".format(sg))
-
-    def set_arb_combination_mode(self, mode, sg=1):
-        """ Sets the mode to generate the pattern
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            mode : pattern generation mode
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:PCOM {}".format(sg, mode)
-        self.send_command(cmd)
-
-    def get_arb_pattern_aorb_state(self, a_or_b, sg=1):
-        """ Gets the Pattern A/B output state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Patten A or Pattern B( "A" or "B")
-
-        Returns:
-            Pattern A/B output state . 0(OFF) | 1(ON)
-        """
-        return self.send_query("SOUR{}:RAD:ARB:WM{}:OUTP?".format(a_or_b, sg))
-
-    def set_arb_pattern_aorb_state(self, a_or_b, state, sg=1):
-        """ Sets the Pattern A/B output state
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Patten A or Pattern B( "A" or "B")
-            state : output state
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:OUTP {}".format(sg, a_or_b, state)
-        self.send_command(cmd)
-
-    def get_arb_level_aorb(self, a_or_b, sg=1):
-        """ Gets the Pattern A/B output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Patten A or Pattern B( "A" or "B")
-
-        Returns:
-             Pattern A/B output level
-        """
-        return self.send_query("SOUR{}:RAD:ARB:WM{}:POW?".format(sg, a_or_b))
-
-    def set_arb_level_aorb(self, a_or_b, level, sg=1):
-        """ Sets the Pattern A/B output level
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Patten A or Pattern B( "A" or "B")
-            level : output level
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:POW {}".format(sg, a_or_b, level)
-        self.send_command(cmd)
-
-    def get_arb_freq_offset(self, sg=1):
-        """ Gets the frequency offset between Pattern A and Patten B
-            when CenterSignal is A or B.
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            frequency offset between Pattern A and Patten B
-        """
-        return self.send_query("SOUR{}:RAD:ARB:FREQ:OFFS?".format(sg))
-
-    def set_arb_freq_offset(self, offset, sg=1):
-        """ Sets the frequency offset between Pattern A and Patten B when
-            CenterSignal is A or B.
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            offset : frequency offset
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:FREQ:OFFS {}".format(sg, offset)
-        self.send_command(cmd)
-
-    def get_arb_freq_offset_aorb(self, sg=1):
-        """ Gets the frequency offset of Pattern A/Pattern B based on Baseband
-            center frequency
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-
-        Returns:
-            frequency offset
-        """
-        return self.send_query("SOUR{}:RAD:ARB:WM{}:FREQ:OFFS?".format(
-            sg, a_or_b))
-
-    def set_arb_freq_offset_aorb(self, a_or_b, offset, sg=1):
-        """ Sets the frequency offset of Pattern A/Pattern B based on Baseband
-            center frequency
-
-        Args:
-            sg: signal generator number.
-                Default is 1
-            a_or_b : Patten A or Pattern B( "A" or "B")
-            offset : frequency offset
-
-        Returns:
-            None
-        """
-        cmd = "SOUR{}:RAD:ARB:WM{}:FREQ:OFFS {}".format(sg, a_or_b, offset)
-        self.send_command(cmd)
diff --git a/src/antlion/controllers/ap_lib/ap_get_interface.py b/src/antlion/controllers/ap_lib/ap_get_interface.py
deleted file mode 100644
index ddd6987..0000000
--- a/src/antlion/controllers/ap_lib/ap_get_interface.py
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-from antlion.libs.proc import job
-
-GET_ALL_INTERFACE = 'ls /sys/class/net'
-GET_VIRTUAL_INTERFACE = 'ls /sys/devices/virtual/net'
-BRCTL_SHOW = 'brctl show'
-
-
-class ApInterfacesError(Exception):
-    """Error related to AP interfaces."""
-
-
-class ApInterfaces(object):
-    """Class to get network interface information for the device.
-
-    """
-    def __init__(self, ap, wan_interface_override=None):
-        """Initialize the ApInterface class.
-
-        Args:
-            ap: the ap object within ACTS
-            wan_interface_override: wan interface to use if specified by config
-        """
-        self.ssh = ap.ssh
-        self.wan_interface_override = wan_interface_override
-
-    def get_all_interface(self):
-        """Get all network interfaces on the device.
-
-        Returns:
-            interfaces_all: list of all the network interfaces on device
-        """
-        output = self.ssh.run(GET_ALL_INTERFACE)
-        interfaces_all = output.stdout.split('\n')
-
-        return interfaces_all
-
-    def get_virtual_interface(self):
-        """Get all virtual interfaces on the device.
-
-        Returns:
-            interfaces_virtual: list of all the virtual interfaces on device
-        """
-        output = self.ssh.run(GET_VIRTUAL_INTERFACE)
-        interfaces_virtual = output.stdout.split('\n')
-
-        return interfaces_virtual
-
-    def get_physical_interface(self):
-        """Get all the physical interfaces of the device.
-
-        Get all physical interfaces such as eth ports and wlan ports
-        Returns:
-            interfaces_phy: list of all the physical interfaces
-        """
-        interfaces_all = self.get_all_interface()
-        interfaces_virtual = self.get_virtual_interface()
-        interfaces_phy = list(set(interfaces_all) - set(interfaces_virtual))
-
-        return interfaces_phy
-
-    def get_bridge_interface(self):
-        """Get all the bridge interfaces of the device.
-
-        Returns:
-            interfaces_bridge: the list of bridge interfaces, return None if
-                bridge utility is not available on the device
-        """
-        interfaces_bridge = []
-        try:
-            output = self.ssh.run(BRCTL_SHOW)
-            lines = output.stdout.split('\n')
-            for line in lines:
-                interfaces_bridge.append(line.split('\t')[0])
-            interfaces_bridge.pop(0)
-            interfaces_bridge = [x for x in interfaces_bridge if x != '']
-            return interfaces_bridge
-        except job.Error:
-            logging.info('No brctl utility is available')
-            return None
-
-    def get_wlan_interface(self):
-        """Get all WLAN interfaces and specify 2.4 GHz and 5 GHz interfaces.
-
-        Returns:
-            interfaces_wlan: all wlan interfaces
-        Raises:
-            ApInterfacesError: Missing at least one WLAN interface
-        """
-        wlan_2g = None
-        wlan_5g = None
-        interfaces_phy = self.get_physical_interface()
-        for iface in interfaces_phy:
-            IW_LIST_FREQ = 'iwlist %s freq' % iface
-            output = self.ssh.run(IW_LIST_FREQ)
-            if 'Channel 06' in output.stdout and 'Channel 36' not in output.stdout:
-                wlan_2g = iface
-            elif 'Channel 36' in output.stdout and 'Channel 06' not in output.stdout:
-                wlan_5g = iface
-
-        interfaces_wlan = [wlan_2g, wlan_5g]
-
-        if None not in interfaces_wlan:
-            return interfaces_wlan
-
-        raise ApInterfacesError('Missing at least one WLAN interface')
-
-    def get_wan_interface(self):
-        """Get the WAN interface which has internet connectivity. If a wan
-        interface is already specified return that instead.
-
-        Returns:
-            wan: the only one WAN interface
-        Raises:
-            ApInterfacesError: no running WAN can be found
-        """
-        if self.wan_interface_override:
-            return self.wan_interface_override
-
-        wan = None
-        interfaces_phy = self.get_physical_interface()
-        interfaces_wlan = self.get_wlan_interface()
-        interfaces_eth = list(set(interfaces_phy) - set(interfaces_wlan))
-        for iface in interfaces_eth:
-            network_status = self.check_ping(iface)
-            if network_status == 1:
-                wan = iface
-                break
-        if wan:
-            return wan
-
-        output = self.ssh.run('ifconfig')
-        interfaces_all = output.stdout.split('\n')
-        logging.info("IFCONFIG output = %s" % interfaces_all)
-
-        raise ApInterfacesError('No WAN interface available')
-
-    def get_lan_interface(self):
-        """Get the LAN interface connecting to local devices.
-
-        Returns:
-            lan: the only one running LAN interface of the devices
-            None, if nothing was found.
-        """
-        lan = None
-        interfaces_phy = self.get_physical_interface()
-        interfaces_wlan = self.get_wlan_interface()
-        interfaces_eth = list(set(interfaces_phy) - set(interfaces_wlan))
-        interface_wan = self.get_wan_interface()
-        interfaces_eth.remove(interface_wan)
-        for iface in interfaces_eth:
-            LAN_CHECK = 'ifconfig %s' % iface
-            output = self.ssh.run(LAN_CHECK)
-            if 'RUNNING' in output.stdout:
-                lan = iface
-                break
-        return lan
-
-    def check_ping(self, iface):
-        """Check the ping status on specific interface to determine the WAN.
-
-        Args:
-            iface: the specific interface to check
-        Returns:
-            network_status: the connectivity status of the interface
-        """
-        PING = 'ping -c 3 -I %s 8.8.8.8' % iface
-        try:
-            self.ssh.run(PING)
-            return 1
-        except job.Error:
-            return 0
diff --git a/src/antlion/controllers/ap_lib/ap_iwconfig.py b/src/antlion/controllers/ap_lib/ap_iwconfig.py
deleted file mode 100644
index 550f785..0000000
--- a/src/antlion/controllers/ap_lib/ap_iwconfig.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.libs.proc import job
-
-
-class ApIwconfigError(Exception):
-    """Error related to configuring the wireless interface via iwconfig."""
-
-
-class ApIwconfig(object):
-    """Class to configure wireless interface via iwconfig
-
-    """
-
-    PROGRAM_FILE = '/usr/local/sbin/iwconfig'
-
-    def __init__(self, ap):
-        """Initialize the ApIwconfig class.
-
-        Args:
-            ap: the ap object within ACTS
-        """
-        self.ssh = ap.ssh
-
-    def ap_iwconfig(self, interface, arguments=None):
-        """Configure the wireless interface using iwconfig.
-
-        Returns:
-            output: the output of the command, if any
-        """
-        iwconfig_command = '%s %s %s' % (self.PROGRAM_FILE, interface,
-                                         arguments)
-        output = self.ssh.run(iwconfig_command)
-
-        return output
diff --git a/src/antlion/controllers/ap_lib/bridge_interface.py b/src/antlion/controllers/ap_lib/bridge_interface.py
deleted file mode 100644
index 2dd82d8..0000000
--- a/src/antlion/controllers/ap_lib/bridge_interface.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-from antlion.libs.proc import job
-
-_BRCTL = 'brctl'
-BRIDGE_NAME = 'br-lan'
-CREATE_BRIDGE = '%s addbr %s' % (_BRCTL, BRIDGE_NAME)
-DELETE_BRIDGE = '%s delbr %s' % (_BRCTL, BRIDGE_NAME)
-BRING_DOWN_BRIDGE = 'ifconfig %s down' % BRIDGE_NAME
-
-
-class BridgeInterfaceConfigs(object):
-    """Configs needed for creating bridge interface between LAN and WLAN.
-
-    """
-    def __init__(self, iface_wlan, iface_lan, bridge_ip):
-        """Set bridge interface configs based on the channel info.
-
-        Args:
-            iface_wlan: the wlan interface as part of the bridge
-            iface_lan: the ethernet LAN interface as part of the bridge
-            bridge_ip: the ip address assigned to the bridge interface
-        """
-        self.iface_wlan = iface_wlan
-        self.iface_lan = iface_lan
-        self.bridge_ip = bridge_ip
-
-
-class BridgeInterface(object):
-    """Class object for bridge interface betwen WLAN and LAN
-
-    """
-    def __init__(self, ap):
-        """Initialize the BridgeInterface class.
-
-        Bridge interface will be added between ethernet LAN port and WLAN port.
-        Args:
-            ap: AP object within ACTS
-        """
-        self.ssh = ap.ssh
-
-    def startup(self, brconfigs):
-        """Start up the bridge interface.
-
-        Args:
-            brconfigs: the bridge interface config, type BridgeInterfaceConfigs
-        """
-
-        logging.info('Create bridge interface between LAN and WLAN')
-        # Create the bridge
-        try:
-            self.ssh.run(CREATE_BRIDGE)
-        except job.Error:
-            logging.warning(
-                'Bridge interface {} already exists, no action needed'.format(
-                    BRIDGE_NAME))
-
-        # Enable 4addr mode on for the wlan interface
-        ENABLE_4ADDR = 'iw dev %s set 4addr on' % (brconfigs.iface_wlan)
-        try:
-            self.ssh.run(ENABLE_4ADDR)
-        except job.Error:
-            logging.warning('4addr is already enabled on {}'.format(
-                brconfigs.iface_wlan))
-
-        # Add both LAN and WLAN interfaces to the bridge interface
-        for interface in [brconfigs.iface_lan, brconfigs.iface_wlan]:
-            ADD_INTERFACE = '%s addif %s %s' % (_BRCTL, BRIDGE_NAME, interface)
-            try:
-                self.ssh.run(ADD_INTERFACE)
-            except job.Error:
-                logging.warning('{} has already been added to {}'.format(
-                    interface, BRIDGE_NAME))
-        time.sleep(5)
-
-        # Set IP address on the bridge interface to bring it up
-        SET_BRIDGE_IP = 'ifconfig %s %s' % (BRIDGE_NAME, brconfigs.bridge_ip)
-        self.ssh.run(SET_BRIDGE_IP)
-        time.sleep(2)
-
-        # Bridge interface is up
-        logging.info('Bridge interface is up and running')
-
-    def teardown(self, brconfigs):
-        """Tear down the bridge interface.
-
-        Args:
-            brconfigs: the bridge interface config, type BridgeInterfaceConfigs
-        """
-        logging.info('Bringing down the bridge interface')
-        # Delete the bridge interface
-        self.ssh.run(BRING_DOWN_BRIDGE)
-        time.sleep(1)
-        self.ssh.run(DELETE_BRIDGE)
-
-        # Bring down wlan interface and disable 4addr mode
-        BRING_DOWN_WLAN = 'ifconfig %s down' % brconfigs.iface_wlan
-        self.ssh.run(BRING_DOWN_WLAN)
-        time.sleep(2)
-        DISABLE_4ADDR = 'iw dev %s set 4addr off' % (brconfigs.iface_wlan)
-        self.ssh.run(DISABLE_4ADDR)
-        time.sleep(1)
-        logging.info('Bridge interface is down')
diff --git a/src/antlion/controllers/ap_lib/dhcp_config.py b/src/antlion/controllers/ap_lib/dhcp_config.py
deleted file mode 100644
index 4cc7adf..0000000
--- a/src/antlion/controllers/ap_lib/dhcp_config.py
+++ /dev/null
@@ -1,203 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-
-_ROUTER_DNS = '8.8.8.8, 4.4.4.4'
-
-
-class Subnet(object):
-    """Configs for a subnet  on the dhcp server.
-
-    Attributes:
-        network: ipaddress.IPv4Network, the network that this subnet is in.
-        start: ipaddress.IPv4Address, the start ip address.
-        end: ipaddress.IPv4Address, the end ip address.
-        router: The router to give to all hosts in this subnet.
-        lease_time: The lease time of all hosts in this subnet.
-        additional_parameters: A dictionary corresponding to DHCP parameters.
-        additional_options: A dictionary corresponding to DHCP options.
-    """
-
-    def __init__(self,
-                 subnet,
-                 start=None,
-                 end=None,
-                 router=None,
-                 lease_time=None,
-                 additional_parameters={},
-                 additional_options={}):
-        """
-        Args:
-            subnet: ipaddress.IPv4Network, The address space of the subnetwork
-                    served by the DHCP server.
-            start: ipaddress.IPv4Address, The start of the address range to
-                   give hosts in this subnet. If not given, the second ip in
-                   the network is used, under the assumption that the first
-                   address is the router.
-            end: ipaddress.IPv4Address, The end of the address range to give
-                 hosts. If not given then the address prior to the broadcast
-                 address (i.e. the second to last ip in the network) is used.
-            router: ipaddress.IPv4Address, The router hosts should use in this
-                    subnet. If not given the first ip in the network is used.
-            lease_time: int, The amount of lease time in seconds
-                        hosts in this subnet have.
-            additional_parameters: A dictionary corresponding to DHCP parameters.
-            additional_options: A dictionary corresponding to DHCP options.
-        """
-        self.network = subnet
-
-        if start:
-            self.start = start
-        else:
-            self.start = self.network[2]
-
-        if not self.start in self.network:
-            raise ValueError('The start range is not in the subnet.')
-        if self.start.is_reserved:
-            raise ValueError('The start of the range cannot be reserved.')
-
-        if end:
-            self.end = end
-        else:
-            self.end = self.network[-2]
-
-        if not self.end in self.network:
-            raise ValueError('The end range is not in the subnet.')
-        if self.end.is_reserved:
-            raise ValueError('The end of the range cannot be reserved.')
-        if self.end < self.start:
-            raise ValueError(
-                'The end must be an address larger than the start.')
-
-        if router:
-            if router >= self.start and router <= self.end:
-                raise ValueError('Router must not be in pool range.')
-            if not router in self.network:
-                raise ValueError('Router must be in the given subnet.')
-
-            self.router = router
-        else:
-            # TODO: Use some more clever logic so that we don't have to search
-            # every host potentially.
-            # This is especially important if we support IPv6 networks in this
-            # configuration. The improved logic that we can use is:
-            #    a) erroring out if start and end encompass the whole network, and
-            #    b) picking any address before self.start or after self.end.
-            self.router = None
-            for host in self.network.hosts():
-                if host < self.start or host > self.end:
-                    self.router = host
-                    break
-
-            if not self.router:
-                raise ValueError('No useable host found.')
-
-        self.lease_time = lease_time
-        self.additional_parameters = additional_parameters
-        self.additional_options = additional_options
-        if 'domain-name-servers' not in self.additional_options:
-            self.additional_options['domain-name-servers'] = _ROUTER_DNS
-
-
-class StaticMapping(object):
-    """Represents a static dhcp host.
-
-    Attributes:
-        identifier: How id of the host (usually the mac addres
-                    e.g. 00:11:22:33:44:55).
-        address: ipaddress.IPv4Address, The ipv4 address to give the host.
-        lease_time: How long to give a lease to this host.
-    """
-
-    def __init__(self, identifier, address, lease_time=None):
-        self.identifier = identifier
-        self.ipv4_address = address
-        self.lease_time = lease_time
-
-
-class DhcpConfig(object):
-    """The configs for a dhcp server.
-
-    Attributes:
-        subnets: A list of all subnets for the dhcp server to create.
-        static_mappings: A list of static host addresses.
-        default_lease_time: The default time for a lease.
-        max_lease_time: The max time to allow a lease.
-    """
-
-    def __init__(self,
-                 subnets=None,
-                 static_mappings=None,
-                 default_lease_time=600,
-                 max_lease_time=7200):
-        self.subnets = copy.deepcopy(subnets) if subnets else []
-        self.static_mappings = (copy.deepcopy(static_mappings)
-                                if static_mappings else [])
-        self.default_lease_time = default_lease_time
-        self.max_lease_time = max_lease_time
-
-    def render_config_file(self):
-        """Renders the config parameters into a format compatible with
-        the ISC DHCP server (dhcpd).
-        """
-        lines = []
-
-        if self.default_lease_time:
-            lines.append('default-lease-time %d;' % self.default_lease_time)
-        if self.max_lease_time:
-            lines.append('max-lease-time %s;' % self.max_lease_time)
-
-        for subnet in self.subnets:
-            address = subnet.network.network_address
-            mask = subnet.network.netmask
-            router = subnet.router
-            start = subnet.start
-            end = subnet.end
-            lease_time = subnet.lease_time
-            additional_parameters = subnet.additional_parameters
-            additional_options = subnet.additional_options
-
-            lines.append('subnet %s netmask %s {' % (address, mask))
-            lines.append('\tpool {')
-            lines.append('\t\toption subnet-mask %s;' % mask)
-            lines.append('\t\toption routers %s;' % router)
-            lines.append('\t\trange %s %s;' % (start, end))
-            if lease_time:
-                lines.append('\t\tdefault-lease-time %d;' % lease_time)
-                lines.append('\t\tmax-lease-time %d;' % lease_time)
-            for param, value in additional_parameters.items():
-                lines.append('\t\t%s %s;' % (param, value))
-            for option, value in additional_options.items():
-                lines.append('\t\toption %s %s;' % (option, value))
-            lines.append('\t}')
-            lines.append('}')
-
-        for mapping in self.static_mappings:
-            identifier = mapping.identifier
-            fixed_address = mapping.ipv4_address
-            host_fake_name = 'host%s' % identifier.replace(':', '')
-            lease_time = mapping.lease_time
-
-            lines.append('host %s {' % host_fake_name)
-            lines.append('\thardware ethernet %s;' % identifier)
-            lines.append('\tfixed-address %s;' % fixed_address)
-            if lease_time:
-                lines.append('\tdefault-lease-time %d;' % lease_time)
-                lines.append('\tmax-lease-time %d;' % lease_time)
-            lines.append('}')
-
-        config_str = '\n'.join(lines)
-
-        return config_str
diff --git a/src/antlion/controllers/ap_lib/dhcp_server.py b/src/antlion/controllers/ap_lib/dhcp_server.py
deleted file mode 100644
index 01411c6..0000000
--- a/src/antlion/controllers/ap_lib/dhcp_server.py
+++ /dev/null
@@ -1,210 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
-
-from antlion.controllers.utils_lib.commands import shell
-from antlion import logger
-
-
-class Error(Exception):
-    """An error caused by the dhcp server."""
-
-
-class NoInterfaceError(Exception):
-    """Error thrown when the dhcp server has no interfaces on any subnet."""
-
-
-class DhcpServer(object):
-    """Manages the dhcp server program.
-
-    Only one of these can run in an environment at a time.
-
-    Attributes:
-        config: The dhcp server configuration that is being used.
-    """
-
-    PROGRAM_FILE = 'dhcpd'
-
-    def __init__(self, runner, interface, working_dir='/tmp'):
-        """
-        Args:
-            runner: Object that has a run_async and run methods for running
-                    shell commands.
-            interface: string, The name of the interface to use.
-            working_dir: The directory to work out of.
-        """
-        self._log = logger.create_logger(lambda msg: '[DHCP Server|%s] %s' % (
-            interface, msg))
-        self._runner = runner
-        self._working_dir = working_dir
-        self._shell = shell.ShellCommand(runner, working_dir)
-        self._stdio_log_file = 'dhcpd_%s.log' % interface
-        self._config_file = 'dhcpd_%s.conf' % interface
-        self._lease_file = 'dhcpd_%s.leases' % interface
-        self._pid_file = 'dhcpd_%s.pid' % interface
-        self._identifier = '%s.*%s' % (self.PROGRAM_FILE, self._config_file)
-
-    # There is a slight timing issue where if the proc filesystem in Linux
-    # doesn't get updated in time as when this is called, the NoInterfaceError
-    # will happening.  By adding this retry, the error appears to have gone away
-    # but will still show a warning if the problem occurs.  The error seems to
-    # happen more with bridge interfaces than standard interfaces.
-    @retry(retry=retry_if_exception_type(NoInterfaceError),
-           stop=stop_after_attempt(3),
-           wait=wait_fixed(1))
-    def start(self, config, timeout=60):
-        """Starts the dhcp server.
-
-        Starts the dhcp server daemon and runs it in the background.
-
-        Args:
-            config: dhcp_config.DhcpConfig, Configs to start the dhcp server
-                    with.
-
-        Raises:
-            Error: Raised when a dhcp server error is found.
-        """
-        if self.is_alive():
-            self.stop()
-
-        self._write_configs(config)
-        self._shell.delete_file(self._stdio_log_file)
-        self._shell.delete_file(self._pid_file)
-        self._shell.touch_file(self._lease_file)
-
-        dhcpd_command = '%s -cf "%s" -lf %s -f -pf "%s"' % (
-            self.PROGRAM_FILE, self._config_file, self._lease_file,
-            self._pid_file)
-        base_command = 'cd "%s"; %s' % (self._working_dir, dhcpd_command)
-        job_str = '%s > "%s" 2>&1' % (base_command, self._stdio_log_file)
-        self._runner.run_async(job_str)
-
-        try:
-            self._wait_for_process(timeout=timeout)
-            self._wait_for_server(timeout=timeout)
-        except:
-            self._log.warn("Failed to start DHCP server.")
-            self._log.info("DHCP configuration:\n" +
-                           config.render_config_file() + "\n")
-            self._log.info("DHCP logs:\n" + self.get_logs() + "\n")
-            self.stop()
-            raise
-
-    def stop(self):
-        """Kills the daemon if it is running."""
-        if self.is_alive():
-            self._shell.kill(self._identifier)
-
-    def is_alive(self):
-        """
-        Returns:
-            True if the daemon is running.
-        """
-        return self._shell.is_alive(self._identifier)
-
-    def get_logs(self):
-        """Pulls the log files from where dhcp server is running.
-
-        Returns:
-            A string of the dhcp server logs.
-        """
-        try:
-            # Try reading the PID file. This will fail if the server failed to
-            # start.
-            pid = self._shell.read_file(self._pid_file)
-            # `dhcpd` logs to the syslog, where its messages are interspersed
-            # with all other programs that use the syslog. Log lines contain
-            # `dhcpd[<pid>]`, which we can search for to extract all the logs
-            # from this particular dhcpd instance.
-            # The logs are preferable to the stdio output, since they contain
-            # a superset of the information from stdio, including leases
-            # that the server provides.
-            return self._shell.run(
-                f"grep dhcpd.{pid} /var/log/messages").stdout
-        except Exception:
-            self._log.info(
-                "Failed to read logs from syslog (likely because the server " +
-                "failed to start). Falling back to stdio output.")
-            return self._shell.read_file(self._stdio_log_file)
-
-    def _wait_for_process(self, timeout=60):
-        """Waits for the process to come up.
-
-        Waits until the dhcp server process is found running, or there is
-        a timeout. If the program never comes up then the log file
-        will be scanned for errors.
-
-        Raises: See _scan_for_errors
-        """
-        start_time = time.time()
-        while time.time() - start_time < timeout and not self.is_alive():
-            self._scan_for_errors(False)
-            time.sleep(0.1)
-
-        self._scan_for_errors(True)
-
-    def _wait_for_server(self, timeout=60):
-        """Waits for dhcp server to report that the server is up.
-
-        Waits until dhcp server says the server has been brought up or an
-        error occurs.
-
-        Raises: see _scan_for_errors
-        """
-        start_time = time.time()
-        while time.time() - start_time < timeout:
-            success = self._shell.search_file(
-                'Wrote [0-9]* leases to leases file', self._stdio_log_file)
-            if success:
-                return
-
-            self._scan_for_errors(True)
-
-    def _scan_for_errors(self, should_be_up):
-        """Scans the dhcp server log for any errors.
-
-        Args:
-            should_be_up: If true then dhcp server is expected to be alive.
-                          If it is found not alive while this is true an error
-                          is thrown.
-
-        Raises:
-            Error: Raised when a dhcp server error is found.
-        """
-        # If this is checked last we can run into a race condition where while
-        # scanning the log the process has not died, but after scanning it
-        # has. If this were checked last in that condition then the wrong
-        # error will be thrown. To prevent this we gather the alive state first
-        # so that if it is dead it will definitely give the right error before
-        # just giving a generic one.
-        is_dead = not self.is_alive()
-
-        no_interface = self._shell.search_file(
-            'Not configured to listen on any interfaces', self._stdio_log_file)
-        if no_interface:
-            raise NoInterfaceError(
-                'Dhcp does not contain a subnet for any of the networks the'
-                ' current interfaces are on.')
-
-        if should_be_up and is_dead:
-            raise Error('Dhcp server failed to start.', self)
-
-    def _write_configs(self, config):
-        """Writes the configs to the dhcp server config file."""
-        self._shell.delete_file(self._config_file)
-        config_str = config.render_config_file()
-        self._shell.write_file(self._config_file, config_str)
diff --git a/src/antlion/controllers/ap_lib/extended_capabilities.py b/src/antlion/controllers/ap_lib/extended_capabilities.py
deleted file mode 100644
index c7c8ade..0000000
--- a/src/antlion/controllers/ap_lib/extended_capabilities.py
+++ /dev/null
@@ -1,193 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import IntEnum, unique
-from typing import Tuple
-
-
-@unique
-class ExtendedCapability(IntEnum):
-    """All extended capabilities present in IEEE 802.11-2020 Table 9-153.
-
-    Each name has a value corresponding to that extended capability's bit offset
-    in the specification's extended capabilities field.
-
-    Note that most extended capabilities are represented by a single bit, which
-    indicates whether the extended capability is advertised by the STA; but
-    some are represented by multiple bits. In the enum, each extended capability
-    has the value of its offset; comments indicate capabilities that use
-    multiple bits.
-    """
-    TWENTY_FORTY_BSS_COEXISTENCE_MANAGEMENT_SUPPORT = 0
-    GLK = 1
-    EXTENDED_CHANNEL_SWITCHING = 2
-    GLK_GCR = 3
-    PSMP_CAPABILITY = 4
-    # 5 reserved
-    S_PSMP_SUPPORT = 6
-    EVENT = 7
-    DIAGNOSTICS = 8
-    MULTICAST_DIAGNOSTICS = 9
-    LOCATION_TRACKING = 10
-    FMS = 11
-    PROXY_ARP_SERVICE = 12
-    COLLOCATED_INTERFERENCE_REPORTING = 13
-    CIVIC_LOCATION = 14
-    GEOSPATIAL_LOCATION = 15
-    TFS = 16
-    WNM_SLEEP_MODE = 17
-    TIM_BROADCAST = 18
-    BSS_TRANSITION = 19
-    QOS_TRAFFIC_CAPABILITY = 20
-    AC_STATION_COUNT = 21
-    MULTIPLE_BSSID = 22
-    TIMING_MEASUREMENT = 23
-    CHANNEL_USAGE = 24
-    SSID_LIST = 25
-    DMS = 26
-    UTC_TSF_OFFSET = 27
-    TPU_BUFFER_STA_SUPPORT = 28
-    TDLS_PEER_PSM_SUPPORT = 29
-    TDLS_CHANNEL_SWITCHING = 30
-    INTERWORKING = 31
-    QOS_MAP = 32
-    EBR = 33
-    SSPN_INTERFACE = 34
-    # 35 reserved
-    MSGCF_CAPABILITY = 36
-    TDLS_SUPPORT = 37
-    TDLS_PROHIBITED = 38
-    TDLS_CHANNEL_SWITCHING_PROHIBITED = 39
-    REJECT_UNADMITTED_FRAME = 40
-    SERVICE_INTERVAL_GRANULARITY = 41
-    # Bits 41-43 contain SERVICE_INTERVAL_GRANULARITY value
-    IDENTIFIER_LOCATION = 44
-    U_APSD_COEXISTENCE = 45
-    WNM_NOTIFICATION = 46
-    QAB_CAPABILITY = 47
-    UTF_8_SSID = 48
-    QMF_ACTIVATED = 49
-    QMF_RECONFIGURATION_ACTIVATED = 50
-    ROBUST_AV_STREAMING = 51
-    ADVANCED_GCR = 52
-    MESH_GCR = 53
-    SCS = 54
-    QLOAD_REPORT = 55
-    ALTERNATE_EDCA = 56
-    UNPROTECTED_TXOP_NEGOTIATION = 57
-    PROTECTED_TXOP_NEGOTIATION = 58
-    # 59 reserved
-    PROTECTED_QLOAD_REPORT = 60
-    TDLS_WIDER_BANDWIDTH = 61
-    OPERATING_MODE_NOTIFICATION = 62
-    MAX_NUMBER_OF_MSDUS_IN_A_MSDU = 63
-    # 63-64 contain MAX_NUMBER_OF_MSDUS_IN_A_MSDU value
-    CHANNEL_SCHEDULE_MANAGEMENT = 65
-    GEODATABASE_INBAND_ENABLING_SIGNAL = 66
-    NETWORK_CHANNEL_CONTROL = 67
-    WHITE_SPACE_MAP = 68
-    CHANNEL_AVAILABILITY_QUERY = 69
-    FINE_TIMING_MEASUREMENT_RESPONDER = 70
-    FINE_TIMING_MEASUREMENT_INITIATOR = 71
-    FILS_CAPABILITY = 72
-    EXTENDED_SPECTRUM_MANAGEMENT_CAPABLE = 73
-    FUTURE_CHANNEL_GUIDANCE = 74
-    PAD = 75
-    # 76-79 reserved
-    COMPLETE_LIST_OF_NON_TX_BSSID_PROFILES = 80
-    SAE_PASSWORD_IDENTIFIERS_IN_USE = 81
-    SAE_PASSWORD_IDENTIFIERS_USED_EXCLUSIVELY = 82
-    # 83 reserved
-    BEACON_PROTECTION_ENABLED = 84
-    MIRRORED_SCS = 85
-    # 86 reserved
-    LOCAL_MAC_ADDRESS_POLICY = 87
-    # 88-n reserved
-
-
-def _offsets(ext_cap_offset: ExtendedCapability) -> Tuple[int, int]:
-    """For given capability, return the byte and bit offsets within the field.
-
-    802.11 divides the extended capability field into bytes, as does the
-    ExtendedCapabilities class below. This function returns the index of the
-    byte that contains the given extended capability, as well as the bit offset
-    inside that byte (all offsets zero-indexed). For example,
-    MULTICAST_DIAGNOSTICS is bit 9, which is within byte 1 at bit offset 1.
-    """
-    byte_offset = ext_cap_offset // 8
-    bit_offset = ext_cap_offset % 8
-    return byte_offset, bit_offset
-
-
-class ExtendedCapabilities:
-    """Extended capability parsing and representation.
-
-    See IEEE 802.11-2020 9.4.2.26.
-    """
-
-    def __init__(self, ext_cap: bytearray = bytearray()):
-        """Represent the given extended capabilities field.
-
-        Args:
-            ext_cap: IEEE 802.11-2020 9.4.2.26 extended capabilities field.
-            Default is an empty field, meaning no extended capabilities are
-            advertised.
-        """
-        self._ext_cap = ext_cap
-
-    def _capability_advertised(self, ext_cap: ExtendedCapability) -> bool:
-        """Whether an extended capability is advertised.
-
-        Args:
-            ext_cap: an extended capability.
-        Returns:
-            True if the bit is present and its value is 1, otherwise False.
-        Raises:
-            NotImplementedError: for extended capabilities that span more than
-            a single bit. These could be supported, but no callers need them
-            at this time.
-        """
-        if ext_cap in [
-                ExtendedCapability.SERVICE_INTERVAL_GRANULARITY,
-                ExtendedCapability.MAX_NUMBER_OF_MSDUS_IN_A_MSDU
-        ]:
-            raise NotImplementedError(
-                f'{ext_cap.name} not implemented yet by {__class__}')
-        byte_offset, bit_offset = _offsets(ext_cap)
-        if len(self._ext_cap) > byte_offset:
-            # Use bit_offset to derive a mask that will check the correct bit.
-            if self._ext_cap[byte_offset] & 2**bit_offset > 0:
-                return True
-        return False
-
-    @property
-    def bss_transition(self) -> bool:
-        return self._capability_advertised(ExtendedCapability.BSS_TRANSITION)
-
-    @property
-    def proxy_arp_service(self) -> bool:
-        return self._capability_advertised(
-            ExtendedCapability.PROXY_ARP_SERVICE)
-
-    @property
-    def utc_tsf_offset(self) -> bool:
-        return self._capability_advertised(ExtendedCapability.UTC_TSF_OFFSET)
-
-    @property
-    def wnm_sleep_mode(self) -> bool:
-        return self._capability_advertised(ExtendedCapability.WNM_SLEEP_MODE)
-
-    # Other extended capability property methods can be added as needed by callers.
diff --git a/src/antlion/controllers/ap_lib/hostapd.py b/src/antlion/controllers/ap_lib/hostapd.py
deleted file mode 100644
index de93ea4..0000000
--- a/src/antlion/controllers/ap_lib/hostapd.py
+++ /dev/null
@@ -1,376 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import itertools
-import logging
-import re
-import time
-from typing import Set
-
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
-from antlion.controllers.ap_lib.wireless_network_management import BssTransitionManagementRequest
-from antlion.controllers.utils_lib.commands import shell
-from antlion.libs.proc.job import Result
-
-
-class Error(Exception):
-    """An error caused by hostapd."""
-
-
-class Hostapd(object):
-    """Manages the hostapd program.
-
-    Attributes:
-        config: The hostapd configuration that is being used.
-    """
-
-    PROGRAM_FILE = '/usr/sbin/hostapd'
-    CLI_PROGRAM_FILE = '/usr/bin/hostapd_cli'
-
-    def __init__(self, runner, interface, working_dir='/tmp'):
-        """
-        Args:
-            runner: Object that has run_async and run methods for executing
-                    shell commands (e.g. connection.SshConnection)
-            interface: string, The name of the interface to use (eg. wlan0).
-            working_dir: The directory to work out of.
-        """
-        self._runner = runner
-        self._interface = interface
-        self._working_dir = working_dir
-        self.config = None
-        self._shell = shell.ShellCommand(runner, working_dir)
-        self._log_file = 'hostapd-%s.log' % self._interface
-        self._ctrl_file = 'hostapd-%s.ctrl' % self._interface
-        self._config_file = 'hostapd-%s.conf' % self._interface
-        self._identifier = '%s.*%s' % (self.PROGRAM_FILE, self._config_file)
-
-    def start(self, config, timeout=60, additional_parameters=None):
-        """Starts hostapd
-
-        Starts the hostapd daemon and runs it in the background.
-
-        Args:
-            config: Configs to start the hostapd with.
-            timeout: Time to wait for DHCP server to come up.
-            additional_parameters: A dictionary of parameters that can sent
-                                   directly into the hostapd config file.  This
-                                   can be used for debugging and or adding one
-                                   off parameters into the config.
-
-        Returns:
-            True if the daemon could be started. Note that the daemon can still
-            start and not work. Invalid configurations can take a long amount
-            of time to be produced, and because the daemon runs indefinitely
-            it's impossible to wait on. If you need to check if configs are ok
-            then periodic checks to is_running and logs should be used.
-        """
-        if self.is_alive():
-            self.stop()
-
-        self.config = config
-
-        self._shell.delete_file(self._ctrl_file)
-        self._shell.delete_file(self._log_file)
-        self._shell.delete_file(self._config_file)
-        self._write_configs(additional_parameters=additional_parameters)
-
-        hostapd_command = '%s -dd -t "%s"' % (self.PROGRAM_FILE,
-                                              self._config_file)
-        base_command = 'cd "%s"; %s' % (self._working_dir, hostapd_command)
-        job_str = 'rfkill unblock all; %s > "%s" 2>&1' %\
-                  (base_command, self._log_file)
-        self._runner.run_async(job_str)
-
-        try:
-            self._wait_for_process(timeout=timeout)
-            self._wait_for_interface(timeout=timeout)
-        except:
-            self.stop()
-            raise
-
-    def stop(self):
-        """Kills the daemon if it is running."""
-        if self.is_alive():
-            self._shell.kill(self._identifier)
-
-    def channel_switch(self, channel_num):
-        """Switches to the given channel.
-
-        Returns:
-            acts.libs.proc.job.Result containing the results of the command.
-        Raises: See _run_hostapd_cli_cmd
-        """
-        try:
-            channel_freq = hostapd_constants.FREQUENCY_MAP[channel_num]
-        except KeyError:
-            raise ValueError('Invalid channel number {}'.format(channel_num))
-        csa_beacon_count = 10
-        channel_switch_cmd = 'chan_switch {} {}'.format(
-            csa_beacon_count, channel_freq)
-        result = self._run_hostapd_cli_cmd(channel_switch_cmd)
-
-    def get_current_channel(self):
-        """Returns the current channel number.
-
-        Raises: See _run_hostapd_cli_cmd
-        """
-        status_cmd = 'status'
-        result = self._run_hostapd_cli_cmd(status_cmd)
-        match = re.search(r'^channel=(\d+)$', result.stdout, re.MULTILINE)
-        if not match:
-            raise Error('Current channel could not be determined')
-        try:
-            channel = int(match.group(1))
-        except ValueError:
-            raise Error('Internal error: current channel could not be parsed')
-        return channel
-
-    def _list_sta(self) -> Result:
-        """List all associated STA MAC addresses.
-
-        Returns:
-            acts.libs.proc.job.Result containing the results of the command.
-        Raises: See _run_hostapd_cli_cmd
-        """
-        list_sta_cmd = 'list_sta'
-        return self._run_hostapd_cli_cmd(list_sta_cmd)
-
-    def get_stas(self) -> Set[str]:
-        """Return MAC addresses of all associated STAs."""
-        list_sta_result = self._list_sta()
-        stas = set()
-        for line in list_sta_result.stdout.splitlines():
-            # Each line must be a valid MAC address. Capture it.
-            m = re.match(r'((?:[0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2})', line)
-            if m:
-                stas.add(m.group(1))
-        return stas
-
-    def _sta(self, sta_mac: str) -> Result:
-        """Return hostapd's detailed info about an associated STA.
-
-        Returns:
-            acts.libs.proc.job.Result containing the results of the command.
-        Raises: See _run_hostapd_cli_cmd
-        """
-        sta_cmd = 'sta {}'.format(sta_mac)
-        return self._run_hostapd_cli_cmd(sta_cmd)
-
-    def get_sta_extended_capabilities(self,
-                                      sta_mac: str) -> ExtendedCapabilities:
-        """Get extended capabilities for the given STA, as seen by the AP.
-
-        Args:
-            sta_mac: MAC address of the STA in question.
-        Returns:
-            Extended capabilities of the given STA.
-        Raises:
-            Error if extended capabilities for the STA cannot be obtained.
-        """
-        sta_result = self._sta(sta_mac)
-        # hostapd ext_capab field is a hex encoded string representation of the
-        # 802.11 extended capabilities structure, each byte represented by two
-        # chars (each byte having format %02x).
-        m = re.search(r'ext_capab=([0-9A-Faf]+)', sta_result.stdout,
-                      re.MULTILINE)
-        if not m:
-            raise Error('Failed to get ext_capab from STA details')
-        raw_ext_capab = m.group(1)
-        try:
-            return ExtendedCapabilities(bytearray.fromhex(raw_ext_capab))
-        except ValueError:
-            raise Error(
-                f'ext_capab contains invalid hex string repr {raw_ext_capab}')
-
-    def _bss_tm_req(self, client_mac: str,
-                    request: BssTransitionManagementRequest) -> Result:
-        """Send a hostapd BSS Transition Management request command to a STA.
-
-        Args:
-            client_mac: MAC address that will receive the request.
-            request: BSS Transition Management request that will be sent.
-        Returns:
-            acts.libs.proc.job.Result containing the results of the command.
-        Raises: See _run_hostapd_cli_cmd
-        """
-        bss_tm_req_cmd = f'bss_tm_req {client_mac}'
-
-        if request.abridged:
-            bss_tm_req_cmd += ' abridged=1'
-        if request.bss_termination_included and request.bss_termination_duration:
-            bss_tm_req_cmd += f' bss_term={request.bss_termination_duration.duration}'
-        if request.disassociation_imminent:
-            bss_tm_req_cmd += ' disassoc_imminent=1'
-        if request.disassociation_timer is not None:
-            bss_tm_req_cmd += f' disassoc_timer={request.disassociation_timer}'
-        if request.preferred_candidate_list_included:
-            bss_tm_req_cmd += ' pref=1'
-        if request.session_information_url:
-            bss_tm_req_cmd += f' url={request.session_information_url}'
-        if request.validity_interval:
-            bss_tm_req_cmd += f' valid_int={request.validity_interval}'
-
-        # neighbor= can appear multiple times, so it requires special handling.
-        for neighbor in request.candidate_list:
-            bssid = neighbor.bssid
-            bssid_info = hex(neighbor.bssid_information)
-            op_class = neighbor.operating_class
-            chan_num = neighbor.channel_number
-            phy_type = int(neighbor.phy_type)
-            bss_tm_req_cmd += f' neighbor={bssid},{bssid_info},{op_class},{chan_num},{phy_type}'
-
-        return self._run_hostapd_cli_cmd(bss_tm_req_cmd)
-
-    def send_bss_transition_management_req(
-            self, sta_mac: str,
-            request: BssTransitionManagementRequest) -> Result:
-        """Send a BSS Transition Management request to an associated STA.
-
-        Args:
-            sta_mac: MAC address of the STA in question.
-            request: BSS Transition Management request that will be sent.
-        Returns:
-            acts.libs.proc.job.Result containing the results of the command.
-        Raises: See _run_hostapd_cli_cmd
-        """
-        return self._bss_tm_req(sta_mac, request)
-
-    def is_alive(self):
-        """
-        Returns:
-            True if the daemon is running.
-        """
-        return self._shell.is_alive(self._identifier)
-
-    def pull_logs(self):
-        """Pulls the log files from where hostapd is running.
-
-        Returns:
-            A string of the hostapd logs.
-        """
-        # TODO: Auto pulling of logs when stop is called.
-        return self._shell.read_file(self._log_file)
-
-    def _run_hostapd_cli_cmd(self, cmd):
-        """Run the given hostapd_cli command.
-
-        Runs the command, waits for the output (up to default timeout), and
-            returns the result.
-
-        Returns:
-            acts.libs.proc.job.Result containing the results of the ssh command.
-
-        Raises:
-            acts.lib.proc.job.TimeoutError: When the remote command took too
-                long to execute.
-            antlion.controllers.utils_lib.ssh.connection.Error: When the ssh
-                connection failed to be created.
-            antlion.controllers.utils_lib.ssh.connection.CommandError: Ssh worked,
-                but the command had an error executing.
-        """
-        hostapd_cli_job = 'cd {}; {} -p {} {}'.format(self._working_dir,
-                                                      self.CLI_PROGRAM_FILE,
-                                                      self._ctrl_file, cmd)
-        return self._runner.run(hostapd_cli_job)
-
-    def _wait_for_process(self, timeout=60):
-        """Waits for the process to come up.
-
-        Waits until the hostapd process is found running, or there is
-        a timeout. If the program never comes up then the log file
-        will be scanned for errors.
-
-        Raises: See _scan_for_errors
-        """
-        start_time = time.time()
-        while time.time() - start_time < timeout and not self.is_alive():
-            self._scan_for_errors(False)
-            time.sleep(0.1)
-
-    def _wait_for_interface(self, timeout=60):
-        """Waits for hostapd to report that the interface is up.
-
-        Waits until hostapd says the interface has been brought up or an
-        error occurs.
-
-        Raises: see _scan_for_errors
-        """
-        start_time = time.time()
-        while time.time() - start_time < timeout:
-            time.sleep(0.1)
-            success = self._shell.search_file('Setup of interface done',
-                                              self._log_file)
-            if success:
-                return
-            self._scan_for_errors(False)
-
-        self._scan_for_errors(True)
-
-    def _scan_for_errors(self, should_be_up):
-        """Scans the hostapd log for any errors.
-
-        Args:
-            should_be_up: If true then hostapd program is expected to be alive.
-                          If it is found not alive while this is true an error
-                          is thrown.
-
-        Raises:
-            Error: Raised when a hostapd error is found.
-        """
-        # Store this so that all other errors have priority.
-        is_dead = not self.is_alive()
-
-        bad_config = self._shell.search_file('Interface initialization failed',
-                                             self._log_file)
-        if bad_config:
-            raise Error('Interface failed to start', self)
-
-        bad_config = self._shell.search_file(
-            "Interface %s wasn't started" % self._interface, self._log_file)
-        if bad_config:
-            raise Error('Interface failed to start', self)
-
-        if should_be_up and is_dead:
-            raise Error('Hostapd failed to start', self)
-
-    def _write_configs(self, additional_parameters=None):
-        """Writes the configs to the hostapd config file."""
-        self._shell.delete_file(self._config_file)
-
-        interface_configs = collections.OrderedDict()
-        interface_configs['interface'] = self._interface
-        interface_configs['ctrl_interface'] = self._ctrl_file
-        pairs = ('%s=%s' % (k, v) for k, v in interface_configs.items())
-
-        packaged_configs = self.config.package_configs()
-        if additional_parameters:
-            packaged_configs.append(additional_parameters)
-        for packaged_config in packaged_configs:
-            config_pairs = ('%s=%s' % (k, v)
-                            for k, v in packaged_config.items()
-                            if v is not None)
-            pairs = itertools.chain(pairs, config_pairs)
-
-        hostapd_conf = '\n'.join(pairs)
-
-        logging.info('Writing %s' % self._config_file)
-        logging.debug('******************Start*******************')
-        logging.debug('\n%s' % hostapd_conf)
-        logging.debug('*******************End********************')
-
-        self._shell.write_file(self._config_file, hostapd_conf)
diff --git a/src/antlion/controllers/ap_lib/hostapd_ap_preset.py b/src/antlion/controllers/ap_lib/hostapd_ap_preset.py
deleted file mode 100644
index 28062f5..0000000
--- a/src/antlion/controllers/ap_lib/hostapd_ap_preset.py
+++ /dev/null
@@ -1,486 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from typing import FrozenSet
-
-from antlion import utils
-
-import antlion.controllers.ap_lib.third_party_ap_profiles.actiontec as actiontec
-import antlion.controllers.ap_lib.third_party_ap_profiles.asus as asus
-import antlion.controllers.ap_lib.third_party_ap_profiles.belkin as belkin
-import antlion.controllers.ap_lib.third_party_ap_profiles.linksys as linksys
-import antlion.controllers.ap_lib.third_party_ap_profiles.netgear as netgear
-import antlion.controllers.ap_lib.third_party_ap_profiles.securifi as securifi
-import antlion.controllers.ap_lib.third_party_ap_profiles.tplink as tplink
-
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
-
-
-def _get_or_default(var, default_value):
-    """Check variable and return non-null value.
-
-   Args:
-        var: Any variable.
-        default_value: Value to return if the var is None.
-
-   Returns:
-        Variable value if not None, default value otherwise.
-    """
-    return var if var is not None else default_value
-
-
-def create_ap_preset(
-        profile_name='whirlwind',
-        iface_wlan_2g=None,
-        iface_wlan_5g=None,
-        channel=None,
-        mode=None,
-        frequency=None,
-        security=None,
-        pmf_support=None,
-        ssid=None,
-        hidden=None,
-        dtim_period=None,
-        frag_threshold=None,
-        rts_threshold=None,
-        force_wmm=None,
-        beacon_interval=None,
-        short_preamble=None,
-        n_capabilities=None,
-        ac_capabilities=None,
-        vht_bandwidth=None,
-        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
-        bss_settings=[]):
-    """AP preset config generator.  This a wrapper for hostapd_config but
-       but supplies the default settings for the preset that is selected.
-
-        You may specify channel or frequency, but not both.  Both options
-        are checked for validity (i.e. you can't specify an invalid channel
-        or a frequency that will not be accepted).
-
-    Args:
-        profile_name: The name of the device want the preset for.
-                      Options: whirlwind
-        channel: int, channel number.
-        dtim: int, DTIM value of the AP, default is 2.
-        frequency: int, frequency of channel.
-        security: Security, the secuirty settings to use.
-        ssid: string, The name of the ssid to brodcast.
-        pmf_support: int, whether pmf is disabled, enabled, or required
-        vht_bandwidth: VHT bandwidth for 11ac operation.
-        bss_settings: The settings for all bss.
-        iface_wlan_2g: the wlan 2g interface name of the AP.
-        iface_wlan_5g: the wlan 5g interface name of the AP.
-        mode: The hostapd 802.11 mode of operation.
-        ssid: The ssid for the wireless network.
-        hidden: Whether to include the ssid in the beacons.
-        dtim_period: The dtim period for the BSS
-        frag_threshold: Max size of packet before fragmenting the packet.
-        rts_threshold: Max size of packet before requiring protection for
-            rts/cts or cts to self.
-        n_capabilities: 802.11n capabilities for for BSS to advertise.
-        ac_capabilities: 802.11ac capabilities for for BSS to advertise.
-        wnm_features: WNM features to enable on the AP.
-
-    Returns: A hostapd_config object that can be used by the hostapd object.
-    """
-
-    # Verify interfaces
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-
-    if channel:
-        frequency = hostapd_config.get_frequency_for_channel(channel)
-    elif frequency:
-        channel = hostapd_config.get_channel_for_frequency(frequency)
-    else:
-        raise ValueError('Specify either frequency or channel.')
-
-    if profile_name == 'whirlwind':
-        # profile indicates phy mode is 11bgn for 2.4Ghz or 11acn for 5Ghz
-        hidden = _get_or_default(hidden, False)
-        force_wmm = _get_or_default(force_wmm, True)
-        beacon_interval = _get_or_default(beacon_interval, 100)
-        short_preamble = _get_or_default(short_preamble, True)
-        dtim_period = _get_or_default(dtim_period, 2)
-        frag_threshold = _get_or_default(frag_threshold, 2346)
-        rts_threshold = _get_or_default(rts_threshold, 2347)
-        if frequency < 5000:
-            interface = iface_wlan_2g
-            mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED)
-            n_capabilities = _get_or_default(n_capabilities, [
-                hostapd_constants.N_CAPABILITY_LDPC,
-                hostapd_constants.N_CAPABILITY_SGI20,
-                hostapd_constants.N_CAPABILITY_SGI40,
-                hostapd_constants.N_CAPABILITY_TX_STBC,
-                hostapd_constants.N_CAPABILITY_RX_STBC1,
-                hostapd_constants.N_CAPABILITY_DSSS_CCK_40
-            ])
-            config = hostapd_config.HostapdConfig(
-                ssid=ssid,
-                hidden=hidden,
-                security=security,
-                pmf_support=pmf_support,
-                interface=interface,
-                mode=mode,
-                force_wmm=force_wmm,
-                beacon_interval=beacon_interval,
-                dtim_period=dtim_period,
-                short_preamble=short_preamble,
-                frequency=frequency,
-                n_capabilities=n_capabilities,
-                frag_threshold=frag_threshold,
-                rts_threshold=rts_threshold,
-                wnm_features=wnm_features,
-                bss_settings=bss_settings)
-        else:
-            interface = iface_wlan_5g
-            vht_bandwidth = _get_or_default(vht_bandwidth, 80)
-            mode = _get_or_default(mode, hostapd_constants.MODE_11AC_MIXED)
-            if hostapd_config.ht40_plus_allowed(channel):
-                extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
-            elif hostapd_config.ht40_minus_allowed(channel):
-                extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
-            # Channel 165 operates in 20MHz with n or ac modes.
-            if channel == 165:
-                mode = hostapd_constants.MODE_11N_MIXED
-                extended_channel = hostapd_constants.N_CAPABILITY_HT20
-            # Define the n capability vector for 20 MHz and higher bandwidth
-            if not vht_bandwidth:
-                pass
-            elif vht_bandwidth >= 40:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC, extended_channel,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1
-                ])
-            else:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1,
-                    hostapd_constants.N_CAPABILITY_HT20
-                ])
-            ac_capabilities = _get_or_default(ac_capabilities, [
-                hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-                hostapd_constants.AC_CAPABILITY_RXLDPC,
-                hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-                hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-                hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-                hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-                hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-                hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN
-            ])
-            config = hostapd_config.HostapdConfig(
-                ssid=ssid,
-                hidden=hidden,
-                security=security,
-                pmf_support=pmf_support,
-                interface=interface,
-                mode=mode,
-                force_wmm=force_wmm,
-                vht_channel_width=vht_bandwidth,
-                beacon_interval=beacon_interval,
-                dtim_period=dtim_period,
-                short_preamble=short_preamble,
-                frequency=frequency,
-                frag_threshold=frag_threshold,
-                rts_threshold=rts_threshold,
-                n_capabilities=n_capabilities,
-                ac_capabilities=ac_capabilities,
-                bss_settings=bss_settings)
-    elif profile_name == 'whirlwind_11ab_legacy':
-        if frequency < 5000:
-            mode = hostapd_constants.MODE_11B
-        else:
-            mode = hostapd_constants.MODE_11A
-
-        config = create_ap_preset(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  ssid=ssid,
-                                  channel=channel,
-                                  mode=mode,
-                                  security=security,
-                                  pmf_support=pmf_support,
-                                  hidden=hidden,
-                                  force_wmm=force_wmm,
-                                  beacon_interval=beacon_interval,
-                                  short_preamble=short_preamble,
-                                  dtim_period=dtim_period,
-                                  rts_threshold=rts_threshold,
-                                  frag_threshold=frag_threshold,
-                                  n_capabilities=[],
-                                  ac_capabilities=[],
-                                  vht_bandwidth=None,
-                                  wnm_features=wnm_features)
-    elif profile_name == 'whirlwind_11ag_legacy':
-        if frequency < 5000:
-            mode = hostapd_constants.MODE_11G
-        else:
-            mode = hostapd_constants.MODE_11A
-
-        config = create_ap_preset(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  ssid=ssid,
-                                  channel=channel,
-                                  mode=mode,
-                                  security=security,
-                                  pmf_support=pmf_support,
-                                  hidden=hidden,
-                                  force_wmm=force_wmm,
-                                  beacon_interval=beacon_interval,
-                                  short_preamble=short_preamble,
-                                  dtim_period=dtim_period,
-                                  rts_threshold=rts_threshold,
-                                  frag_threshold=frag_threshold,
-                                  n_capabilities=[],
-                                  ac_capabilities=[],
-                                  vht_bandwidth=None,
-                                  wnm_features=wnm_features)
-    elif profile_name == 'mistral':
-        hidden = _get_or_default(hidden, False)
-        force_wmm = _get_or_default(force_wmm, True)
-        beacon_interval = _get_or_default(beacon_interval, 100)
-        short_preamble = _get_or_default(short_preamble, True)
-        dtim_period = _get_or_default(dtim_period, 2)
-        frag_threshold = None
-        rts_threshold = None
-
-        # Google IE
-        # Country Code IE ('us' lowercase)
-        vendor_elements = {
-            'vendor_elements':
-            'dd0cf4f5e80505ff0000ffffffff'
-            '070a75732024041e95051e00'
-        }
-        default_configs = {'bridge': 'br-lan', 'iapp_interface': 'br-lan'}
-
-        if frequency < 5000:
-            interface = iface_wlan_2g
-            mode = _get_or_default(mode, hostapd_constants.MODE_11N_MIXED)
-            n_capabilities = _get_or_default(n_capabilities, [
-                hostapd_constants.N_CAPABILITY_LDPC,
-                hostapd_constants.N_CAPABILITY_SGI20,
-                hostapd_constants.N_CAPABILITY_SGI40,
-                hostapd_constants.N_CAPABILITY_TX_STBC,
-                hostapd_constants.N_CAPABILITY_RX_STBC1,
-                hostapd_constants.N_CAPABILITY_DSSS_CCK_40
-            ])
-
-            additional_params = utils.merge_dicts(
-                vendor_elements, hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT, default_configs)
-            config = hostapd_config.HostapdConfig(
-                ssid=ssid,
-                hidden=hidden,
-                security=security,
-                pmf_support=pmf_support,
-                interface=interface,
-                mode=mode,
-                force_wmm=force_wmm,
-                beacon_interval=beacon_interval,
-                dtim_period=dtim_period,
-                short_preamble=short_preamble,
-                frequency=frequency,
-                n_capabilities=n_capabilities,
-                frag_threshold=frag_threshold,
-                rts_threshold=rts_threshold,
-                wnm_features=wnm_features,
-                bss_settings=bss_settings,
-                additional_parameters=additional_params,
-                set_ap_defaults_profile=profile_name)
-        else:
-            interface = iface_wlan_5g
-            vht_bandwidth = _get_or_default(vht_bandwidth, 80)
-            mode = _get_or_default(mode, hostapd_constants.MODE_11AC_MIXED)
-            if hostapd_config.ht40_plus_allowed(channel):
-                extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
-            elif hostapd_config.ht40_minus_allowed(channel):
-                extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
-            # Channel 165 operates in 20MHz with n or ac modes.
-            if channel == 165:
-                mode = hostapd_constants.MODE_11N_MIXED
-                extended_channel = hostapd_constants.N_CAPABILITY_HT20
-            if vht_bandwidth >= 40:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC, extended_channel,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1
-                ])
-            else:
-                n_capabilities = _get_or_default(n_capabilities, [
-                    hostapd_constants.N_CAPABILITY_LDPC,
-                    hostapd_constants.N_CAPABILITY_SGI20,
-                    hostapd_constants.N_CAPABILITY_SGI40,
-                    hostapd_constants.N_CAPABILITY_TX_STBC,
-                    hostapd_constants.N_CAPABILITY_RX_STBC1,
-                    hostapd_constants.N_CAPABILITY_HT20
-                ])
-            ac_capabilities = _get_or_default(ac_capabilities, [
-                hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-                hostapd_constants.AC_CAPABILITY_RXLDPC,
-                hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-                hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-                hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-                hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-                hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-                hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
-                hostapd_constants.AC_CAPABILITY_SU_BEAMFORMER,
-                hostapd_constants.AC_CAPABILITY_SU_BEAMFORMEE,
-                hostapd_constants.AC_CAPABILITY_MU_BEAMFORMER,
-                hostapd_constants.AC_CAPABILITY_SOUNDING_DIMENSION_4,
-                hostapd_constants.AC_CAPABILITY_BF_ANTENNA_4
-            ])
-
-            additional_params = utils.merge_dicts(
-                vendor_elements, hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT, default_configs)
-            config = hostapd_config.HostapdConfig(
-                ssid=ssid,
-                hidden=hidden,
-                security=security,
-                pmf_support=pmf_support,
-                interface=interface,
-                mode=mode,
-                force_wmm=force_wmm,
-                vht_channel_width=vht_bandwidth,
-                beacon_interval=beacon_interval,
-                dtim_period=dtim_period,
-                short_preamble=short_preamble,
-                frequency=frequency,
-                frag_threshold=frag_threshold,
-                rts_threshold=rts_threshold,
-                n_capabilities=n_capabilities,
-                ac_capabilities=ac_capabilities,
-                wnm_features=wnm_features,
-                bss_settings=bss_settings,
-                additional_parameters=additional_params,
-                set_ap_defaults_profile=profile_name)
-    elif profile_name == 'actiontec_pk5000':
-        config = actiontec.actiontec_pk5000(iface_wlan_2g=iface_wlan_2g,
-                                            channel=channel,
-                                            ssid=ssid,
-                                            security=security)
-    elif profile_name == 'actiontec_mi424wr':
-        config = actiontec.actiontec_mi424wr(iface_wlan_2g=iface_wlan_2g,
-                                             channel=channel,
-                                             ssid=ssid,
-                                             security=security)
-    elif profile_name == 'asus_rtac66u':
-        config = asus.asus_rtac66u(iface_wlan_2g=iface_wlan_2g,
-                                   iface_wlan_5g=iface_wlan_5g,
-                                   channel=channel,
-                                   ssid=ssid,
-                                   security=security)
-    elif profile_name == 'asus_rtac86u':
-        config = asus.asus_rtac86u(iface_wlan_2g=iface_wlan_2g,
-                                   iface_wlan_5g=iface_wlan_5g,
-                                   channel=channel,
-                                   ssid=ssid,
-                                   security=security)
-    elif profile_name == 'asus_rtac5300':
-        config = asus.asus_rtac5300(iface_wlan_2g=iface_wlan_2g,
-                                    iface_wlan_5g=iface_wlan_5g,
-                                    channel=channel,
-                                    ssid=ssid,
-                                    security=security)
-    elif profile_name == 'asus_rtn56u':
-        config = asus.asus_rtn56u(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  channel=channel,
-                                  ssid=ssid,
-                                  security=security)
-    elif profile_name == 'asus_rtn66u':
-        config = asus.asus_rtn66u(iface_wlan_2g=iface_wlan_2g,
-                                  iface_wlan_5g=iface_wlan_5g,
-                                  channel=channel,
-                                  ssid=ssid,
-                                  security=security)
-    elif profile_name == 'belkin_f9k1001v5':
-        config = belkin.belkin_f9k1001v5(iface_wlan_2g=iface_wlan_2g,
-                                         channel=channel,
-                                         ssid=ssid,
-                                         security=security)
-    elif profile_name == 'linksys_ea4500':
-        config = linksys.linksys_ea4500(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'linksys_ea9500':
-        config = linksys.linksys_ea9500(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'linksys_wrt1900acv2':
-        config = linksys.linksys_wrt1900acv2(iface_wlan_2g=iface_wlan_2g,
-                                             iface_wlan_5g=iface_wlan_5g,
-                                             channel=channel,
-                                             ssid=ssid,
-                                             security=security)
-    elif profile_name == 'netgear_r7000':
-        config = netgear.netgear_r7000(iface_wlan_2g=iface_wlan_2g,
-                                       iface_wlan_5g=iface_wlan_5g,
-                                       channel=channel,
-                                       ssid=ssid,
-                                       security=security)
-    elif profile_name == 'netgear_wndr3400':
-        config = netgear.netgear_wndr3400(iface_wlan_2g=iface_wlan_2g,
-                                          iface_wlan_5g=iface_wlan_5g,
-                                          channel=channel,
-                                          ssid=ssid,
-                                          security=security)
-    elif profile_name == 'securifi_almond':
-        config = securifi.securifi_almond(iface_wlan_2g=iface_wlan_2g,
-                                          channel=channel,
-                                          ssid=ssid,
-                                          security=security)
-    elif profile_name == 'tplink_archerc5':
-        config = tplink.tplink_archerc5(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'tplink_archerc7':
-        config = tplink.tplink_archerc7(iface_wlan_2g=iface_wlan_2g,
-                                        iface_wlan_5g=iface_wlan_5g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    elif profile_name == 'tplink_c1200':
-        config = tplink.tplink_c1200(iface_wlan_2g=iface_wlan_2g,
-                                     iface_wlan_5g=iface_wlan_5g,
-                                     channel=channel,
-                                     ssid=ssid,
-                                     security=security)
-    elif profile_name == 'tplink_tlwr940n':
-        config = tplink.tplink_tlwr940n(iface_wlan_2g=iface_wlan_2g,
-                                        channel=channel,
-                                        ssid=ssid,
-                                        security=security)
-    else:
-        raise ValueError('Invalid ap model specified (%s)' % profile_name)
-
-    return config
diff --git a/src/antlion/controllers/ap_lib/hostapd_bss_settings.py b/src/antlion/controllers/ap_lib/hostapd_bss_settings.py
deleted file mode 100644
index 3d298f5..0000000
--- a/src/antlion/controllers/ap_lib/hostapd_bss_settings.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-
-
-class BssSettings(object):
-    """Settings for a bss.
-
-    Settings for a bss to allow multiple network on a single device.
-
-    Attributes:
-        name: string, The name that this bss will go by.
-        ssid: string, The name of the ssid to brodcast.
-        hidden: bool, If true then the ssid will be hidden.
-        security: Security, The security settings to use.
-    """
-
-    def __init__(self, name, ssid, hidden=False, security=None, bssid=None):
-        self.name = name
-        self.ssid = ssid
-        self.hidden = hidden
-        self.security = security
-        self.bssid = bssid
-
-    def generate_dict(self):
-        """Returns: A dictionary of bss settings."""
-        settings = collections.OrderedDict()
-        settings['bss'] = self.name
-        if self.bssid:
-            settings['bssid'] = self.bssid
-        if self.ssid:
-            settings['ssid'] = self.ssid
-            settings['ignore_broadcast_ssid'] = 1 if self.hidden else 0
-
-        if self.security:
-            security_settings = self.security.generate_dict()
-            for k, v in security_settings.items():
-                settings[k] = v
-
-        return settings
diff --git a/src/antlion/controllers/ap_lib/hostapd_config.py b/src/antlion/controllers/ap_lib/hostapd_config.py
deleted file mode 100644
index abb50c9..0000000
--- a/src/antlion/controllers/ap_lib/hostapd_config.py
+++ /dev/null
@@ -1,675 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import logging
-from typing import FrozenSet
-
-from antlion.controllers.ap_lib import hostapd_constants
-
-
-def ht40_plus_allowed(channel):
-    """Returns: True iff HT40+ is enabled for this configuration."""
-    channel_supported = (channel in hostapd_constants.HT40_ALLOW_MAP[
-        hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS])
-    return (channel_supported)
-
-
-def ht40_minus_allowed(channel):
-    """Returns: True iff HT40- is enabled for this configuration."""
-    channel_supported = (channel in hostapd_constants.HT40_ALLOW_MAP[
-        hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS])
-    return (channel_supported)
-
-
-def get_frequency_for_channel(channel):
-    """The frequency associated with a given channel number.
-
-    Args:
-        value: int channel number.
-
-    Returns:
-        int, frequency in MHz associated with the channel.
-
-    """
-    for frequency, channel_iter in \
-        hostapd_constants.CHANNEL_MAP.items():
-        if channel == channel_iter:
-            return frequency
-    else:
-        raise ValueError('Unknown channel value: %r.' % channel)
-
-
-def get_channel_for_frequency(frequency):
-    """The channel number associated with a given frequency.
-
-    Args:
-        value: int frequency in MHz.
-
-    Returns:
-        int, frequency associated with the channel.
-
-    """
-    return hostapd_constants.CHANNEL_MAP[frequency]
-
-
-class HostapdConfig(object):
-    """The root settings for the router.
-
-    All the settings for a router that are not part of an ssid.
-    """
-
-    def _get_11ac_center_channel_from_channel(self, channel):
-        """Returns the center channel of the selected channel band based
-           on the channel and channel bandwidth provided.
-        """
-        channel = int(channel)
-        center_channel_delta = hostapd_constants.CENTER_CHANNEL_MAP[
-            self._vht_oper_chwidth]['delta']
-
-        for channel_map in hostapd_constants.CENTER_CHANNEL_MAP[
-                self._vht_oper_chwidth]['channels']:
-            lower_channel_bound, upper_channel_bound = channel_map
-            if lower_channel_bound <= channel <= upper_channel_bound:
-                return lower_channel_bound + center_channel_delta
-        raise ValueError('Invalid channel for {channel_width}.'.format(
-            channel_width=self._vht_oper_chwidth))
-
-    @property
-    def _get_default_config(self):
-        """Returns: dict of default options for hostapd."""
-        if self.set_ap_defaults_profile == 'mistral':
-            return collections.OrderedDict([
-                ('logger_syslog', '-1'),
-                ('logger_syslog_level', '0'),
-                # default RTS and frag threshold to ``off''
-                ('rts_threshold', None),
-                ('fragm_threshold', None),
-                ('driver', hostapd_constants.DRIVER_NAME)
-            ])
-        else:
-            return collections.OrderedDict([
-                ('logger_syslog', '-1'),
-                ('logger_syslog_level', '0'),
-                # default RTS and frag threshold to ``off''
-                ('rts_threshold', '2347'),
-                ('fragm_threshold', '2346'),
-                ('driver', hostapd_constants.DRIVER_NAME)
-            ])
-
-    @property
-    def _hostapd_ht_capabilities(self):
-        """Returns: string suitable for the ht_capab= line in a hostapd config.
-        """
-        ret = []
-        for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
-            if cap in self._n_capabilities:
-                ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap])
-        return ''.join(ret)
-
-    @property
-    def _hostapd_vht_capabilities(self):
-        """Returns: string suitable for the vht_capab= line in a hostapd config.
-        """
-        ret = []
-        for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys():
-            if cap in self._ac_capabilities:
-                ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap])
-        return ''.join(ret)
-
-    @property
-    def _require_ht(self):
-        """Returns: True iff clients should be required to support HT."""
-        return self._mode == hostapd_constants.MODE_11N_PURE
-
-    @property
-    def _require_vht(self):
-        """Returns: True if clients should be required to support VHT."""
-        return self._mode == hostapd_constants.MODE_11AC_PURE
-
-    @property
-    def hw_mode(self):
-        """Returns: string hardware mode understood by hostapd."""
-        if self._mode == hostapd_constants.MODE_11A:
-            return hostapd_constants.MODE_11A
-        if self._mode == hostapd_constants.MODE_11B:
-            return hostapd_constants.MODE_11B
-        if self._mode == hostapd_constants.MODE_11G:
-            return hostapd_constants.MODE_11G
-        if self.is_11n or self.is_11ac:
-            # For their own historical reasons, hostapd wants it this way.
-            if self._frequency > 5000:
-                return hostapd_constants.MODE_11A
-            return hostapd_constants.MODE_11G
-        raise ValueError('Invalid mode.')
-
-    @property
-    def is_11n(self):
-        """Returns: True if we're trying to host an 802.11n network."""
-        return self._mode in (hostapd_constants.MODE_11N_MIXED,
-                              hostapd_constants.MODE_11N_PURE)
-
-    @property
-    def is_11ac(self):
-        """Returns: True if we're trying to host an 802.11ac network."""
-        return self._mode in (hostapd_constants.MODE_11AC_MIXED,
-                              hostapd_constants.MODE_11AC_PURE)
-
-    @property
-    def channel(self):
-        """Returns: int channel number for self.frequency."""
-        return get_channel_for_frequency(self.frequency)
-
-    @channel.setter
-    def channel(self, value):
-        """Sets the channel number to configure hostapd to listen on.
-
-        Args:
-            value: int, channel number.
-
-        """
-        self.frequency = get_frequency_for_channel(value)
-
-    @property
-    def bssid(self):
-        return self._bssid
-
-    @bssid.setter
-    def bssid(self, value):
-        self._bssid = value
-
-    @property
-    def frequency(self):
-        """Returns: int, frequency for hostapd to listen on."""
-        return self._frequency
-
-    @frequency.setter
-    def frequency(self, value):
-        """Sets the frequency for hostapd to listen on.
-
-        Args:
-            value: int, frequency in MHz.
-
-        """
-        if value not in hostapd_constants.CHANNEL_MAP:
-            raise ValueError('Tried to set an invalid frequency: %r.' % value)
-
-        self._frequency = value
-
-    @property
-    def bss_lookup(self):
-        return self._bss_lookup
-
-    @property
-    def ssid(self):
-        """Returns: SsidSettings, The root Ssid settings being used."""
-        return self._ssid
-
-    @ssid.setter
-    def ssid(self, value):
-        """Sets the ssid for the hostapd.
-
-        Args:
-            value: SsidSettings, new ssid settings to use.
-
-        """
-        self._ssid = value
-
-    @property
-    def hidden(self):
-        """Returns: bool, True if the ssid is hidden, false otherwise."""
-        return self._hidden
-
-    @hidden.setter
-    def hidden(self, value):
-        """Sets if this ssid is hidden.
-
-        Args:
-            value: bool, If true the ssid will be hidden.
-        """
-        self.hidden = value
-
-    @property
-    def security(self):
-        """Returns: The security type being used."""
-        return self._security
-
-    @security.setter
-    def security(self, value):
-        """Sets the security options to use.
-
-        Args:
-            value: Security, The type of security to use.
-        """
-        self._security = value
-
-    @property
-    def ht_packet_capture_mode(self):
-        """Get an appropriate packet capture HT parameter.
-
-        When we go to configure a raw monitor we need to configure
-        the phy to listen on the correct channel.  Part of doing
-        so is to specify the channel width for HT channels.  In the
-        case that the AP is configured to be either HT40+ or HT40-,
-        we could return the wrong parameter because we don't know which
-        configuration will be chosen by hostap.
-
-        Returns:
-            string, HT parameter for frequency configuration.
-
-        """
-        if not self.is_11n:
-            return None
-
-        if ht40_plus_allowed(self.channel):
-            return 'HT40+'
-
-        if ht40_minus_allowed(self.channel):
-            return 'HT40-'
-
-        return 'HT20'
-
-    @property
-    def beacon_footer(self):
-        """Returns: bool _beacon_footer value."""
-        return self._beacon_footer
-
-    def beacon_footer(self, value):
-        """Changes the beacon footer.
-
-        Args:
-            value: bool, The beacon footer vlaue.
-        """
-        self._beacon_footer = value
-
-    @property
-    def scenario_name(self):
-        """Returns: string _scenario_name value, or None."""
-        return self._scenario_name
-
-    @property
-    def min_streams(self):
-        """Returns: int, _min_streams value, or None."""
-        return self._min_streams
-
-    @property
-    def wnm_features(self) -> FrozenSet[hostapd_constants.WnmFeature]:
-        return self._wnm_features
-
-    @wnm_features.setter
-    def wnm_features(self, value: FrozenSet[hostapd_constants.WnmFeature]):
-        self._wnm_features = value
-
-    def __init__(self,
-                 interface=None,
-                 mode=None,
-                 channel=None,
-                 frequency=None,
-                 n_capabilities=[],
-                 beacon_interval=None,
-                 dtim_period=None,
-                 frag_threshold=None,
-                 rts_threshold=None,
-                 short_preamble=None,
-                 ssid=None,
-                 hidden=False,
-                 security=None,
-                 bssid=None,
-                 force_wmm=None,
-                 pmf_support=None,
-                 obss_interval=None,
-                 vht_channel_width=None,
-                 vht_center_channel=None,
-                 ac_capabilities=[],
-                 beacon_footer='',
-                 spectrum_mgmt_required=None,
-                 scenario_name=None,
-                 min_streams=None,
-                 wnm_features: FrozenSet[
-                     hostapd_constants.WnmFeature] = frozenset(),
-                 bss_settings=[],
-                 additional_parameters={},
-                 set_ap_defaults_profile='whirlwind'):
-        """Construct a HostapdConfig.
-
-        You may specify channel or frequency, but not both.  Both options
-        are checked for validity (i.e. you can't specify an invalid channel
-        or a frequency that will not be accepted).
-
-        Args:
-            interface: string, The name of the interface to use.
-            mode: string, MODE_11x defined above.
-            channel: int, channel number.
-            frequency: int, frequency of channel.
-            n_capabilities: list of N_CAPABILITY_x defined above.
-            beacon_interval: int, beacon interval of AP.
-            dtim_period: int, include a DTIM every |dtim_period| beacons.
-            frag_threshold: int, maximum outgoing data frame size.
-            rts_threshold: int, maximum packet size without requiring explicit
-                protection via rts/cts or cts to self.
-            short_preamble: Whether to use a short preamble.
-            ssid: string, The name of the ssid to brodcast.
-            hidden: bool, Should the ssid be hidden.
-            security: Security, the secuirty settings to use.
-            bssid: string, a MAC address like string for the BSSID.
-            force_wmm: True if we should force WMM on, False if we should
-                force it off, None if we shouldn't force anything.
-            pmf_support: one of PMF_SUPPORT_* above.  Controls whether the
-                client supports/must support 802.11w. If None, defaults to
-                required with wpa3, else defaults to disabled.
-            obss_interval: int, interval in seconds that client should be
-                required to do background scans for overlapping BSSes.
-            vht_channel_width: object channel width
-            vht_center_channel: int, center channel of segment 0.
-            ac_capabilities: list of AC_CAPABILITY_x defined above.
-            beacon_footer: string, containing (unvalidated) IE data to be
-                placed at the end of the beacon.
-            spectrum_mgmt_required: True if we require the DUT to support
-                spectrum management.
-            scenario_name: string to be included in file names, instead
-                of the interface name.
-            min_streams: int, number of spatial streams required.
-            wnm_features: WNM features to enable on the AP.
-            control_interface: The file name to use as the control interface.
-            bss_settings: The settings for all bss.
-            additional_parameters: A dictionary of additional parameters to add
-                to the hostapd config.
-            set_ap_defaults_profile: profile name to load defaults from
-        """
-        self.set_ap_defaults_profile = set_ap_defaults_profile
-        self._interface = interface
-        if channel is not None and frequency is not None:
-            raise ValueError('Specify either frequency or channel '
-                             'but not both.')
-
-        self._wmm_enabled = False
-        unknown_caps = [
-            cap for cap in n_capabilities
-            if cap not in hostapd_constants.N_CAPABILITIES_MAPPING
-        ]
-        if unknown_caps:
-            raise ValueError('Unknown capabilities: %r' % unknown_caps)
-
-        self._frequency = None
-        if channel:
-            self.channel = channel
-        elif frequency:
-            self.frequency = frequency
-        else:
-            raise ValueError('Specify either frequency or channel.')
-        '''
-        if set_ap_defaults_model:
-            ap_default_config = hostapd_ap_default_configs.APDefaultConfig(
-                profile_name=set_ap_defaults_model, frequency=self.frequency)
-            force_wmm = ap_default_config.force_wmm
-            beacon_interval = ap_default_config.beacon_interval
-            dtim_period = ap_default_config.dtim_period
-            short_preamble = ap_default_config.short_preamble
-            self._interface = ap_default_config.interface
-            mode = ap_default_config.mode
-            if ap_default_config.n_capabilities:
-                n_capabilities = ap_default_config.n_capabilities
-            if ap_default_config.ac_capabilities:
-                ap_default_config = ap_default_config.ac_capabilities
-        '''
-
-        self._n_capabilities = set(n_capabilities)
-        if self._n_capabilities:
-            self._wmm_enabled = True
-        if self._n_capabilities and mode is None:
-            mode = hostapd_constants.MODE_11N_PURE
-        self._mode = mode
-
-        if not self.supports_frequency(self.frequency):
-            raise ValueError('Configured a mode %s that does not support '
-                             'frequency %d' % (self._mode, self.frequency))
-
-        self._beacon_interval = beacon_interval
-        self._dtim_period = dtim_period
-        self._frag_threshold = frag_threshold
-        self._rts_threshold = rts_threshold
-        self._short_preamble = short_preamble
-        self._ssid = ssid
-        self._hidden = hidden
-        self._security = security
-        self._bssid = bssid
-        if force_wmm is not None:
-            if force_wmm:
-                self._wmm_enabled = 1
-            else:
-                self._wmm_enabled = 0
-        # Default PMF Values
-        if pmf_support is None:
-            if (self.security and self.security.security_mode_string ==
-                    hostapd_constants.WPA3_STRING):
-                # Set PMF required for WP3
-                self._pmf_support = hostapd_constants.PMF_SUPPORT_REQUIRED
-            elif (self.security and self.security.security_mode_string in
-                  hostapd_constants.WPA3_MODE_STRINGS):
-                # Default PMF to enabled for WPA3 mixed modes (can be
-                # overwritten by explicitly provided value)
-                self._pmf_support = hostapd_constants.PMF_SUPPORT_ENABLED
-            else:
-                # Default PMD to disabled for all other modes (can be
-                # overwritten by explicitly provided value)
-                self._pmf_support = hostapd_constants.PMF_SUPPORT_DISABLED
-        elif pmf_support not in hostapd_constants.PMF_SUPPORT_VALUES:
-            raise ValueError('Invalid value for pmf_support: %r' % pmf_support)
-        elif (pmf_support != hostapd_constants.PMF_SUPPORT_REQUIRED
-              and self.security and self.security.security_mode_string ==
-              hostapd_constants.WPA3_STRING):
-            raise ValueError('PMF support must be required with wpa3.')
-        else:
-            self._pmf_support = pmf_support
-        self._obss_interval = obss_interval
-        if self.is_11ac:
-            if str(vht_channel_width) == '40' or str(
-                    vht_channel_width) == '20':
-                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_40
-            elif str(vht_channel_width) == '80':
-                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80
-            elif str(vht_channel_width) == '160':
-                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_160
-            elif str(vht_channel_width) == '80+80':
-                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80_80
-            elif vht_channel_width is not None:
-                raise ValueError('Invalid channel width')
-            else:
-                logging.warning(
-                    'No channel bandwidth specified.  Using 80MHz for 11ac.')
-                self._vht_oper_chwidth = 1
-            if vht_center_channel is not None:
-                self._vht_oper_centr_freq_seg0_idx = vht_center_channel
-            elif vht_channel_width == 20:
-                self._vht_oper_centr_freq_seg0_idx = channel
-            else:
-                self._vht_oper_centr_freq_seg0_idx = self._get_11ac_center_channel_from_channel(
-                    self.channel)
-            self._ac_capabilities = set(ac_capabilities)
-        self._beacon_footer = beacon_footer
-        self._spectrum_mgmt_required = spectrum_mgmt_required
-        self._scenario_name = scenario_name
-        self._min_streams = min_streams
-        self._wnm_features = wnm_features
-        self._additional_parameters = additional_parameters
-
-        self._bss_lookup = collections.OrderedDict()
-        for bss in bss_settings:
-            if bss.name in self._bss_lookup:
-                raise ValueError('Cannot have multiple bss settings with the'
-                                 ' same name.')
-            self._bss_lookup[bss.name] = bss
-
-    def __repr__(self):
-        return (
-            '%s(mode=%r, channel=%r, frequency=%r, '
-            'n_capabilities=%r, beacon_interval=%r, '
-            'dtim_period=%r, frag_threshold=%r, ssid=%r, bssid=%r, '
-            'wmm_enabled=%r, security_config=%r, '
-            'spectrum_mgmt_required=%r)' %
-            (self.__class__.__name__, self._mode, self.channel, self.frequency,
-             self._n_capabilities, self._beacon_interval, self._dtim_period,
-             self._frag_threshold, self._ssid, self._bssid, self._wmm_enabled,
-             self._security, self._spectrum_mgmt_required))
-
-    def supports_channel(self, value):
-        """Check whether channel is supported by the current hardware mode.
-
-        @param value: int channel to check.
-        @return True iff the current mode supports the band of the channel.
-
-        """
-        for freq, channel in hostapd_constants.CHANNEL_MAP.iteritems():
-            if channel == value:
-                return self.supports_frequency(freq)
-
-        return False
-
-    def supports_frequency(self, frequency):
-        """Check whether frequency is supported by the current hardware mode.
-
-        @param frequency: int frequency to check.
-        @return True iff the current mode supports the band of the frequency.
-
-        """
-        if self._mode == hostapd_constants.MODE_11A and frequency < 5000:
-            return False
-
-        if self._mode in (hostapd_constants.MODE_11B,
-                          hostapd_constants.MODE_11G) and frequency > 5000:
-            return False
-
-        if frequency not in hostapd_constants.CHANNEL_MAP:
-            return False
-
-        channel = hostapd_constants.CHANNEL_MAP[frequency]
-        supports_plus = (channel in hostapd_constants.HT40_ALLOW_MAP[
-            hostapd_constants.N_CAPABILITY_HT40_PLUS_CHANNELS])
-        supports_minus = (channel in hostapd_constants.HT40_ALLOW_MAP[
-            hostapd_constants.N_CAPABILITY_HT40_MINUS_CHANNELS])
-        if (hostapd_constants.N_CAPABILITY_HT40_PLUS in self._n_capabilities
-                and not supports_plus):
-            return False
-
-        if (hostapd_constants.N_CAPABILITY_HT40_MINUS in self._n_capabilities
-                and not supports_minus):
-            return False
-
-        return True
-
-    def add_bss(self, bss):
-        """Adds a new bss setting.
-
-        Args:
-            bss: The bss settings to add.
-        """
-        if bss.name in self._bss_lookup:
-            raise ValueError('A bss with the same name already exists.')
-
-        self._bss_lookup[bss.name] = bss
-
-    def remove_bss(self, bss_name):
-        """Removes a bss setting from the config."""
-        del self._bss_lookup[bss_name]
-
-    def package_configs(self):
-        """Package the configs.
-
-        Returns:
-            A list of dictionaries, one dictionary for each section of the
-            config.
-        """
-        # Start with the default config parameters.
-        conf = self._get_default_config
-
-        if self._interface:
-            conf['interface'] = self._interface
-        if self._bssid:
-            conf['bssid'] = self._bssid
-        if self._ssid:
-            conf['ssid'] = self._ssid
-            conf['ignore_broadcast_ssid'] = 1 if self._hidden else 0
-        conf['channel'] = self.channel
-        conf['hw_mode'] = self.hw_mode
-        if self.is_11n or self.is_11ac:
-            conf['ieee80211n'] = 1
-            conf['ht_capab'] = self._hostapd_ht_capabilities
-        if self.is_11ac:
-            conf['ieee80211ac'] = 1
-            conf['vht_oper_chwidth'] = self._vht_oper_chwidth
-            conf['vht_oper_centr_freq_seg0_idx'] = \
-                    self._vht_oper_centr_freq_seg0_idx
-            conf['vht_capab'] = self._hostapd_vht_capabilities
-        if self._wmm_enabled is not None:
-            conf['wmm_enabled'] = self._wmm_enabled
-        if self._require_ht:
-            conf['require_ht'] = 1
-        if self._require_vht:
-            conf['require_vht'] = 1
-        if self._beacon_interval:
-            conf['beacon_int'] = self._beacon_interval
-        if self._dtim_period:
-            conf['dtim_period'] = self._dtim_period
-        if self._frag_threshold:
-            conf['fragm_threshold'] = self._frag_threshold
-        if self._rts_threshold:
-            conf['rts_threshold'] = self._rts_threshold
-        if self._pmf_support:
-            conf['ieee80211w'] = self._pmf_support
-        if self._obss_interval:
-            conf['obss_interval'] = self._obss_interval
-        if self._short_preamble:
-            conf['preamble'] = 1
-        if self._spectrum_mgmt_required:
-            # To set spectrum_mgmt_required, we must first set
-            # local_pwr_constraint. And to set local_pwr_constraint,
-            # we must first set ieee80211d. And to set ieee80211d, ...
-            # Point being: order matters here.
-            conf['country_code'] = 'US'  # Required for local_pwr_constraint
-            conf['ieee80211d'] = 1  # Required for local_pwr_constraint
-            conf['local_pwr_constraint'] = 0  # No local constraint
-            conf['spectrum_mgmt_required'] = 1  # Requires local_pwr_constraint
-
-        if self._security:
-            for k, v in self._security.generate_dict().items():
-                conf[k] = v
-
-        all_conf = [conf]
-
-        for bss in self._bss_lookup.values():
-            bss_conf = collections.OrderedDict()
-            for k, v in (bss.generate_dict()).items():
-                bss_conf[k] = v
-            all_conf.append(bss_conf)
-
-        for wnm_feature in self._wnm_features:
-            if wnm_feature == hostapd_constants.WnmFeature.TIME_ADVERTISEMENT:
-                conf.update(hostapd_constants.ENABLE_WNM_TIME_ADVERTISEMENT)
-            elif wnm_feature == hostapd_constants.WnmFeature.WNM_SLEEP_MODE:
-                conf.update(hostapd_constants.ENABLE_WNM_SLEEP_MODE)
-            elif wnm_feature == hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT:
-                conf.update(
-                    hostapd_constants.ENABLE_WNM_BSS_TRANSITION_MANAGEMENT)
-            elif wnm_feature == hostapd_constants.WnmFeature.PROXY_ARP:
-                conf.update(hostapd_constants.ENABLE_WNM_PROXY_ARP)
-            elif wnm_feature == hostapd_constants.WnmFeature.IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST:
-                conf.update(
-                    hostapd_constants.
-                    ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
-                )
-
-        if self._additional_parameters:
-            all_conf.append(self._additional_parameters)
-
-        return all_conf
diff --git a/src/antlion/controllers/ap_lib/hostapd_constants.py b/src/antlion/controllers/ap_lib/hostapd_constants.py
deleted file mode 100755
index 3a95ffa..0000000
--- a/src/antlion/controllers/ap_lib/hostapd_constants.py
+++ /dev/null
@@ -1,1410 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-
-from enum import Enum, auto, unique
-
-BAND_2G = '2g'
-BAND_5G = '5g'
-CHANNEL_BANDWIDTH_20MHZ = 20
-CHANNEL_BANDWIDTH_40MHZ = 40
-CHANNEL_BANDWIDTH_80MHZ = 80
-CHANNEL_BANDWIDTH_160MHZ = 160
-WEP = 0
-WPA1 = 1
-WPA2 = 2
-WPA3 = 2  # same as wpa2 and wpa2/wpa3, distinguished by wpa_key_mgmt
-MIXED = 3  # applies to wpa/wpa2, and wpa/wpa2/wpa3, distinquished by wpa_key_mgmt
-ENT = 4  # get the correct constant
-MAX_WPA_PSK_LENGTH = 64
-MIN_WPA_PSK_LENGTH = 8
-MAX_WPA_PASSWORD_LENGTH = 63
-WPA_STRICT_REKEY = 1
-WPA_DEFAULT_CIPHER = 'TKIP'
-WPA2_DEFAULT_CIPER = 'CCMP'
-WPA_GROUP_KEY_ROTATION_TIME = 600
-WPA_STRICT_REKEY_DEFAULT = True
-WEP_STRING = 'wep'
-WPA_STRING = 'wpa'
-WPA2_STRING = 'wpa2'
-WPA_MIXED_STRING = 'wpa/wpa2'
-WPA3_STRING = 'wpa3'
-WPA2_WPA3_MIXED_STRING = 'wpa2/wpa3'
-WPA_WPA2_WPA3_MIXED_STRING = 'wpa/wpa2/wpa3'
-ENT_STRING = 'ent'
-ENT_KEY_MGMT = 'WPA-EAP'
-WPA_PSK_KEY_MGMT = 'WPA-PSK'
-SAE_KEY_MGMT = 'SAE'
-DUAL_WPA_PSK_SAE_KEY_MGMT = 'WPA-PSK SAE'
-SECURITY_STRING_TO_SECURITY_MODE_INT = {
-    WPA_STRING: WPA1,
-    WPA2_STRING: WPA2,
-    WPA_MIXED_STRING: MIXED,
-    WPA3_STRING: WPA3,
-    WPA2_WPA3_MIXED_STRING: WPA3,
-    WPA_WPA2_WPA3_MIXED_STRING: MIXED,
-    WEP_STRING: WEP,
-    ENT_STRING: ENT
-}
-SECURITY_STRING_TO_WPA_KEY_MGMT = {
-    WPA_STRING: WPA_PSK_KEY_MGMT,
-    WPA2_STRING: WPA_PSK_KEY_MGMT,
-    WPA_MIXED_STRING: WPA_PSK_KEY_MGMT,
-    WPA3_STRING: SAE_KEY_MGMT,
-    WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
-    WPA_WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT
-}
-WPA3_MODE_STRINGS = {
-    WPA3_STRING, WPA2_WPA3_MIXED_STRING, WPA_WPA2_WPA3_MIXED_STRING
-}
-
-SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY = {
-    WEP_STRING: WEP_STRING,
-    WPA_STRING: WPA_STRING,
-    WPA2_STRING: WPA2_STRING,
-    WPA_MIXED_STRING: WPA2_STRING,
-    WPA3_STRING: WPA3_STRING,
-    WPA2_WPA3_MIXED_STRING: WPA3_STRING,
-    WPA_WPA2_WPA3_MIXED_STRING: WPA3_STRING
-}
-
-IEEE8021X = 1
-WLAN0_STRING = 'wlan0'
-WLAN1_STRING = 'wlan1'
-WLAN2_STRING = 'wlan2'
-WLAN3_STRING = 'wlan3'
-WLAN0_GALE = 'wlan-2400mhz'
-WLAN1_GALE = 'wlan-5000mhz'
-WEP_DEFAULT_KEY = 0
-WEP_HEX_LENGTH = [10, 26, 32, 58]
-WEP_STR_LENGTH = [5, 13, 16]
-WEP_DEFAULT_STR_LENGTH = 13
-AP_DEFAULT_CHANNEL_2G = 6
-AP_DEFAULT_CHANNEL_5G = 36
-AP_DEFAULT_MAX_SSIDS_2G = 8
-AP_DEFAULT_MAX_SSIDS_5G = 8
-AP_SSID_LENGTH_2G = 8
-AP_SSID_MIN_LENGTH_2G = 1
-AP_SSID_MAX_LENGTH_2G = 32
-AP_PASSPHRASE_LENGTH_2G = 10
-AP_SSID_LENGTH_5G = 8
-AP_SSID_MIN_LENGTH_5G = 1
-AP_SSID_MAX_LENGTH_5G = 32
-AP_PASSPHRASE_LENGTH_5G = 10
-INTERFACE_2G_LIST = [WLAN0_STRING, WLAN0_GALE]
-INTERFACE_5G_LIST = [WLAN1_STRING, WLAN1_GALE]
-HIGH_BEACON_INTERVAL = 300
-LOW_BEACON_INTERVAL = 100
-HIGH_DTIM = 3
-LOW_DTIM = 1
-
-# A mapping of frequency to channel number.  This includes some
-# frequencies used outside the US.
-CHANNEL_MAP = {
-    2412: 1,
-    2417: 2,
-    2422: 3,
-    2427: 4,
-    2432: 5,
-    2437: 6,
-    2442: 7,
-    2447: 8,
-    2452: 9,
-    2457: 10,
-    2462: 11,
-    # 12, 13 are only legitimate outside the US.
-    2467: 12,
-    2472: 13,
-    # 14 is for Japan, DSSS and CCK only.
-    2484: 14,
-    # 34 valid in Japan.
-    5170: 34,
-    # 36-116 valid in the US, except 38, 42, and 46, which have
-    # mixed international support.
-    5180: 36,
-    5190: 38,
-    5200: 40,
-    5210: 42,
-    5220: 44,
-    5230: 46,
-    5240: 48,
-    # DFS channels.
-    5260: 52,
-    5280: 56,
-    5300: 60,
-    5320: 64,
-    5500: 100,
-    5520: 104,
-    5540: 108,
-    5560: 112,
-    5580: 116,
-    # 120, 124, 128 valid in Europe/Japan.
-    5600: 120,
-    5620: 124,
-    5640: 128,
-    # 132+ valid in US.
-    5660: 132,
-    5680: 136,
-    5700: 140,
-    # 144 is supported by a subset of WiFi chips
-    # (e.g. bcm4354, but not ath9k).
-    5720: 144,
-    # End DFS channels.
-    5745: 149,
-    5755: 151,
-    5765: 153,
-    5775: 155,
-    5795: 159,
-    5785: 157,
-    5805: 161,
-    5825: 165
-}
-FREQUENCY_MAP = {v: k for k, v in CHANNEL_MAP.items()}
-
-US_CHANNELS_2G = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
-US_CHANNELS_5G = [
-    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128,
-    132, 136, 140, 144, 149, 153, 157, 161, 165
-]
-
-LOWEST_5G_CHANNEL = 36
-
-MODE_11A = 'a'
-MODE_11B = 'b'
-MODE_11G = 'g'
-MODE_11N_MIXED = 'n-mixed'
-MODE_11N_PURE = 'n-only'
-MODE_11AC_MIXED = 'ac-mixed'
-MODE_11AC_PURE = 'ac-only'
-
-N_CAPABILITY_LDPC = object()
-N_CAPABILITY_HT20 = object()
-N_CAPABILITY_HT40_PLUS = object()
-N_CAPABILITY_HT40_MINUS = object()
-N_CAPABILITY_GREENFIELD = object()
-N_CAPABILITY_SGI20 = object()
-N_CAPABILITY_SGI40 = object()
-N_CAPABILITY_TX_STBC = object()
-N_CAPABILITY_RX_STBC1 = object()
-N_CAPABILITY_RX_STBC12 = object()
-N_CAPABILITY_RX_STBC123 = object()
-N_CAPABILITY_DSSS_CCK_40 = object()
-N_CAPABILITY_LSIG_TXOP_PROT = object()
-N_CAPABILITY_40_INTOLERANT = object()
-N_CAPABILITY_MAX_AMSDU_7935 = object()
-N_CAPABILITY_DELAY_BLOCK_ACK = object()
-N_CAPABILITY_SMPS_STATIC = object()
-N_CAPABILITY_SMPS_DYNAMIC = object()
-N_CAPABILITIES_MAPPING = {
-    N_CAPABILITY_LDPC: '[LDPC]',
-    N_CAPABILITY_HT20: '[HT20]',
-    N_CAPABILITY_HT40_PLUS: '[HT40+]',
-    N_CAPABILITY_HT40_MINUS: '[HT40-]',
-    N_CAPABILITY_GREENFIELD: '[GF]',
-    N_CAPABILITY_SGI20: '[SHORT-GI-20]',
-    N_CAPABILITY_SGI40: '[SHORT-GI-40]',
-    N_CAPABILITY_TX_STBC: '[TX-STBC]',
-    N_CAPABILITY_RX_STBC1: '[RX-STBC1]',
-    N_CAPABILITY_RX_STBC12: '[RX-STBC12]',
-    N_CAPABILITY_RX_STBC123: '[RX-STBC123]',
-    N_CAPABILITY_DSSS_CCK_40: '[DSSS_CCK-40]',
-    N_CAPABILITY_LSIG_TXOP_PROT: '[LSIG-TXOP-PROT]',
-    N_CAPABILITY_40_INTOLERANT: '[40-INTOLERANT]',
-    N_CAPABILITY_MAX_AMSDU_7935: '[MAX-AMSDU-7935]',
-    N_CAPABILITY_DELAY_BLOCK_ACK: '[DELAYED-BA]',
-    N_CAPABILITY_SMPS_STATIC: '[SMPS-STATIC]',
-    N_CAPABILITY_SMPS_DYNAMIC: '[SMPS-DYNAMIC]'
-}
-N_CAPABILITIES_MAPPING_INVERSE = {
-    v: k
-    for k, v in N_CAPABILITIES_MAPPING.items()
-}
-N_CAPABILITY_HT40_MINUS_CHANNELS = object()
-N_CAPABILITY_HT40_PLUS_CHANNELS = object()
-AC_CAPABILITY_VHT160 = object()
-AC_CAPABILITY_VHT160_80PLUS80 = object()
-AC_CAPABILITY_RXLDPC = object()
-AC_CAPABILITY_SHORT_GI_80 = object()
-AC_CAPABILITY_SHORT_GI_160 = object()
-AC_CAPABILITY_TX_STBC_2BY1 = object()
-AC_CAPABILITY_RX_STBC_1 = object()
-AC_CAPABILITY_RX_STBC_12 = object()
-AC_CAPABILITY_RX_STBC_123 = object()
-AC_CAPABILITY_RX_STBC_1234 = object()
-AC_CAPABILITY_SU_BEAMFORMER = object()
-AC_CAPABILITY_SU_BEAMFORMEE = object()
-AC_CAPABILITY_BF_ANTENNA_2 = object()
-AC_CAPABILITY_BF_ANTENNA_3 = object()
-AC_CAPABILITY_BF_ANTENNA_4 = object()
-AC_CAPABILITY_SOUNDING_DIMENSION_2 = object()
-AC_CAPABILITY_SOUNDING_DIMENSION_3 = object()
-AC_CAPABILITY_SOUNDING_DIMENSION_4 = object()
-AC_CAPABILITY_MU_BEAMFORMER = object()
-AC_CAPABILITY_MU_BEAMFORMEE = object()
-AC_CAPABILITY_VHT_TXOP_PS = object()
-AC_CAPABILITY_HTC_VHT = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6 = object()
-AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7 = object()
-AC_CAPABILITY_VHT_LINK_ADAPT2 = object()
-AC_CAPABILITY_VHT_LINK_ADAPT3 = object()
-AC_CAPABILITY_RX_ANTENNA_PATTERN = object()
-AC_CAPABILITY_TX_ANTENNA_PATTERN = object()
-AC_CAPABILITY_MAX_MPDU_7991 = object()
-AC_CAPABILITY_MAX_MPDU_11454 = object()
-AC_CAPABILITIES_MAPPING = {
-    AC_CAPABILITY_VHT160: '[VHT160]',
-    AC_CAPABILITY_VHT160_80PLUS80: '[VHT160-80PLUS80]',
-    AC_CAPABILITY_RXLDPC: '[RXLDPC]',
-    AC_CAPABILITY_SHORT_GI_80: '[SHORT-GI-80]',
-    AC_CAPABILITY_SHORT_GI_160: '[SHORT-GI-160]',
-    AC_CAPABILITY_TX_STBC_2BY1: '[TX-STBC-2BY1]',
-    AC_CAPABILITY_RX_STBC_1: '[RX-STBC-1]',
-    AC_CAPABILITY_RX_STBC_12: '[RX-STBC-12]',
-    AC_CAPABILITY_RX_STBC_123: '[RX-STBC-123]',
-    AC_CAPABILITY_RX_STBC_1234: '[RX-STBC-1234]',
-    AC_CAPABILITY_SU_BEAMFORMER: '[SU-BEAMFORMER]',
-    AC_CAPABILITY_SU_BEAMFORMEE: '[SU-BEAMFORMEE]',
-    AC_CAPABILITY_BF_ANTENNA_2: '[BF-ANTENNA-2]',
-    AC_CAPABILITY_BF_ANTENNA_3: '[BF-ANTENNA-3]',
-    AC_CAPABILITY_BF_ANTENNA_4: '[BF-ANTENNA-4]',
-    AC_CAPABILITY_SOUNDING_DIMENSION_2: '[SOUNDING-DIMENSION-2]',
-    AC_CAPABILITY_SOUNDING_DIMENSION_3: '[SOUNDING-DIMENSION-3]',
-    AC_CAPABILITY_SOUNDING_DIMENSION_4: '[SOUNDING-DIMENSION-4]',
-    AC_CAPABILITY_MU_BEAMFORMER: '[MU-BEAMFORMER]',
-    AC_CAPABILITY_MU_BEAMFORMEE: '[MU-BEAMFORMEE]',
-    AC_CAPABILITY_VHT_TXOP_PS: '[VHT-TXOP-PS]',
-    AC_CAPABILITY_HTC_VHT: '[HTC-VHT]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0: '[MAX-A-MPDU-LEN-EXP0]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1: '[MAX-A-MPDU-LEN-EXP1]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2: '[MAX-A-MPDU-LEN-EXP2]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3: '[MAX-A-MPDU-LEN-EXP3]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4: '[MAX-A-MPDU-LEN-EXP4]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5: '[MAX-A-MPDU-LEN-EXP5]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6: '[MAX-A-MPDU-LEN-EXP6]',
-    AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7: '[MAX-A-MPDU-LEN-EXP7]',
-    AC_CAPABILITY_VHT_LINK_ADAPT2: '[VHT-LINK-ADAPT2]',
-    AC_CAPABILITY_VHT_LINK_ADAPT3: '[VHT-LINK-ADAPT3]',
-    AC_CAPABILITY_RX_ANTENNA_PATTERN: '[RX-ANTENNA-PATTERN]',
-    AC_CAPABILITY_TX_ANTENNA_PATTERN: '[TX-ANTENNA-PATTERN]',
-    AC_CAPABILITY_MAX_MPDU_11454: '[MAX-MPDU-11454]',
-    AC_CAPABILITY_MAX_MPDU_7991: '[MAX-MPDU-7991]'
-}
-AC_CAPABILITIES_MAPPING_INVERSE = {
-    v: k
-    for k, v in AC_CAPABILITIES_MAPPING.items()
-}
-VHT_CHANNEL_WIDTH_40 = 0
-VHT_CHANNEL_WIDTH_80 = 1
-VHT_CHANNEL_WIDTH_160 = 2
-VHT_CHANNEL_WIDTH_80_80 = 3
-
-VHT_CHANNEL = {
-    40: VHT_CHANNEL_WIDTH_40,
-    80: VHT_CHANNEL_WIDTH_80,
-    160: VHT_CHANNEL_WIDTH_160
-}
-
-# This is a loose merging of the rules for US and EU regulatory
-# domains as taken from IEEE Std 802.11-2012 Appendix E.  For instance,
-# we tolerate HT40 in channels 149-161 (not allowed in EU), but also
-# tolerate HT40+ on channel 7 (not allowed in the US).  We take the loose
-# definition so that we don't prohibit testing in either domain.
-HT40_ALLOW_MAP = {
-    N_CAPABILITY_HT40_MINUS_CHANNELS:
-    tuple(
-        itertools.chain(range(6, 14), range(40, 65, 8), range(104, 145, 8),
-                        [153, 161])),
-    N_CAPABILITY_HT40_PLUS_CHANNELS:
-    tuple(
-        itertools.chain(range(1, 8), range(36, 61, 8), range(100, 141, 8),
-                        [149, 157]))
-}
-
-PMF_SUPPORT_DISABLED = 0
-PMF_SUPPORT_ENABLED = 1
-PMF_SUPPORT_REQUIRED = 2
-PMF_SUPPORT_VALUES = (PMF_SUPPORT_DISABLED, PMF_SUPPORT_ENABLED,
-                      PMF_SUPPORT_REQUIRED)
-
-DRIVER_NAME = 'nl80211'
-
-CENTER_CHANNEL_MAP = {
-    VHT_CHANNEL_WIDTH_40: {
-        'delta':
-        2,
-        'channels': ((36, 40), (44, 48), (52, 56), (60, 64), (100, 104),
-                     (108, 112), (116, 120), (124, 128), (132, 136),
-                     (140, 144), (149, 153), (157, 161))
-    },
-    VHT_CHANNEL_WIDTH_80: {
-        'delta':
-        6,
-        'channels':
-        ((36, 48), (52, 64), (100, 112), (116, 128), (132, 144), (149, 161))
-    },
-    VHT_CHANNEL_WIDTH_160: {
-        'delta': 14,
-        'channels': ((36, 64), (100, 128))
-    }
-}
-
-OFDM_DATA_RATES = {'supported_rates': '60 90 120 180 240 360 480 540'}
-
-CCK_DATA_RATES = {'supported_rates': '10 20 55 110'}
-
-CCK_AND_OFDM_DATA_RATES = {
-    'supported_rates': '10 20 55 110 60 90 120 180 240 360 480 540'
-}
-
-OFDM_ONLY_BASIC_RATES = {'basic_rates': '60 120 240'}
-
-CCK_AND_OFDM_BASIC_RATES = {'basic_rates': '10 20 55 110'}
-
-WEP_AUTH = {
-    'open': {
-        'auth_algs': 1
-    },
-    'shared': {
-        'auth_algs': 2
-    },
-    'open_and_shared': {
-        'auth_algs': 3
-    }
-}
-
-WMM_11B_DEFAULT_PARAMS = {
-    'wmm_ac_bk_cwmin': 5,
-    'wmm_ac_bk_cwmax': 10,
-    'wmm_ac_bk_aifs': 7,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 3,
-    'wmm_ac_be_cwmin': 5,
-    'wmm_ac_be_cwmax': 7,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 2,
-    'wmm_ac_vi_cwmin': 4,
-    'wmm_ac_vi_cwmax': 5,
-    'wmm_ac_vi_txop_limit': 188,
-    'wmm_ac_vo_aifs': 2,
-    'wmm_ac_vo_cwmin': 3,
-    'wmm_ac_vo_cwmax': 4,
-    'wmm_ac_vo_txop_limit': 102
-}
-
-WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS = {
-    'wmm_ac_bk_cwmin': 4,
-    'wmm_ac_bk_cwmax': 10,
-    'wmm_ac_bk_aifs': 7,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 3,
-    'wmm_ac_be_cwmin': 4,
-    'wmm_ac_be_cwmax': 10,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 2,
-    'wmm_ac_vi_cwmin': 3,
-    'wmm_ac_vi_cwmax': 4,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 2,
-    'wmm_ac_vo_cwmin': 2,
-    'wmm_ac_vo_cwmax': 3,
-    'wmm_ac_vo_txop_limit': 47
-}
-
-WMM_NON_DEFAULT_PARAMS = {
-    'wmm_ac_bk_cwmin': 5,
-    'wmm_ac_bk_cwmax': 9,
-    'wmm_ac_bk_aifs': 3,
-    'wmm_ac_bk_txop_limit': 94,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 2,
-    'wmm_ac_be_cwmax': 8,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 1,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 10,
-    'wmm_ac_vi_txop_limit': 47,
-    'wmm_ac_vo_aifs': 1,
-    'wmm_ac_vo_cwmin': 6,
-    'wmm_ac_vo_cwmax': 10,
-    'wmm_ac_vo_txop_limit': 94
-}
-
-WMM_DEGRADED_VO_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 2,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 2,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 10,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
-}
-
-WMM_DEGRADED_VI_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 2,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 10,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 2,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
-}
-
-WMM_IMPROVE_BE_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 10,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 2,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 10,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 10,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
-}
-
-WMM_IMPROVE_BK_PARAMS = {
-    'wmm_ac_bk_cwmin': 7,
-    'wmm_ac_bk_cwmax': 15,
-    'wmm_ac_bk_aifs': 2,
-    'wmm_ac_bk_txop_limit': 0,
-    'wmm_ac_be_aifs': 10,
-    'wmm_ac_be_cwmin': 7,
-    'wmm_ac_be_cwmax': 15,
-    'wmm_ac_be_txop_limit': 0,
-    'wmm_ac_vi_aifs': 10,
-    'wmm_ac_vi_cwmin': 7,
-    'wmm_ac_vi_cwmax': 15,
-    'wmm_ac_vi_txop_limit': 94,
-    'wmm_ac_vo_aifs': 10,
-    'wmm_ac_vo_cwmin': 7,
-    'wmm_ac_vo_cwmax': 15,
-    'wmm_ac_vo_txop_limit': 47
-}
-
-WMM_ACM_BK = {'wmm_ac_bk_acm': 1}
-WMM_ACM_BE = {'wmm_ac_be_acm': 1}
-WMM_ACM_VI = {'wmm_ac_vi_acm': 1}
-WMM_ACM_VO = {'wmm_ac_vo_acm': 1}
-
-UAPSD_ENABLED = {'uapsd_advertisement_enabled': 1}
-
-UTF_8_SSID = {'utf8_ssid': 1}
-
-ENABLE_RRM_BEACON_REPORT = {'rrm_beacon_report': 1}
-ENABLE_RRM_NEIGHBOR_REPORT = {'rrm_neighbor_report': 1}
-
-# Wireless Network Management (AKA 802.11v) features.
-ENABLE_WNM_TIME_ADVERTISEMENT = {'time_advertisement': 2, 'time_zone': 'EST5'}
-ENABLE_WNM_SLEEP_MODE = {'wnm_sleep_mode': 1}
-ENABLE_WNM_BSS_TRANSITION_MANAGEMENT = {'bss_transition': 1}
-ENABLE_WNM_PROXY_ARP = {'proxy_arp': 1}
-ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = {
-    'na_mcast_to_ucast': 1
-}
-
-VENDOR_IE = {
-    'correct_length_beacon': {
-        'vendor_elements': 'dd0411223301'
-    },
-    'too_short_length_beacon': {
-        'vendor_elements': 'dd0311223301'
-    },
-    'too_long_length_beacon': {
-        'vendor_elements': 'dd0511223301'
-    },
-    'zero_length_beacon_with_data': {
-        'vendor_elements': 'dd0011223301'
-    },
-    'zero_length_beacon_without_data': {
-        'vendor_elements': 'dd00'
-    },
-    'simliar_to_wpa': {
-        'vendor_elements': 'dd040050f203'
-    },
-    'correct_length_association_response': {
-        'assocresp_elements': 'dd0411223301'
-    },
-    'too_short_length_association_response': {
-        'assocresp_elements': 'dd0311223301'
-    },
-    'too_long_length_association_response': {
-        'assocresp_elements': 'dd0511223301'
-    },
-    'zero_length_association_response_with_data': {
-        'assocresp_elements': 'dd0011223301'
-    },
-    'zero_length_association_response_without_data': {
-        'assocresp_elements': 'dd00'
-    }
-}
-
-ENABLE_IEEE80211D = {'ieee80211d': 1}
-
-COUNTRY_STRING = {
-    'ALL': {
-        'country3': '0x20'
-    },
-    'OUTDOOR': {
-        'country3': '0x4f'
-    },
-    'INDOOR': {
-        'country3': '0x49'
-    },
-    'NONCOUNTRY': {
-        'country3': '0x58'
-    },
-    'GLOBAL': {
-        'country3': '0x04'
-    }
-}
-
-COUNTRY_CODE = {
-    'AFGHANISTAN': {
-        'country_code': 'AF'
-    },
-    'ALAND_ISLANDS': {
-        'country_code': 'AX'
-    },
-    'ALBANIA': {
-        'country_code': 'AL'
-    },
-    'ALGERIA': {
-        'country_code': 'DZ'
-    },
-    'AMERICAN_SAMOA': {
-        'country_code': 'AS'
-    },
-    'ANDORRA': {
-        'country_code': 'AD'
-    },
-    'ANGOLA': {
-        'country_code': 'AO'
-    },
-    'ANGUILLA': {
-        'country_code': 'AI'
-    },
-    'ANTARCTICA': {
-        'country_code': 'AQ'
-    },
-    'ANTIGUA_AND_BARBUDA': {
-        'country_code': 'AG'
-    },
-    'ARGENTINA': {
-        'country_code': 'AR'
-    },
-    'ARMENIA': {
-        'country_code': 'AM'
-    },
-    'ARUBA': {
-        'country_code': 'AW'
-    },
-    'AUSTRALIA': {
-        'country_code': 'AU'
-    },
-    'AUSTRIA': {
-        'country_code': 'AT'
-    },
-    'AZERBAIJAN': {
-        'country_code': 'AZ'
-    },
-    'BAHAMAS': {
-        'country_code': 'BS'
-    },
-    'BAHRAIN': {
-        'country_code': 'BH'
-    },
-    'BANGLADESH': {
-        'country_code': 'BD'
-    },
-    'BARBADOS': {
-        'country_code': 'BB'
-    },
-    'BELARUS': {
-        'country_code': 'BY'
-    },
-    'BELGIUM': {
-        'country_code': 'BE'
-    },
-    'BELIZE': {
-        'country_code': 'BZ'
-    },
-    'BENIN': {
-        'country_code': 'BJ'
-    },
-    'BERMUDA': {
-        'country_code': 'BM'
-    },
-    'BHUTAN': {
-        'country_code': 'BT'
-    },
-    'BOLIVIA': {
-        'country_code': 'BO'
-    },
-    'BONAIRE': {
-        'country_code': 'BQ'
-    },
-    'BOSNIA_AND_HERZEGOVINA': {
-        'country_code': 'BA'
-    },
-    'BOTSWANA': {
-        'country_code': 'BW'
-    },
-    'BOUVET_ISLAND': {
-        'country_code': 'BV'
-    },
-    'BRAZIL': {
-        'country_code': 'BR'
-    },
-    'BRITISH_INDIAN_OCEAN_TERRITORY': {
-        'country_code': 'IO'
-    },
-    'BRUNEI_DARUSSALAM': {
-        'country_code': 'BN'
-    },
-    'BULGARIA': {
-        'country_code': 'BG'
-    },
-    'BURKINA_FASO': {
-        'country_code': 'BF'
-    },
-    'BURUNDI': {
-        'country_code': 'BI'
-    },
-    'CAMBODIA': {
-        'country_code': 'KH'
-    },
-    'CAMEROON': {
-        'country_code': 'CM'
-    },
-    'CANADA': {
-        'country_code': 'CA'
-    },
-    'CAPE_VERDE': {
-        'country_code': 'CV'
-    },
-    'CAYMAN_ISLANDS': {
-        'country_code': 'KY'
-    },
-    'CENTRAL_AFRICAN_REPUBLIC': {
-        'country_code': 'CF'
-    },
-    'CHAD': {
-        'country_code': 'TD'
-    },
-    'CHILE': {
-        'country_code': 'CL'
-    },
-    'CHINA': {
-        'country_code': 'CN'
-    },
-    'CHRISTMAS_ISLAND': {
-        'country_code': 'CX'
-    },
-    'COCOS_ISLANDS': {
-        'country_code': 'CC'
-    },
-    'COLOMBIA': {
-        'country_code': 'CO'
-    },
-    'COMOROS': {
-        'country_code': 'KM'
-    },
-    'CONGO': {
-        'country_code': 'CG'
-    },
-    'DEMOCRATIC_REPUBLIC_CONGO': {
-        'country_code': 'CD'
-    },
-    'COOK_ISLANDS': {
-        'country_code': 'CK'
-    },
-    'COSTA_RICA': {
-        'country_code': 'CR'
-    },
-    'COTE_D_IVOIRE': {
-        'country_code': 'CI'
-    },
-    'CROATIA': {
-        'country_code': 'HR'
-    },
-    'CUBA': {
-        'country_code': 'CU'
-    },
-    'CURACAO': {
-        'country_code': 'CW'
-    },
-    'CYPRUS': {
-        'country_code': 'CY'
-    },
-    'CZECH_REPUBLIC': {
-        'country_code': 'CZ'
-    },
-    'DENMARK': {
-        'country_code': 'DK'
-    },
-    'DJIBOUTI': {
-        'country_code': 'DJ'
-    },
-    'DOMINICA': {
-        'country_code': 'DM'
-    },
-    'DOMINICAN_REPUBLIC': {
-        'country_code': 'DO'
-    },
-    'ECUADOR': {
-        'country_code': 'EC'
-    },
-    'EGYPT': {
-        'country_code': 'EG'
-    },
-    'EL_SALVADOR': {
-        'country_code': 'SV'
-    },
-    'EQUATORIAL_GUINEA': {
-        'country_code': 'GQ'
-    },
-    'ERITREA': {
-        'country_code': 'ER'
-    },
-    'ESTONIA': {
-        'country_code': 'EE'
-    },
-    'ETHIOPIA': {
-        'country_code': 'ET'
-    },
-    'FALKLAND_ISLANDS_(MALVINAS)': {
-        'country_code': 'FK'
-    },
-    'FAROE_ISLANDS': {
-        'country_code': 'FO'
-    },
-    'FIJI': {
-        'country_code': 'FJ'
-    },
-    'FINLAND': {
-        'country_code': 'FI'
-    },
-    'FRANCE': {
-        'country_code': 'FR'
-    },
-    'FRENCH_GUIANA': {
-        'country_code': 'GF'
-    },
-    'FRENCH_POLYNESIA': {
-        'country_code': 'PF'
-    },
-    'FRENCH_SOUTHERN_TERRITORIES': {
-        'country_code': 'TF'
-    },
-    'GABON': {
-        'country_code': 'GA'
-    },
-    'GAMBIA': {
-        'country_code': 'GM'
-    },
-    'GEORGIA': {
-        'country_code': 'GE'
-    },
-    'GERMANY': {
-        'country_code': 'DE'
-    },
-    'GHANA': {
-        'country_code': 'GH'
-    },
-    'GIBRALTAR': {
-        'country_code': 'GI'
-    },
-    'GREECE': {
-        'country_code': 'GR'
-    },
-    'GREENLAND': {
-        'country_code': 'GL'
-    },
-    'GRENADA': {
-        'country_code': 'GD'
-    },
-    'GUADELOUPE': {
-        'country_code': 'GP'
-    },
-    'GUAM': {
-        'country_code': 'GU'
-    },
-    'GUATEMALA': {
-        'country_code': 'GT'
-    },
-    'GUERNSEY': {
-        'country_code': 'GG'
-    },
-    'GUINEA': {
-        'country_code': 'GN'
-    },
-    'GUINEA-BISSAU': {
-        'country_code': 'GW'
-    },
-    'GUYANA': {
-        'country_code': 'GY'
-    },
-    'HAITI': {
-        'country_code': 'HT'
-    },
-    'HEARD_ISLAND_AND_MCDONALD_ISLANDS': {
-        'country_code': 'HM'
-    },
-    'VATICAN_CITY_STATE': {
-        'country_code': 'VA'
-    },
-    'HONDURAS': {
-        'country_code': 'HN'
-    },
-    'HONG_KONG': {
-        'country_code': 'HK'
-    },
-    'HUNGARY': {
-        'country_code': 'HU'
-    },
-    'ICELAND': {
-        'country_code': 'IS'
-    },
-    'INDIA': {
-        'country_code': 'IN'
-    },
-    'INDONESIA': {
-        'country_code': 'ID'
-    },
-    'IRAN': {
-        'country_code': 'IR'
-    },
-    'IRAQ': {
-        'country_code': 'IQ'
-    },
-    'IRELAND': {
-        'country_code': 'IE'
-    },
-    'ISLE_OF_MAN': {
-        'country_code': 'IM'
-    },
-    'ISRAEL': {
-        'country_code': 'IL'
-    },
-    'ITALY': {
-        'country_code': 'IT'
-    },
-    'JAMAICA': {
-        'country_code': 'JM'
-    },
-    'JAPAN': {
-        'country_code': 'JP'
-    },
-    'JERSEY': {
-        'country_code': 'JE'
-    },
-    'JORDAN': {
-        'country_code': 'JO'
-    },
-    'KAZAKHSTAN': {
-        'country_code': 'KZ'
-    },
-    'KENYA': {
-        'country_code': 'KE'
-    },
-    'KIRIBATI': {
-        'country_code': 'KI'
-    },
-    'DEMOCRATIC_PEOPLE_S_REPUBLIC_OF_KOREA': {
-        'country_code': 'KP'
-    },
-    'REPUBLIC_OF_KOREA': {
-        'country_code': 'KR'
-    },
-    'KUWAIT': {
-        'country_code': 'KW'
-    },
-    'KYRGYZSTAN': {
-        'country_code': 'KG'
-    },
-    'LAO': {
-        'country_code': 'LA'
-    },
-    'LATVIA': {
-        'country_code': 'LV'
-    },
-    'LEBANON': {
-        'country_code': 'LB'
-    },
-    'LESOTHO': {
-        'country_code': 'LS'
-    },
-    'LIBERIA': {
-        'country_code': 'LR'
-    },
-    'LIBYA': {
-        'country_code': 'LY'
-    },
-    'LIECHTENSTEIN': {
-        'country_code': 'LI'
-    },
-    'LITHUANIA': {
-        'country_code': 'LT'
-    },
-    'LUXEMBOURG': {
-        'country_code': 'LU'
-    },
-    'MACAO': {
-        'country_code': 'MO'
-    },
-    'MACEDONIA': {
-        'country_code': 'MK'
-    },
-    'MADAGASCAR': {
-        'country_code': 'MG'
-    },
-    'MALAWI': {
-        'country_code': 'MW'
-    },
-    'MALAYSIA': {
-        'country_code': 'MY'
-    },
-    'MALDIVES': {
-        'country_code': 'MV'
-    },
-    'MALI': {
-        'country_code': 'ML'
-    },
-    'MALTA': {
-        'country_code': 'MT'
-    },
-    'MARSHALL_ISLANDS': {
-        'country_code': 'MH'
-    },
-    'MARTINIQUE': {
-        'country_code': 'MQ'
-    },
-    'MAURITANIA': {
-        'country_code': 'MR'
-    },
-    'MAURITIUS': {
-        'country_code': 'MU'
-    },
-    'MAYOTTE': {
-        'country_code': 'YT'
-    },
-    'MEXICO': {
-        'country_code': 'MX'
-    },
-    'MICRONESIA': {
-        'country_code': 'FM'
-    },
-    'MOLDOVA': {
-        'country_code': 'MD'
-    },
-    'MONACO': {
-        'country_code': 'MC'
-    },
-    'MONGOLIA': {
-        'country_code': 'MN'
-    },
-    'MONTENEGRO': {
-        'country_code': 'ME'
-    },
-    'MONTSERRAT': {
-        'country_code': 'MS'
-    },
-    'MOROCCO': {
-        'country_code': 'MA'
-    },
-    'MOZAMBIQUE': {
-        'country_code': 'MZ'
-    },
-    'MYANMAR': {
-        'country_code': 'MM'
-    },
-    'NAMIBIA': {
-        'country_code': 'NA'
-    },
-    'NAURU': {
-        'country_code': 'NR'
-    },
-    'NEPAL': {
-        'country_code': 'NP'
-    },
-    'NETHERLANDS': {
-        'country_code': 'NL'
-    },
-    'NEW_CALEDONIA': {
-        'country_code': 'NC'
-    },
-    'NEW_ZEALAND': {
-        'country_code': 'NZ'
-    },
-    'NICARAGUA': {
-        'country_code': 'NI'
-    },
-    'NIGER': {
-        'country_code': 'NE'
-    },
-    'NIGERIA': {
-        'country_code': 'NG'
-    },
-    'NIUE': {
-        'country_code': 'NU'
-    },
-    'NORFOLK_ISLAND': {
-        'country_code': 'NF'
-    },
-    'NORTHERN_MARIANA_ISLANDS': {
-        'country_code': 'MP'
-    },
-    'NORWAY': {
-        'country_code': 'NO'
-    },
-    'OMAN': {
-        'country_code': 'OM'
-    },
-    'PAKISTAN': {
-        'country_code': 'PK'
-    },
-    'PALAU': {
-        'country_code': 'PW'
-    },
-    'PALESTINE': {
-        'country_code': 'PS'
-    },
-    'PANAMA': {
-        'country_code': 'PA'
-    },
-    'PAPUA_NEW_GUINEA': {
-        'country_code': 'PG'
-    },
-    'PARAGUAY': {
-        'country_code': 'PY'
-    },
-    'PERU': {
-        'country_code': 'PE'
-    },
-    'PHILIPPINES': {
-        'country_code': 'PH'
-    },
-    'PITCAIRN': {
-        'country_code': 'PN'
-    },
-    'POLAND': {
-        'country_code': 'PL'
-    },
-    'PORTUGAL': {
-        'country_code': 'PT'
-    },
-    'PUERTO_RICO': {
-        'country_code': 'PR'
-    },
-    'QATAR': {
-        'country_code': 'QA'
-    },
-    'RÉUNION': {
-        'country_code': 'RE'
-    },
-    'ROMANIA': {
-        'country_code': 'RO'
-    },
-    'RUSSIAN_FEDERATION': {
-        'country_code': 'RU'
-    },
-    'RWANDA': {
-        'country_code': 'RW'
-    },
-    'SAINT_BARTHELEMY': {
-        'country_code': 'BL'
-    },
-    'SAINT_KITTS_AND_NEVIS': {
-        'country_code': 'KN'
-    },
-    'SAINT_LUCIA': {
-        'country_code': 'LC'
-    },
-    'SAINT_MARTIN': {
-        'country_code': 'MF'
-    },
-    'SAINT_PIERRE_AND_MIQUELON': {
-        'country_code': 'PM'
-    },
-    'SAINT_VINCENT_AND_THE_GRENADINES': {
-        'country_code': 'VC'
-    },
-    'SAMOA': {
-        'country_code': 'WS'
-    },
-    'SAN_MARINO': {
-        'country_code': 'SM'
-    },
-    'SAO_TOME_AND_PRINCIPE': {
-        'country_code': 'ST'
-    },
-    'SAUDI_ARABIA': {
-        'country_code': 'SA'
-    },
-    'SENEGAL': {
-        'country_code': 'SN'
-    },
-    'SERBIA': {
-        'country_code': 'RS'
-    },
-    'SEYCHELLES': {
-        'country_code': 'SC'
-    },
-    'SIERRA_LEONE': {
-        'country_code': 'SL'
-    },
-    'SINGAPORE': {
-        'country_code': 'SG'
-    },
-    'SINT_MAARTEN': {
-        'country_code': 'SX'
-    },
-    'SLOVAKIA': {
-        'country_code': 'SK'
-    },
-    'SLOVENIA': {
-        'country_code': 'SI'
-    },
-    'SOLOMON_ISLANDS': {
-        'country_code': 'SB'
-    },
-    'SOMALIA': {
-        'country_code': 'SO'
-    },
-    'SOUTH_AFRICA': {
-        'country_code': 'ZA'
-    },
-    'SOUTH_GEORGIA': {
-        'country_code': 'GS'
-    },
-    'SOUTH_SUDAN': {
-        'country_code': 'SS'
-    },
-    'SPAIN': {
-        'country_code': 'ES'
-    },
-    'SRI_LANKA': {
-        'country_code': 'LK'
-    },
-    'SUDAN': {
-        'country_code': 'SD'
-    },
-    'SURINAME': {
-        'country_code': 'SR'
-    },
-    'SVALBARD_AND_JAN_MAYEN': {
-        'country_code': 'SJ'
-    },
-    'SWAZILAND': {
-        'country_code': 'SZ'
-    },
-    'SWEDEN': {
-        'country_code': 'SE'
-    },
-    'SWITZERLAND': {
-        'country_code': 'CH'
-    },
-    'SYRIAN_ARAB_REPUBLIC': {
-        'country_code': 'SY'
-    },
-    'TAIWAN': {
-        'country_code': 'TW'
-    },
-    'TAJIKISTAN': {
-        'country_code': 'TJ'
-    },
-    'TANZANIA': {
-        'country_code': 'TZ'
-    },
-    'THAILAND': {
-        'country_code': 'TH'
-    },
-    'TIMOR-LESTE': {
-        'country_code': 'TL'
-    },
-    'TOGO': {
-        'country_code': 'TG'
-    },
-    'TOKELAU': {
-        'country_code': 'TK'
-    },
-    'TONGA': {
-        'country_code': 'TO'
-    },
-    'TRINIDAD_AND_TOBAGO': {
-        'country_code': 'TT'
-    },
-    'TUNISIA': {
-        'country_code': 'TN'
-    },
-    'TURKEY': {
-        'country_code': 'TR'
-    },
-    'TURKMENISTAN': {
-        'country_code': 'TM'
-    },
-    'TURKS_AND_CAICOS_ISLANDS': {
-        'country_code': 'TC'
-    },
-    'TUVALU': {
-        'country_code': 'TV'
-    },
-    'UGANDA': {
-        'country_code': 'UG'
-    },
-    'UKRAINE': {
-        'country_code': 'UA'
-    },
-    'UNITED_ARAB_EMIRATES': {
-        'country_code': 'AE'
-    },
-    'UNITED_KINGDOM': {
-        'country_code': 'GB'
-    },
-    'UNITED_STATES': {
-        'country_code': 'US'
-    },
-    'UNITED_STATES_MINOR_OUTLYING_ISLANDS': {
-        'country_code': 'UM'
-    },
-    'URUGUAY': {
-        'country_code': 'UY'
-    },
-    'UZBEKISTAN': {
-        'country_code': 'UZ'
-    },
-    'VANUATU': {
-        'country_code': 'VU'
-    },
-    'VENEZUELA': {
-        'country_code': 'VE'
-    },
-    'VIETNAM': {
-        'country_code': 'VN'
-    },
-    'VIRGIN_ISLANDS_BRITISH': {
-        'country_code': 'VG'
-    },
-    'VIRGIN_ISLANDS_US': {
-        'country_code': 'VI'
-    },
-    'WALLIS_AND_FUTUNA': {
-        'country_code': 'WF'
-    },
-    'WESTERN_SAHARA': {
-        'country_code': 'EH'
-    },
-    'YEMEN': {
-        'country_code': 'YE'
-    },
-    'ZAMBIA': {
-        'country_code': 'ZM'
-    },
-    'ZIMBABWE': {
-        'country_code': 'ZW'
-    },
-    'NON_COUNTRY': {
-        'country_code': 'XX'
-    }
-}
-
-ALL_CHANNELS_2G = {
-    1: {20, 40},
-    2: {20, 40},
-    3: {20, 40},
-    4: {20, 40},
-    5: {20, 40},
-    6: {20, 40},
-    7: {20, 40},
-    8: {20, 40},
-    9: {20, 40},
-    10: {20, 40},
-    11: {20, 40},
-    12: {20, 40},
-    13: {20, 40},
-    14: {20}
-}
-
-ALL_CHANNELS_5G = {
-    36: {20, 40, 80},
-    40: {20, 40, 80},
-    44: {20, 40, 80},
-    48: {20, 40, 80},
-    52: {20, 40, 80},
-    56: {20, 40, 80},
-    60: {20, 40, 80},
-    64: {20, 40, 80},
-    100: {20, 40, 80},
-    104: {20, 40, 80},
-    108: {20, 40, 80},
-    112: {20, 40, 80},
-    116: {20, 40, 80},
-    120: {20, 40, 80},
-    124: {20, 40, 80},
-    128: {20, 40, 80},
-    132: {20, 40, 80},
-    136: {20, 40, 80},
-    140: {20, 40, 80},
-    144: {20, 40, 80},
-    149: {20, 40, 80},
-    153: {20, 40, 80},
-    157: {20, 40, 80},
-    161: {20, 40, 80},
-    165: {20}
-}
-
-ALL_CHANNELS = {**ALL_CHANNELS_2G, **ALL_CHANNELS_5G}
-
-
-@unique
-class WnmFeature(Enum):
-    """Wireless Network Management (AKA 802.11v) features hostapd supports."""
-    TIME_ADVERTISEMENT = auto()
-    WNM_SLEEP_MODE = auto()
-    BSS_TRANSITION_MANAGEMENT = auto()
-    PROXY_ARP = auto()
-    IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST = auto()
diff --git a/src/antlion/controllers/ap_lib/hostapd_security.py b/src/antlion/controllers/ap_lib/hostapd_security.py
deleted file mode 100644
index 372ca44..0000000
--- a/src/antlion/controllers/ap_lib/hostapd_security.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import string
-
-from antlion.controllers.ap_lib import hostapd_constants
-
-
-class Security(object):
-    """The Security class for hostapd representing some of the security
-       settings that are allowed in hostapd.  If needed more can be added.
-    """
-    def __init__(self,
-                 security_mode=None,
-                 password=None,
-                 wpa_cipher=hostapd_constants.WPA_DEFAULT_CIPHER,
-                 wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
-                 wpa_group_rekey=hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME,
-                 wpa_strict_rekey=hostapd_constants.WPA_STRICT_REKEY_DEFAULT,
-                 wep_default_key=hostapd_constants.WEP_DEFAULT_KEY,
-                 radius_server_ip=None,
-                 radius_server_port=None,
-                 radius_server_secret=None):
-        """Gather all of the security settings for WPA-PSK.  This could be
-           expanded later.
-
-        Args:
-            security_mode: Type of security modes.
-                        Options: wep, wpa, wpa2, wpa/wpa2, wpa3, wpa2/wpa3,
-                        wpa/wpa2/wpa3
-            password: The PSK or passphrase for the security mode.
-            wpa_cipher: The cipher to be used for wpa.
-                        Options: TKIP, CCMP, TKIP CCMP
-                        Default: TKIP
-            wpa2_cipher: The cipher to be used for wpa2.
-                         Options: TKIP, CCMP, TKIP CCMP
-                         Default: CCMP
-            wpa_group_rekey: How often to refresh the GTK regardless of network
-                             changes.
-                             Options: An integrer in seconds, None
-                             Default: 600 seconds
-            wpa_strict_rekey: Whether to do a group key update when client
-                              leaves the network or not.
-                              Options: True, False
-                              Default: True
-            wep_default_key: The wep key number to use when transmitting.
-            radius_server_ip: Radius server IP for Enterprise auth.
-            radius_server_port: Radius server port for Enterprise auth.
-            radius_server_secret: Radius server secret for Enterprise auth.
-        """
-        self.security_mode_string = security_mode
-        self.wpa_cipher = wpa_cipher
-        self.wpa2_cipher = wpa2_cipher
-        self.wpa_group_rekey = wpa_group_rekey
-        self.wpa_strict_rekey = wpa_strict_rekey
-        self.wep_default_key = wep_default_key
-        self.radius_server_ip = radius_server_ip
-        self.radius_server_port = radius_server_port
-        self.radius_server_secret = radius_server_secret
-        self.security_mode = hostapd_constants.SECURITY_STRING_TO_SECURITY_MODE_INT.get(
-            security_mode, None)
-        if password:
-            if self.security_mode == hostapd_constants.WEP:
-                if len(password) in hostapd_constants.WEP_STR_LENGTH:
-                    self.password = '"%s"' % password
-                elif len(password) in hostapd_constants.WEP_HEX_LENGTH and all(
-                        c in string.hexdigits for c in password):
-                    self.password = password
-                else:
-                    raise ValueError(
-                        'WEP key must be a hex string of %s characters' %
-                        hostapd_constants.WEP_HEX_LENGTH)
-            else:
-                if len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH or len(
-                        password) > hostapd_constants.MAX_WPA_PSK_LENGTH:
-                    raise ValueError(
-                        'Password must be a minumum of %s characters and a maximum of %s'
-                        % (hostapd_constants.MIN_WPA_PSK_LENGTH,
-                           hostapd_constants.MAX_WPA_PSK_LENGTH))
-                else:
-                    self.password = password
-
-    def generate_dict(self):
-        """Returns: an ordered dictionary of settings"""
-        settings = collections.OrderedDict()
-        if self.security_mode is not None:
-            if self.security_mode == hostapd_constants.WEP:
-                settings['wep_default_key'] = self.wep_default_key
-                settings['wep_key' + str(self.wep_default_key)] = self.password
-            elif self.security_mode == hostapd_constants.ENT:
-                settings['auth_server_addr'] = self.radius_server_ip
-                settings['auth_server_port'] = self.radius_server_port
-                settings[
-                    'auth_server_shared_secret'] = self.radius_server_secret
-                settings['wpa_key_mgmt'] = hostapd_constants.ENT_KEY_MGMT
-                settings['ieee8021x'] = hostapd_constants.IEEE8021X
-                settings['wpa'] = hostapd_constants.WPA2
-            else:
-                settings['wpa'] = self.security_mode
-                if len(self.password) == hostapd_constants.MAX_WPA_PSK_LENGTH:
-                    settings['wpa_psk'] = self.password
-                else:
-                    settings['wpa_passphrase'] = self.password
-                # For wpa, wpa/wpa2, and wpa/wpa2/wpa3, add wpa_pairwise
-                if self.security_mode == hostapd_constants.WPA1 or self.security_mode == hostapd_constants.MIXED:
-                    settings['wpa_pairwise'] = self.wpa_cipher
-                # For wpa/wpa2, wpa2, wpa3, and wpa2/wpa3, and wpa/wpa2, wpa3, add rsn_pairwise
-                if self.security_mode == hostapd_constants.WPA2 or self.security_mode == hostapd_constants.MIXED:
-                    settings['rsn_pairwise'] = self.wpa2_cipher
-                # Add wpa_key_mgmt based on security mode string
-                if self.security_mode_string in hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT:
-                    settings[
-                        'wpa_key_mgmt'] = hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT[
-                            self.security_mode_string]
-                if self.wpa_group_rekey:
-                    settings['wpa_group_rekey'] = self.wpa_group_rekey
-                if self.wpa_strict_rekey:
-                    settings[
-                        'wpa_strict_rekey'] = hostapd_constants.WPA_STRICT_REKEY
-        return settings
diff --git a/src/antlion/controllers/ap_lib/hostapd_utils.py b/src/antlion/controllers/ap_lib/hostapd_utils.py
deleted file mode 100644
index 3387ed1..0000000
--- a/src/antlion/controllers/ap_lib/hostapd_utils.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-
-
-def generate_random_password(security_mode=None, length=None, hex=None):
-    """Generates a random password. Defaults to an 8 character ASCII password.
-
-    Args:
-        security_mode: optional string, security type. Used to determine if
-            length should be WEP compatible (useful for generated tests to simply
-            pass in security mode)
-        length: optional int, length of password to generate. Defaults to 8,
-            unless security_mode is WEP, then 13
-        hex: optional int, if True, generates a hex string, else ascii
-    """
-    if hex:
-        generator_func = utils.rand_hex_str
-    else:
-        generator_func = utils.rand_ascii_str
-
-    if length:
-        return generator_func(length)
-    if security_mode and security_mode.lower() == hostapd_constants.WEP_STRING:
-        return generator_func(hostapd_constants.WEP_DEFAULT_STR_LENGTH)
-    else:
-        return generator_func(hostapd_constants.MIN_WPA_PSK_LENGTH)
-
-
-def verify_interface(interface, valid_interfaces):
-    """Raises error if interface is missing or invalid
-    Args:
-        interface: string of interface name
-        valid_interfaces: list of valid interface names
-    """
-    if not interface:
-        raise ValueError('Required wlan interface is missing.')
-    if interface not in valid_interfaces:
-        raise ValueError('Invalid interface name was passed: %s' % interface)
-
-
-def verify_security_mode(security_profile, valid_security_modes):
-    """Raises error if security mode is not in list of valid security modes.
-
-    Args:
-        security_profile: a hostapd_security.Security object.
-        valid_security_modes: a list of valid security modes for a profile. Must
-            include None if open security is valid.
-    """
-    if security_profile is None:
-        if None not in valid_security_modes:
-            raise ValueError('Open security is not allowed for this profile.')
-    elif security_profile.security_mode not in valid_security_modes:
-        raise ValueError(
-            'Invalid Security Mode: %s. '
-            'Valid Security Modes for this profile: %s.' %
-            (security_profile.security_mode, valid_security_modes))
-
-
-def verify_cipher(security_profile, valid_ciphers):
-    """Raise error if cipher is not in list of valid ciphers.
-
-    Args:
-        security_profile: a hostapd_security.Security object.
-        valid_ciphers: a list of valid ciphers for a profile.
-    """
-    if security_profile is None:
-        raise ValueError('Security mode is open.')
-    elif security_profile.security_mode == hostapd_constants.WPA1:
-        if security_profile.wpa_cipher not in valid_ciphers:
-            raise ValueError('Invalid WPA Cipher: %s. '
-                             'Valid WPA Ciphers for this profile: %s' %
-                             (security_profile.wpa_cipher, valid_ciphers))
-    elif security_profile.security_mode == hostapd_constants.WPA2:
-        if security_profile.wpa2_cipher not in valid_ciphers:
-            raise ValueError('Invalid WPA2 Cipher: %s. '
-                             'Valid WPA2 Ciphers for this profile: %s' %
-                             (security_profile.wpa2_cipher, valid_ciphers))
-    else:
-        raise ValueError('Invalid Security Mode: %s' %
-                         security_profile.security_mode)
diff --git a/src/antlion/controllers/ap_lib/radio_measurement.py b/src/antlion/controllers/ap_lib/radio_measurement.py
deleted file mode 100644
index 254adc5..0000000
--- a/src/antlion/controllers/ap_lib/radio_measurement.py
+++ /dev/null
@@ -1,231 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import IntEnum, unique
-
-
-@unique
-class ApReachability(IntEnum):
-    """Neighbor Report AP Reachability values.
-
-    See IEEE 802.11-2020 Figure 9-172.
-    """
-    NOT_REACHABLE = 1
-    UNKNOWN = 2
-    REACHABLE = 3
-
-
-class BssidInformationCapabilities:
-    """Representation of Neighbor Report BSSID Information Capabilities.
-
-    See IEEE 802.11-2020 Figure 9-338 and 9.4.1.4.
-    """
-
-    def __init__(self,
-                 spectrum_management: bool = False,
-                 qos: bool = False,
-                 apsd: bool = False,
-                 radio_measurement: bool = False):
-        """Create a capabilities object.
-
-        Args:
-            spectrum_management: whether spectrum management is required.
-            qos: whether QoS is implemented.
-            apsd: whether APSD is implemented.
-            radio_measurement: whether radio measurement is activated.
-        """
-        self._spectrum_management = spectrum_management
-        self._qos = qos
-        self._apsd = apsd
-        self._radio_measurement = radio_measurement
-
-    def __index__(self) -> int:
-        """Convert to numeric representation of the field's bits."""
-        return self.spectrum_management << 5 \
-            | self.qos << 4 \
-            | self.apsd << 3 \
-            | self.radio_measurement << 2
-
-    @property
-    def spectrum_management(self) -> bool:
-        return self._spectrum_management
-
-    @property
-    def qos(self) -> bool:
-        return self._qos
-
-    @property
-    def apsd(self) -> bool:
-        return self._apsd
-
-    @property
-    def radio_measurement(self) -> bool:
-        return self._radio_measurement
-
-
-class BssidInformation:
-    """Representation of Neighbor Report BSSID Information field.
-
-    BssidInformation contains info about a neighboring AP, to be included in a
-    neighbor report element. See IEEE 802.11-2020 Figure 9-337.
-    """
-
-    def __init__(self,
-                 ap_reachability: ApReachability = ApReachability.UNKNOWN,
-                 security: bool = False,
-                 key_scope: bool = False,
-                 capabilities:
-                 BssidInformationCapabilities = BssidInformationCapabilities(),
-                 mobility_domain: bool = False,
-                 high_throughput: bool = False,
-                 very_high_throughput: bool = False,
-                 ftm: bool = False):
-        """Create a BSSID Information object for a neighboring AP.
-
-        Args:
-            ap_reachability: whether this AP is reachable by the STA that
-                requested the neighbor report.
-            security: whether this AP is known to support the same security
-                provisioning as used by the STA in its current association.
-            key_scope: whether this AP is known to have the same
-                authenticator as the AP sending the report.
-            capabilities: selected capabilities of this AP.
-            mobility_domain: whether the AP is including an MDE in its beacon
-                frames and the contents of that MDE are identical to the MDE
-                advertised by the AP sending the report.
-            high_throughput: whether the AP is an HT AP including the HT
-                Capabilities element in its Beacons, and that the contents of
-                that HT capabilities element are identical to the HT
-                capabilities element advertised by the AP sending the report.
-            very_high_throughput: whether the AP is a VHT AP and the VHT
-                capabilities element, if included as a subelement, is
-                identical in content to the VHT capabilities element included
-                in the AP’s beacon.
-            ftm: whether the AP is known to have the Fine Timing Measurement
-                Responder extended capability.
-        """
-        self._ap_reachability = ap_reachability
-        self._security = security
-        self._key_scope = key_scope
-        self._capabilities = capabilities
-        self._mobility_domain = mobility_domain
-        self._high_throughput = high_throughput
-        self._very_high_throughput = very_high_throughput
-        self._ftm = ftm
-
-    def __index__(self) -> int:
-        """Convert to numeric representation of the field's bits."""
-        return self._ap_reachability << 30 \
-            | self.security << 29 \
-            | self.key_scope << 28 \
-            | int(self.capabilities) << 22 \
-            | self.mobility_domain << 21 \
-            | self.high_throughput << 20 \
-            | self.very_high_throughput << 19 \
-            | self.ftm << 18
-
-    @property
-    def security(self) -> bool:
-        return self._security
-
-    @property
-    def key_scope(self) -> bool:
-        return self._key_scope
-
-    @property
-    def capabilities(self) -> BssidInformationCapabilities:
-        return self._capabilities
-
-    @property
-    def mobility_domain(self) -> bool:
-        return self._mobility_domain
-
-    @property
-    def high_throughput(self) -> bool:
-        return self._high_throughput
-
-    @property
-    def very_high_throughput(self) -> bool:
-        return self._very_high_throughput
-
-    @property
-    def ftm(self) -> bool:
-        return self._ftm
-
-
-@unique
-class PhyType(IntEnum):
-    """PHY type values, see dot11PhyType in 802.11-2020 Annex C."""
-    DSSS = 2
-    OFDM = 4
-    HRDSS = 5
-    ERP = 6
-    HT = 7
-    DMG = 8
-    VHT = 9
-    TVHT = 10
-    S1G = 11
-    CDMG = 12
-    CMMG = 13
-
-
-class NeighborReportElement:
-    """Representation of Neighbor Report element.
-
-    See IEEE 802.11-2020 9.4.2.36.
-    """
-
-    def __init__(self, bssid: str, bssid_information: BssidInformation,
-                 operating_class: int, channel_number: int, phy_type: PhyType):
-        """Create a neighbor report element.
-
-        Args:
-            bssid: MAC address of the neighbor.
-            bssid_information: BSSID Information of the neigbor.
-            operating_class: operating class of the neighbor.
-            channel_number: channel number of the neighbor.
-            phy_type: dot11PhyType of the neighbor.
-        """
-        self._bssid = bssid
-        self._bssid_information = bssid_information
-
-        # Operating Class, IEEE 802.11-2020 Annex E.
-        self._operating_class = operating_class
-
-        self._channel_number = channel_number
-
-        # PHY Type, IEEE 802.11-2020 Annex C.
-        self._phy_type = phy_type
-
-    @property
-    def bssid(self) -> str:
-        return self._bssid
-
-    @property
-    def bssid_information(self) -> BssidInformation:
-        return self._bssid_information
-
-    @property
-    def operating_class(self) -> int:
-        return self._operating_class
-
-    @property
-    def channel_number(self) -> int:
-        return self._channel_number
-
-    @property
-    def phy_type(self) -> PhyType:
-        return self._phy_type
diff --git a/src/antlion/controllers/ap_lib/radvd.py b/src/antlion/controllers/ap_lib/radvd.py
deleted file mode 100644
index 9761c44..0000000
--- a/src/antlion/controllers/ap_lib/radvd.py
+++ /dev/null
@@ -1,203 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import shlex
-import tempfile
-import time
-
-from antlion.controllers.utils_lib.commands import shell
-from antlion.libs.proc import job
-
-
-class Error(Exception):
-    """An error caused by radvd."""
-
-
-class Radvd(object):
-    """Manages the radvd program.
-
-    https://en.wikipedia.org/wiki/Radvd
-    This implements the Router Advertisement Daemon of IPv6 router addresses
-    and IPv6 routing prefixes using the Neighbor Discovery Protocol.
-
-    Attributes:
-        config: The radvd configuration that is being used.
-    """
-    def __init__(self, runner, interface, working_dir=None, radvd_binary=None):
-        """
-        Args:
-            runner: Object that has run_async and run methods for executing
-                    shell commands (e.g. connection.SshConnection)
-            interface: string, The name of the interface to use (eg. wlan0).
-            working_dir: The directory to work out of.
-            radvd_binary: Location of the radvd binary
-        """
-        if not radvd_binary:
-            logging.debug('No radvd binary specified.  '
-                          'Assuming radvd is in the path.')
-            radvd_binary = 'radvd'
-        else:
-            logging.debug('Using radvd binary located at %s' % radvd_binary)
-        if working_dir is None and runner == job.run:
-            working_dir = tempfile.gettempdir()
-        else:
-            working_dir = '/tmp'
-        self._radvd_binary = radvd_binary
-        self._runner = runner
-        self._interface = interface
-        self._working_dir = working_dir
-        self.config = None
-        self._shell = shell.ShellCommand(runner, working_dir)
-        self._log_file = '%s/radvd-%s.log' % (working_dir, self._interface)
-        self._config_file = '%s/radvd-%s.conf' % (working_dir, self._interface)
-        self._pid_file = '%s/radvd-%s.pid' % (working_dir, self._interface)
-        self._ps_identifier = '%s.*%s' % (self._radvd_binary,
-                                          self._config_file)
-
-    def start(self, config, timeout=60):
-        """Starts radvd
-
-        Starts the radvd daemon and runs it in the background.
-
-        Args:
-            config: Configs to start the radvd with.
-            timeout: Time to wait for radvd  to come up.
-
-        Returns:
-            True if the daemon could be started. Note that the daemon can still
-            start and not work. Invalid configurations can take a long amount
-            of time to be produced, and because the daemon runs indefinitely
-            it's impossible to wait on. If you need to check if configs are ok
-            then periodic checks to is_running and logs should be used.
-        """
-        if self.is_alive():
-            self.stop()
-
-        self.config = config
-
-        self._shell.delete_file(self._log_file)
-        self._shell.delete_file(self._config_file)
-        self._write_configs(self.config)
-
-        radvd_command = '%s -C %s -p %s -m logfile -d 5 -l %s' % (
-            self._radvd_binary, shlex.quote(self._config_file),
-            shlex.quote(self._pid_file), self._log_file)
-        job_str = '%s > "%s" 2>&1' % (radvd_command, self._log_file)
-        self._runner.run_async(job_str)
-
-        try:
-            self._wait_for_process(timeout=timeout)
-        except Error:
-            self.stop()
-            raise
-
-    def stop(self):
-        """Kills the daemon if it is running."""
-        self._shell.kill(self._ps_identifier)
-
-    def is_alive(self):
-        """
-        Returns:
-            True if the daemon is running.
-        """
-        return self._shell.is_alive(self._ps_identifier)
-
-    def pull_logs(self):
-        """Pulls the log files from where radvd is running.
-
-        Returns:
-            A string of the radvd logs.
-        """
-        # TODO: Auto pulling of logs when stop is called.
-        return self._shell.read_file(self._log_file)
-
-    def _wait_for_process(self, timeout=60):
-        """Waits for the process to come up.
-
-        Waits until the radvd process is found running, or there is
-        a timeout. If the program never comes up then the log file
-        will be scanned for errors.
-
-        Raises: See _scan_for_errors
-        """
-        start_time = time.time()
-        while time.time() - start_time < timeout and not self.is_alive():
-            time.sleep(0.1)
-            self._scan_for_errors(False)
-        self._scan_for_errors(True)
-
-    def _scan_for_errors(self, should_be_up):
-        """Scans the radvd log for any errors.
-
-        Args:
-            should_be_up: If true then radvd program is expected to be alive.
-                          If it is found not alive while this is true an error
-                          is thrown.
-
-        Raises:
-            Error: Raised when a radvd error is found.
-        """
-        # Store this so that all other errors have priority.
-        is_dead = not self.is_alive()
-
-        exited_prematurely = self._shell.search_file('Exiting', self._log_file)
-        if exited_prematurely:
-            raise Error('Radvd exited prematurely.', self)
-        if should_be_up and is_dead:
-            raise Error('Radvd failed to start', self)
-
-    def _write_configs(self, config):
-        """Writes the configs to the radvd config file.
-
-        Args:
-            config: a RadvdConfig object.
-        """
-        self._shell.delete_file(self._config_file)
-        conf = config.package_configs()
-        lines = ['interface %s {' % self._interface]
-        for (interface_option_key,
-             interface_option) in conf['interface_options'].items():
-            lines.append('\t%s %s;' %
-                         (str(interface_option_key), str(interface_option)))
-        lines.append('\tprefix %s' % conf['prefix'])
-        lines.append('\t{')
-        for prefix_option in conf['prefix_options'].items():
-            lines.append('\t\t%s;' % ' '.join(map(str, prefix_option)))
-        lines.append('\t};')
-        if conf['clients']:
-            lines.append('\tclients')
-            lines.append('\t{')
-            for client in conf['clients']:
-                lines.append('\t\t%s;' % client)
-            lines.append('\t};')
-        if conf['route']:
-            lines.append('\troute %s {' % conf['route'])
-            for route_option in conf['route_options'].items():
-                lines.append('\t\t%s;' % ' '.join(map(str, route_option)))
-            lines.append('\t};')
-        if conf['rdnss']:
-            lines.append('\tRDNSS %s {' %
-                         ' '.join([str(elem) for elem in conf['rdnss']]))
-            for rdnss_option in conf['rdnss_options'].items():
-                lines.append('\t\t%s;' % ' '.join(map(str, rdnss_option)))
-            lines.append('\t};')
-        lines.append('};')
-        output_config = '\n'.join(lines)
-        logging.info('Writing %s' % self._config_file)
-        logging.debug('******************Start*******************')
-        logging.debug('\n%s' % output_config)
-        logging.debug('*******************End********************')
-
-        self._shell.write_file(self._config_file, output_config)
diff --git a/src/antlion/controllers/ap_lib/radvd_config.py b/src/antlion/controllers/ap_lib/radvd_config.py
deleted file mode 100644
index f8e583e..0000000
--- a/src/antlion/controllers/ap_lib/radvd_config.py
+++ /dev/null
@@ -1,290 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.ap_lib import radvd_constants
-
-import collections
-
-
-class RadvdConfig(object):
-    """The root settings for the router advertisement daemon.
-
-    All the settings for a router advertisement daemon.
-    """
-
-    def __init__(self,
-                 prefix=radvd_constants.DEFAULT_PREFIX,
-                 clients=[],
-                 route=None,
-                 rdnss=[],
-                 ignore_if_missing=None,
-                 adv_send_advert=radvd_constants.ADV_SEND_ADVERT_ON,
-                 unicast_only=None,
-                 max_rtr_adv_interval=None,
-                 min_rtr_adv_interval=None,
-                 min_delay_between_ras=None,
-                 adv_managed_flag=None,
-                 adv_other_config_flag=None,
-                 adv_link_mtu=None,
-                 adv_reachable_time=None,
-                 adv_retrans_timer=None,
-                 adv_cur_hop_limit=None,
-                 adv_default_lifetime=None,
-                 adv_default_preference=None,
-                 adv_source_ll_address=None,
-                 adv_home_agent_flag=None,
-                 adv_home_agent_info=None,
-                 home_agent_lifetime=None,
-                 home_agent_preference=None,
-                 adv_mob_rtr_support_flag=None,
-                 adv_interval_opt=None,
-                 adv_on_link=radvd_constants.ADV_ON_LINK_ON,
-                 adv_autonomous=radvd_constants.ADV_AUTONOMOUS_ON,
-                 adv_router_addr=None,
-                 adv_valid_lifetime=None,
-                 adv_preferred_lifetime=None,
-                 base_6to4_interface=None,
-                 adv_route_lifetime=None,
-                 adv_route_preference=None,
-                 adv_rdnss_preference=None,
-                 adv_rdnss_open=None,
-                 adv_rdnss_lifetime=None):
-        """Construct a RadvdConfig.
-
-        Args:
-            prefix: IPv6 prefix and length, ie fd::/64
-            clients: A list of IPv6 link local addresses that will be the only
-                clients served.  All other IPv6 addresses will be ignored if
-                this list is present.
-            route: A route for the router advertisement with prefix.
-            rdnss: A list of recursive DNS servers
-            ignore_if_missing: A flag indicating whether or not the interface
-                is ignored if it does not exist at start-up. By default,
-                radvd exits.
-            adv_send_advert: A flag indicating whether or not the router sends
-                periodic router advertisements and responds to router
-                solicitations.
-            unicast_only: Indicates that the interface link type only supports
-                unicast.
-            max_rtr_adv_interval:The maximum time allowed between sending
-                unsolicited multicast router advertisements from the interface,
-                in seconds. Must be no less than 4 seconds and no greater than
-                1800 seconds.
-            min_rtr_adv_interval: The minimum time allowed between sending
-                unsolicited multicast router advertisements from the interface,
-                in seconds. Must be no less than 3 seconds and no greater than
-                0.75 * max_rtr_adv_interval.
-            min_delay_between_ras: The minimum time allowed between sending
-                multicast router advertisements from the interface, in seconds.,
-            adv_managed_flag: When set, hosts use the administered (stateful)
-                protocol for address autoconfiguration in addition to any
-                addresses autoconfigured using stateless address
-                autoconfiguration. The use of this flag is described in
-                RFC 4862.
-            adv_other_config_flag: When set, hosts use the administered
-                (stateful) protocol for autoconfiguration of other (non-address)
-                information. The use of this flag is described in RFC 4862.
-            adv_link_mtu: The MTU option is used in router advertisement
-                messages to insure that all nodes on a link use the same MTU
-                value in those cases where the link MTU is not well known.
-            adv_reachable_time: The time, in milliseconds, that a node assumes
-                a neighbor is reachable after having received a reachability
-                confirmation. Used by the Neighbor Unreachability Detection
-                algorithm (see Section 7.3 of RFC 4861). A value of zero means
-                unspecified (by this router).
-            adv_retrans_timer: The time, in milliseconds, between retransmitted
-                Neighbor Solicitation messages. Used by address resolution and
-                the Neighbor Unreachability Detection algorithm (see Sections
-                7.2 and 7.3 of RFC 4861). A value of zero means unspecified
-                (by this router).
-            adv_cur_hop_limit: The default value that should be placed in the
-                Hop Count field of the IP header for outgoing (unicast) IP
-                packets. The value should be set to the current diameter of the
-                Internet. The value zero means unspecified (by this router).
-            adv_default_lifetime: The lifetime associated with the default
-                router in units of seconds. The maximum value corresponds to
-                18.2 hours. A lifetime of 0 indicates that the router is not a
-                default router and should not appear on the default router list.
-                The router lifetime applies only to the router's usefulness as
-                a default router; it does not apply to information contained in
-                other message fields or options. Options that need time limits
-                for their information include their own lifetime fields.
-            adv_default_preference: The preference associated with the default
-                router, as either "low", "medium", or "high".
-            adv_source_ll_address: When set, the link-layer address of the
-                outgoing interface is included in the RA.
-            adv_home_agent_flag: When set, indicates that sending router is able
-                to serve as Mobile IPv6 Home Agent. When set, minimum limits
-                specified by Mobile IPv6 are used for MinRtrAdvInterval and
-                MaxRtrAdvInterval.
-            adv_home_agent_info: When set, Home Agent Information Option
-                (specified by Mobile IPv6) is included in Router Advertisements.
-                adv_home_agent_flag must also be set when using this option.
-            home_agent_lifetime: The length of time in seconds (relative to the
-                time the packet is sent) that the router is offering Mobile IPv6
-                 Home Agent services. A value 0 must not be used. The maximum
-                 lifetime is 65520 seconds (18.2 hours). This option is ignored,
-                 if adv_home_agent_info is not set.
-            home_agent_preference: The preference for the Home Agent sending
-                this Router Advertisement. Values greater than 0 indicate more
-                preferable Home Agent, values less than 0 indicate less
-                preferable Home Agent. This option is ignored, if
-                adv_home_agent_info is not set.
-            adv_mob_rtr_support_flag: When set, the Home Agent signals it
-                supports Mobile Router registrations (specified by NEMO Basic).
-                adv_home_agent_info must also be set when using this option.
-            adv_interval_opt: When set, Advertisement Interval Option
-                (specified by Mobile IPv6) is included in Router Advertisements.
-                When set, minimum limits specified by Mobile IPv6 are used for
-                MinRtrAdvInterval and MaxRtrAdvInterval.
-            adv_on_linkWhen set, indicates that this prefix can be used for
-                on-link determination. When not set the advertisement makes no
-                statement about on-link or off-link properties of the prefix.
-                For instance, the prefix might be used for address configuration
-                 with some of the addresses belonging to the prefix being
-                 on-link and others being off-link.
-            adv_autonomous: When set, indicates that this prefix can be used for
-                autonomous address configuration as specified in RFC 4862.
-            adv_router_addr: When set, indicates that the address of interface
-                is sent instead of network prefix, as is required by Mobile
-                IPv6. When set, minimum limits specified by Mobile IPv6 are used
-                for MinRtrAdvInterval and MaxRtrAdvInterval.
-            adv_valid_lifetime: The length of time in seconds (relative to the
-                time the packet is sent) that the prefix is valid for the
-                purpose of on-link determination. The symbolic value infinity
-                represents infinity (i.e. a value of all one bits (0xffffffff)).
-                 The valid lifetime is also used by RFC 4862.
-            adv_preferred_lifetimeThe length of time in seconds (relative to the
-                time the packet is sent) that addresses generated from the
-                prefix via stateless address autoconfiguration remain preferred.
-                The symbolic value infinity represents infinity (i.e. a value of
-                all one bits (0xffffffff)). See RFC 4862.
-            base_6to4_interface: If this option is specified, this prefix will
-                be combined with the IPv4 address of interface name to produce
-                a valid 6to4 prefix. The first 16 bits of this prefix will be
-                replaced by 2002 and the next 32 bits of this prefix will be
-                replaced by the IPv4 address assigned to interface name at
-                configuration time. The remaining 80 bits of the prefix
-                (including the SLA ID) will be advertised as specified in the
-                configuration file.
-            adv_route_lifetime: The lifetime associated with the route in units
-                of seconds. The symbolic value infinity represents infinity
-                (i.e. a value of all one bits (0xffffffff)).
-            adv_route_preference: The preference associated with the default
-                router, as either "low", "medium", or "high".
-            adv_rdnss_preference: The preference of the DNS server, compared to
-                other DNS servers advertised and used. 0 to 7 means less
-                important than manually configured nameservers in resolv.conf,
-                while 12 to 15 means more important.
-            adv_rdnss_open: "Service Open" flag. When set, indicates that RDNSS
-                continues to be available to hosts even if they moved to a
-                different subnet.
-            adv_rdnss_lifetime: The maximum duration how long the RDNSS entries
-                are used for name resolution. A value of 0 means the nameserver
-                should no longer be used. The maximum duration how long the
-                RDNSS entries are used for name resolution. A value of 0 means
-                the nameserver should no longer be used. The value, if not 0,
-                must be at least max_rtr_adv_interval. To ensure stale RDNSS
-                info gets removed in a timely fashion, this should not be
-                greater than 2*max_rtr_adv_interval.
-        """
-        self._prefix = prefix
-        self._clients = clients
-        self._route = route
-        self._rdnss = rdnss
-        self._ignore_if_missing = ignore_if_missing
-        self._adv_send_advert = adv_send_advert
-        self._unicast_only = unicast_only
-        self._max_rtr_adv_interval = max_rtr_adv_interval
-        self._min_rtr_adv_interval = min_rtr_adv_interval
-        self._min_delay_between_ras = min_delay_between_ras
-        self._adv_managed_flag = adv_managed_flag
-        self._adv_other_config_flag = adv_other_config_flag
-        self._adv_link_mtu = adv_link_mtu
-        self._adv_reachable_time = adv_reachable_time
-        self._adv_retrans_timer = adv_retrans_timer
-        self._adv_cur_hop_limit = adv_cur_hop_limit
-        self._adv_default_lifetime = adv_default_lifetime
-        self._adv_default_preference = adv_default_preference
-        self._adv_source_ll_address = adv_source_ll_address
-        self._adv_home_agent_flag = adv_home_agent_flag
-        self._adv_home_agent_info = adv_home_agent_info
-        self._home_agent_lifetime = home_agent_lifetime
-        self._home_agent_preference = home_agent_preference
-        self._adv_mob_rtr_support_flag = adv_mob_rtr_support_flag
-        self._adv_interval_opt = adv_interval_opt
-        self._adv_on_link = adv_on_link
-        self._adv_autonomous = adv_autonomous
-        self._adv_router_addr = adv_router_addr
-        self._adv_valid_lifetime = adv_valid_lifetime
-        self._adv_preferred_lifetime = adv_preferred_lifetime
-        self._base_6to4_interface = base_6to4_interface
-        self._adv_route_lifetime = adv_route_lifetime
-        self._adv_route_preference = adv_route_preference
-        self._adv_rdnss_preference = adv_rdnss_preference
-        self._adv_rdnss_open = adv_rdnss_open
-        self._adv_rdnss_lifetime = adv_rdnss_lifetime
-
-    def package_configs(self):
-        conf = dict()
-        conf['prefix'] = self._prefix
-        conf['clients'] = self._clients
-        conf['route'] = self._route
-        conf['rdnss'] = self._rdnss
-
-        conf['interface_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('IgnoreIfMissing', self._ignore_if_missing),
-                    ('AdvSendAdvert', self._adv_send_advert),
-                    ('UnicastOnly', self._unicast_only),
-                    ('MaxRtrAdvInterval', self._max_rtr_adv_interval),
-                    ('MinRtrAdvInterval', self._min_rtr_adv_interval),
-                    ('MinDelayBetweenRAs', self._min_delay_between_ras),
-                    ('AdvManagedFlag', self._adv_managed_flag),
-                    ('AdvOtherConfigFlag', self._adv_other_config_flag),
-                    ('AdvLinkMTU', self._adv_link_mtu),
-                    ('AdvReachableTime', self._adv_reachable_time),
-                    ('AdvRetransTimer', self._adv_retrans_timer),
-                    ('AdvCurHopLimit', self._adv_cur_hop_limit),
-                    ('AdvDefaultLifetime', self._adv_default_lifetime),
-                    ('AdvDefaultPreference', self._adv_default_preference),
-                    ('AdvSourceLLAddress', self._adv_source_ll_address),
-                    ('AdvHomeAgentFlag', self._adv_home_agent_flag),
-                    ('AdvHomeAgentInfo', self._adv_home_agent_info),
-                    ('HomeAgentLifetime', self._home_agent_lifetime),
-                    ('HomeAgentPreference', self._home_agent_preference),
-                    ('AdvMobRtrSupportFlag', self._adv_mob_rtr_support_flag),
-                    ('AdvIntervalOpt', self._adv_interval_opt))))
-
-        conf['prefix_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('AdvOnLink', self._adv_on_link),
-                    ('AdvAutonomous', self._adv_autonomous),
-                    ('AdvRouterAddr', self._adv_router_addr),
-                    ('AdvValidLifetime', self._adv_valid_lifetime),
-                    ('AdvPreferredLifetime', self._adv_preferred_lifetime),
-                    ('Base6to4Interface', self._base_6to4_interface))))
-
-        conf['route_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('AdvRouteLifetime', self._adv_route_lifetime),
-                    ('AdvRoutePreference', self._adv_route_preference))))
-
-        conf['rdnss_options'] = collections.OrderedDict(
-            filter(lambda pair: pair[1] is not None,
-                   (('AdvRDNSSPreference', self._adv_rdnss_preference),
-                    ('AdvRDNSSOpen', self._adv_rdnss_open),
-                    ('AdvRDNSSLifetime', self._adv_rdnss_lifetime))))
-
-        return conf
diff --git a/src/antlion/controllers/ap_lib/radvd_constants.py b/src/antlion/controllers/ap_lib/radvd_constants.py
deleted file mode 100644
index 172a660..0000000
--- a/src/antlion/controllers/ap_lib/radvd_constants.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-DEFAULT_PREFIX = 'fd00::/64'
-
-IGNORE_IF_MISSING_ON = 'on'
-IGNORE_IF_MISSING_OFF = 'off'
-
-ADV_SEND_ADVERT_ON = 'on'
-ADV_SEND_ADVERT_OFF = 'off'
-
-UNICAST_ONLY_ON = 'on'
-UNICAST_ONLY_OFF = 'off'
-
-ADV_MANAGED_FLAG_ON = 'on'
-ADV_MANAGED_FLAG_OFF = 'off'
-
-ADV_OTHER_CONFIG_FLAG_ON = 'on'
-ADV_OTHER_CONFIG_FLAG_OFF = 'off'
-
-ADV_DEFAULT_PREFERENCE_ON = 'on'
-ADV_DEFAULT_PREFERENCE_OFF = 'off'
-
-ADV_SOURCE_LL_ADDRESS_ON = 'on'
-ADV_SOURCE_LL_ADDRESS_OFF = 'off'
-
-ADV_HOME_AGENT_FLAG_ON = 'on'
-ADV_HOME_AGENT_FLAG_OFF = 'off'
-
-ADV_HOME_AGENT_INFO_ON = 'on'
-ADV_HOME_AGENT_INFO_OFF = 'off'
-
-ADV_MOB_RTR_SUPPORT_FLAG_ON = 'on'
-ADV_MOB_RTR_SUPPORT_FLAG_OFF = 'off'
-
-ADV_INTERVAL_OPT_ON = 'on'
-ADV_INTERVAL_OPT_OFF = 'off'
-
-ADV_ON_LINK_ON = 'on'
-ADV_ON_LINK_OFF = 'off'
-
-ADV_AUTONOMOUS_ON = 'on'
-ADV_AUTONOMOUS_OFF = 'off'
-
-ADV_ROUTER_ADDR_ON = 'on'
-ADV_ROUTER_ADDR_OFF = 'off'
-
-ADV_ROUTE_PREFERENCE_LOW = 'low'
-ADV_ROUTE_PREFERENCE_MED = 'medium'
-ADV_ROUTE_PREFERENCE_HIGH = 'high'
-
-ADV_RDNSS_OPEN_ON = 'on'
-ADV_RDNSS_OPEN_OFF = 'off'
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
deleted file mode 100644
index 78931e9..0000000
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
-
-
-def actiontec_pk5000(iface_wlan_2g=None,
-                     channel=None,
-                     security=None,
-                     ssid=None):
-    """A simulated implementation of what a Actiontec PK5000 AP
-    Args:
-        iface_wlan_2g: The 2.4 interface of the test AP.
-        channel: What channel to use.  Only 2.4Ghz is supported for this profile
-        security: A security profile.  Must be none or WPA2 as this is what is
-            supported by the PK5000.
-        ssid: Network name
-    Returns:
-        A hostapd config
-
-    Differences from real pk5000:
-        Supported Rates IE:
-            PK5000: Supported: 1, 2, 5.5, 11
-                    Extended: 6, 9, 12, 18, 24, 36, 48, 54
-            Simulated: Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                       Extended: 24, 36, 48, 54
-    """
-    if channel > 11:
-        # Technically this should be 14 but since the PK5000 is a US only AP,
-        # 11 is the highest allowable channel.
-        raise ValueError('The Actiontec PK5000 does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    interface = iface_wlan_2g
-    short_preamble = False
-    force_wmm = False
-    beacon_interval = 100
-    dtim_period = 3
-    # Sets the basic rates and supported rates of the PK5000
-    additional_params = utils.merge_dicts(
-        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-        hostapd_constants.CCK_AND_OFDM_DATA_RATES)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=hostapd_constants.MODE_11G,
-        force_wmm=force_wmm,
-        beacon_interval=beacon_interval,
-        dtim_period=dtim_period,
-        short_preamble=short_preamble,
-        additional_parameters=additional_params)
-
-    return config
-
-
-def actiontec_mi424wr(iface_wlan_2g=None,
-                      channel=None,
-                      security=None,
-                      ssid=None):
-    # TODO(b/143104825): Permit RIFS once it is supported
-    """A simulated implementation of an Actiontec MI424WR AP.
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        channel: What channel to use (2.4Ghz or 5Ghz).
-        security: A security profile.
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-
-    Differences from real MI424WR:
-        HT Capabilities:
-            MI424WR:
-                HT Rx STBC: Support for 1, 2, and 3
-            Simulated:
-                HT Rx STBC: Support for 1
-        HT Information:
-            MI424WR:
-                RIFS: Premitted
-            Simulated:
-                RIFS: Prohibited
-    """
-    if channel > 11:
-        raise ValueError('The Actiontec MI424WR does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
-    ]
-    rates = utils.merge_dicts(hostapd_constants.CCK_AND_OFDM_DATA_RATES,
-                              hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-    # Proprietary Atheros Communication: Adv Capability IE
-    # Proprietary Atheros Communication: Unknown IE
-    # Country Info: US Only IE
-    vendor_elements = {
-        'vendor_elements':
-        'dd0900037f01010000ff7f'
-        'dd0a00037f04010000000000'
-        '0706555320010b1b'
-    }
-
-    additional_params = utils.merge_dicts(rates, vendor_elements)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=iface_wlan_2g,
-        mode=hostapd_constants.MODE_11N_MIXED,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=1,
-        short_preamble=True,
-        n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
-
-    return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
deleted file mode 100644
index 376d02c..0000000
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
+++ /dev/null
@@ -1,569 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
-
-
-def asus_rtac66u(iface_wlan_2g=None,
-                 iface_wlan_5g=None,
-                 channel=None,
-                 security=None,
-                 ssid=None):
-    # TODO(b/143104825): Permit RIFS once it is supported
-    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
-    """A simulated implementation of an Asus RTAC66U AP.
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5Ghz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile.  Must be none or WPA2 as this is what is
-            supported by the RTAC66U.
-        ssid: Network name
-    Returns:
-        A hostapd config
-    Differences from real RTAC66U:
-        2.4 GHz:
-            Rates:
-                RTAC66U:
-                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                    Extended: 6, 9, 12, 48
-                Simulated:
-                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                    Extended: 24, 36, 48, 54
-            HT Capab:
-                Info
-                    RTAC66U: Green Field supported
-                    Simulated: Green Field not supported on Whirlwind.
-        5GHz:
-            VHT Capab:
-                RTAC66U:
-                    SU Beamformer Supported,
-                    SU Beamformee Supported,
-                    Beamformee STS Capability: 3,
-                    Number of Sounding Dimensions: 3,
-                    VHT Link Adaptation: Both
-                Simulated:
-                    Above are not supported on Whirlwind.
-            VHT Operation Info:
-                RTAC66U: Basic MCS Map (0x0000)
-                Simulated: Basic MCS Map (0xfffc)
-            VHT Tx Power Envelope:
-                RTAC66U: Local Max Tx Pwr Constraint: 1.0 dBm
-                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
-        Both:
-            HT Capab:
-                A-MPDU
-                    RTAC66U: MPDU Density 4
-                    Simulated: MPDU Density 8
-            HT Info:
-                RTAC66U: RIFS Permitted
-                Simulated: RIFS Prohibited
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    vht_channel_width = 20
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_LDPC,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
-        hostapd_constants.N_CAPABILITY_SGI20
-    ]
-    # WPS IE
-    # Broadcom IE
-    vendor_elements = {
-        'vendor_elements':
-        'dd310050f204104a00011010440001021047001093689729d373c26cb1563c6c570f33'
-        'd7103c0001031049000600372a000120'
-        'dd090010180200001c0000'
-    }
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        mode = hostapd_constants.MODE_11N_MIXED
-        ac_capabilities = None
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        mode = hostapd_constants.MODE_11AC_MIXED
-        ac_capabilities = [
-            hostapd_constants.AC_CAPABILITY_RXLDPC,
-            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
-        ]
-
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=mode,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=3,
-        short_preamble=False,
-        n_capabilities=n_capabilities,
-        ac_capabilities=ac_capabilities,
-        vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
-
-    return config
-
-
-def asus_rtac86u(iface_wlan_2g=None,
-                 iface_wlan_5g=None,
-                 channel=None,
-                 security=None,
-                 ssid=None):
-    """A simulated implementation of an Asus RTAC86U AP.
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5Ghz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile.  Must be none or WPA2 as this is what is
-            supported by the RTAC86U.
-        ssid: Network name
-    Returns:
-        A hostapd config
-    Differences from real RTAC86U:
-        2.4GHz:
-            Rates:
-                RTAC86U:
-                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                    Extended: 6, 9, 12, 48
-                Simulated:
-                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                    Extended: 24, 36, 48, 54
-        5GHz:
-            Country Code:
-                Simulated: Has two country code IEs, one that matches
-                the actual, and another explicit IE that was required for
-                hostapd's 802.11d to work.
-        Both:
-            RSN Capabilities (w/ WPA2):
-                RTAC86U:
-                    RSN PTKSA Replay Counter Capab: 16
-                Simulated:
-                    RSN PTKSA Replay Counter Capab: 1
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        mode = hostapd_constants.MODE_11G
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        spectrum_mgmt = False
-        # Measurement Pilot Transmission IE
-        vendor_elements = {'vendor_elements': '42020000'}
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        mode = hostapd_constants.MODE_11A
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        spectrum_mgmt = True,
-        # Country Information IE (w/ individual channel info)
-        # TPC Report Transmit Power IE
-        # Measurement Pilot Transmission IE
-        vendor_elements = {
-            'vendor_elements':
-            '074255532024011e28011e2c011e30011e34011e38011e3c011e40011e64011e'
-            '68011e6c011e70011e74011e84011e88011e8c011e95011e99011e9d011ea1011e'
-            'a5011e'
-            '23021300'
-            '42020000'
-        }
-
-    additional_params = utils.merge_dicts(rates, qbss, vendor_elements)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=mode,
-        force_wmm=False,
-        beacon_interval=100,
-        dtim_period=3,
-        short_preamble=False,
-        spectrum_mgmt_required=spectrum_mgmt,
-        additional_parameters=additional_params)
-    return config
-
-
-def asus_rtac5300(iface_wlan_2g=None,
-                  iface_wlan_5g=None,
-                  channel=None,
-                  security=None,
-                  ssid=None):
-    # TODO(b/143104825): Permit RIFS once it is supported
-    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
-    """A simulated implementation of an Asus RTAC5300 AP.
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5Ghz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile.  Must be none or WPA2 as this is what is
-            supported by the RTAC5300.
-        ssid: Network name
-    Returns:
-        A hostapd config
-    Differences from real RTAC5300:
-        2.4GHz:
-            Rates:
-                RTAC86U:
-                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                    Extended: 6, 9, 12, 48
-                Simulated:
-                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                    Extended: 24, 36, 48, 54
-        5GHz:
-            VHT Capab:
-                RTAC5300:
-                    SU Beamformer Supported,
-                    SU Beamformee Supported,
-                    Beamformee STS Capability: 4,
-                    Number of Sounding Dimensions: 4,
-                    MU Beamformer Supported,
-                    VHT Link Adaptation: Both
-                Simulated:
-                    Above are not supported on Whirlwind.
-            VHT Operation Info:
-                RTAC5300: Basic MCS Map (0x0000)
-                Simulated: Basic MCS Map (0xfffc)
-            VHT Tx Power Envelope:
-                RTAC5300: Local Max Tx Pwr Constraint: 1.0 dBm
-                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
-        Both:
-            HT Capab:
-                A-MPDU
-                    RTAC5300: MPDU Density 4
-                    Simulated: MPDU Density 8
-            HT Info:
-                RTAC5300: RIFS Permitted
-                Simulated: RIFS Prohibited
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    vht_channel_width = 20
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_LDPC,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_SGI20
-    ]
-
-    # Broadcom IE
-    vendor_elements = {'vendor_elements': 'dd090010180200009c0000'}
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        mode = hostapd_constants.MODE_11N_MIXED
-        # AsusTek IE
-        # Epigram 2.4GHz IE
-        vendor_elements['vendor_elements'] += 'dd25f832e4010101020100031411b5' \
-        '2fd437509c30b3d7f5cf5754fb125aed3b8507045aed3b85' \
-        'dd1e00904c0418bf0cb2798b0faaff0000aaff0000c0050001000000c3020002'
-        ac_capabilities = None
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        mode = hostapd_constants.MODE_11AC_MIXED
-        # Epigram 5GHz IE
-        vendor_elements['vendor_elements'] += 'dd0500904c0410'
-        ac_capabilities = [
-            hostapd_constants.AC_CAPABILITY_RXLDPC,
-            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
-        ]
-
-    additional_params = utils.merge_dicts(rates, qbss, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=mode,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=3,
-        short_preamble=False,
-        n_capabilities=n_capabilities,
-        ac_capabilities=ac_capabilities,
-        vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
-    return config
-
-
-def asus_rtn56u(iface_wlan_2g=None,
-                iface_wlan_5g=None,
-                channel=None,
-                security=None,
-                ssid=None):
-    """A simulated implementation of an Asus RTN56U AP.
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5Ghz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile.  Must be none or WPA2 as this is what is
-            supported by the RTN56U.
-        ssid: Network name
-    Returns:
-        A hostapd config
-    Differences from real RTN56U:
-        2.4GHz:
-            Rates:
-                RTN56U:
-                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                    Extended: 6, 9, 12, 48
-                Simulated:
-                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                    Extended: 24, 36, 48, 54
-        Both:
-            Fixed Parameters:
-                RTN56U: APSD Implemented
-                Simulated: APSD Not Implemented
-            HT Capab:
-                A-MPDU
-                    RTN56U: MPDU Density 4
-                    Simulated: MPDU Density 8
-            RSN Capabilities (w/ WPA2):
-                RTN56U:
-                    RSN PTKSA Replay Counter Capab: 1
-                Simulated:
-                    RSN PTKSA Replay Counter Capab: 16
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_SGI40,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
-    ]
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        # Ralink Technology IE
-        # US Country Code IE
-        # AP Channel Report IEs (2)
-        # WPS IE
-        vendor_elements = {
-            'vendor_elements':
-            'dd07000c4307000000'
-            '0706555320010b14'
-            '33082001020304050607'
-            '33082105060708090a0b'
-            'dd270050f204104a000110104400010210470010bc329e001dd811b286011c872c'
-            'd33448103c000101'
-        }
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        # Ralink Technology IE
-        # US Country Code IE
-        vendor_elements = {
-            'vendor_elements': 'dd07000c4307000000'
-            '0706555320010b14'
-        }
-
-    additional_params = utils.merge_dicts(rates, vendor_elements, qbss,
-                                          hostapd_constants.UAPSD_ENABLED)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=hostapd_constants.MODE_11N_MIXED,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=1,
-        short_preamble=False,
-        n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
-
-    return config
-
-
-def asus_rtn66u(iface_wlan_2g=None,
-                iface_wlan_5g=None,
-                channel=None,
-                security=None,
-                ssid=None):
-    # TODO(b/143104825): Permit RIFS once it is supported
-    """A simulated implementation of an Asus RTN66U AP.
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5Ghz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile.  Must be none or WPA2 as this is what is
-            supported by the RTN66U.
-        ssid: Network name
-    Returns:
-        A hostapd config
-    Differences from real RTN66U:
-        2.4GHz:
-            Rates:
-                RTN66U:
-                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                    Extended: 6, 9, 12, 48
-                Simulated:
-                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                    Extended: 24, 36, 48, 54
-        Both:
-            HT Info:
-                RTN66U: RIFS Permitted
-                Simulated: RIFS Prohibited
-            HT Capab:
-                Info:
-                    RTN66U: Green Field supported
-                    Simulated: Green Field not supported on Whirlwind.
-                A-MPDU
-                    RTN66U: MPDU Density 4
-                    Simulated: MPDU Density 8
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_LDPC,
-        hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
-    ]
-    # Broadcom IE
-    vendor_elements = {'vendor_elements': 'dd090010180200001c0000'}
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40)
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=hostapd_constants.MODE_11N_MIXED,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=3,
-        short_preamble=False,
-        n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
-
-    return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
deleted file mode 100644
index 957e214..0000000
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
-
-
-def belkin_f9k1001v5(iface_wlan_2g=None,
-                     channel=None,
-                     security=None,
-                     ssid=None):
-    # TODO(b/143104825): Permit RIFS once it is supported
-    """A simulated implementation of what a Belkin F9K1001v5 AP
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile (None or WPA2).
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-    Differences from real F9K1001v5:
-        Rates:
-            F9K1001v5:
-                Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                Extended: 6, 9, 12, 48
-            Simulated:
-                Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                Extended: 24, 36, 48, 54
-        HT Info:
-            F9K1001v5:
-                RIFS: Permitted
-            Simulated:
-                RIFS: Prohibited
-        RSN Capabilities (w/ WPA2):
-            F9K1001v5:
-                RSN PTKSA Replay Counter Capab: 1
-            Simulated:
-                RSN PTKSA Replay Counter Capab: 16
-    """
-    if channel > 11:
-        raise ValueError('The Belkin F9k1001v5 does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_SGI40,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
-    ]
-
-    rates = additional_params = utils.merge_dicts(
-        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-        hostapd_constants.CCK_AND_OFDM_DATA_RATES)
-
-    # Broadcom IE
-    # WPS IE
-    vendor_elements = {
-        'vendor_elements':
-        'dd090010180200100c0000'
-        'dd180050f204104a00011010440001021049000600372a000120'
-    }
-
-    additional_params = utils.merge_dicts(rates, vendor_elements)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=iface_wlan_2g,
-        mode=hostapd_constants.MODE_11N_MIXED,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=3,
-        short_preamble=False,
-        n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
-
-    return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
deleted file mode 100644
index 64d76f6..0000000
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
+++ /dev/null
@@ -1,312 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
-
-
-def linksys_ea4500(iface_wlan_2g=None,
-                   iface_wlan_5g=None,
-                   channel=None,
-                   security=None,
-                   ssid=None):
-    # TODO(b/143104825): Permit RIFS once it is supported
-    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
-    """A simulated implementation of what a Linksys EA4500 AP
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5GHz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile (None or WPA2).
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-    Differences from real EA4500:
-        CF (Contention-Free) Parameter IE:
-            EA4500: has CF Parameter IE
-            Simulated: does not have CF Parameter IE
-        HT Capab:
-            Info:
-                EA4500: Green Field supported
-                Simulated: Green Field not supported on Whirlwind.
-            A-MPDU
-                RTAC66U: MPDU Density 4
-                Simulated: MPDU Density 8
-        RSN Capab (w/ WPA2):
-            EA4500:
-                RSN PTKSA Replay Counter Capab: 1
-            Simulated:
-                RSN PTKSA Replay Counter Capab: 16
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_SGI40,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
-    ]
-
-    # Epigram HT Capabilities IE
-    # Epigram HT Additional Capabilities IE
-    # Marvell Semiconductor, Inc. IE
-    vendor_elements = {
-        'vendor_elements':
-        'dd1e00904c33fc0117ffffff0000000000000000000000000000000000000000'
-        'dd1a00904c3424000000000000000000000000000000000000000000'
-        'dd06005043030000'
-    }
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        obss_interval = 180
-        n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        obss_interval = None
-
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=hostapd_constants.MODE_11N_MIXED,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=1,
-        short_preamble=True,
-        obss_interval=obss_interval,
-        n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
-
-    return config
-
-
-def linksys_ea9500(iface_wlan_2g=None,
-                   iface_wlan_5g=None,
-                   channel=None,
-                   security=None,
-                   ssid=None):
-    """A simulated implementation of what a Linksys EA9500 AP
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5GHz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile (None or WPA2).
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-    Differences from real EA9500:
-        2.4GHz:
-            Rates:
-                EA9500:
-                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                    Extended: 6, 9, 12, 48
-                Simulated:
-                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                    Extended: 24, 36, 48, 54
-        RSN Capab (w/ WPA2):
-            EA9500:
-                RSN PTKSA Replay Counter Capab: 16
-            Simulated:
-                RSN PTKSA Replay Counter Capab: 1
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
-    # Measurement Pilot Transmission IE
-    vendor_elements = {'vendor_elements': '42020000'}
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        mode = hostapd_constants.MODE_11G
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        mode = hostapd_constants.MODE_11A
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-
-    additional_params = utils.merge_dicts(rates, qbss, vendor_elements)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=mode,
-        force_wmm=False,
-        beacon_interval=100,
-        dtim_period=1,
-        short_preamble=False,
-        additional_parameters=additional_params)
-    return config
-
-
-def linksys_wrt1900acv2(iface_wlan_2g=None,
-                        iface_wlan_5g=None,
-                        channel=None,
-                        security=None,
-                        ssid=None):
-    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
-    """A simulated implementation of what a Linksys WRT1900ACV2 AP
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5GHz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile (None or WPA2).
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-    Differences from real WRT1900ACV2:
-        5 GHz:
-            Simulated: Has two country code IEs, one that matches
-                the actual, and another explicit IE that was required for
-                hostapd's 802.11d to work.
-        Both:
-            HT Capab:
-                A-MPDU
-                    WRT1900ACV2: MPDU Density 4
-                    Simulated: MPDU Density 8
-            VHT Capab:
-                WRT1900ACV2:
-                    SU Beamformer Supported,
-                    SU Beamformee Supported,
-                    Beamformee STS Capability: 4,
-                    Number of Sounding Dimensions: 4,
-                Simulated:
-                    Above are not supported on Whirlwind.
-            RSN Capabilities (w/ WPA2):
-                WRT1900ACV2:
-                    RSN PTKSA Replay Counter Capab: 1
-                Simulated:
-                    RSN PTKSA Replay Counter Capab: 16
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_LDPC,
-        hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_SGI40
-    ]
-    ac_capabilities = [
-        hostapd_constants.AC_CAPABILITY_RXLDPC,
-        hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-        hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-        hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-        hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
-        hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
-    ]
-    vht_channel_width = 20
-    # Epigram, Inc. HT Capabilities IE
-    # Epigram, Inc. HT Additional Capabilities IE
-    # Marvell Semiconductor IE
-    vendor_elements = {
-        'vendor_elements':
-        'dd1e00904c336c0017ffffff0001000000000000000000000000001fff071800'
-        'dd1a00904c3424000000000000000000000000000000000000000000'
-        'dd06005043030000'
-    }
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        obss_interval = 180
-        spectrum_mgmt = False
-        local_pwr_constraint = {}
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        obss_interval = None
-        spectrum_mgmt = True,
-        local_pwr_constraint = {'local_pwr_constraint': 3}
-        # Country Information IE (w/ individual channel info)
-        vendor_elements['vendor_elements'] += '071e5553202401112801112c011130' \
-            '01119501179901179d0117a10117a50117'
-
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED,
-                                          local_pwr_constraint)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=hostapd_constants.MODE_11AC_MIXED,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=1,
-        short_preamble=True,
-        obss_interval=obss_interval,
-        n_capabilities=n_capabilities,
-        ac_capabilities=ac_capabilities,
-        vht_channel_width=vht_channel_width,
-        spectrum_mgmt_required=spectrum_mgmt,
-        additional_parameters=additional_params)
-    return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
deleted file mode 100644
index cf9bc93..0000000
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
+++ /dev/null
@@ -1,272 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
-
-
-def netgear_r7000(iface_wlan_2g=None,
-                  iface_wlan_5g=None,
-                  channel=None,
-                  security=None,
-                  ssid=None):
-    # TODO(b/143104825): Permit RIFS once it is supported
-    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
-    """A simulated implementation of what a Netgear R7000 AP
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5GHz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile (None or WPA2).
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-    Differences from real R7000:
-        2.4GHz:
-            Rates:
-                R7000:
-                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                    Extended: 6, 9, 12, 48
-                Simulated:
-                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                    Extended: 24, 36, 48,
-        5GHz:
-            VHT Capab:
-                R7000:
-                    SU Beamformer Supported,
-                    SU Beamformee Supported,
-                    Beamformee STS Capability: 3,
-                    Number of Sounding Dimensions: 3,
-                    VHT Link Adaptation: Both
-                Simulated:
-                    Above are not supported on Whirlwind.
-            VHT Operation Info:
-                R7000: Basic MCS Map (0x0000)
-                Simulated: Basic MCS Map (0xfffc)
-            VHT Tx Power Envelope:
-                R7000: Local Max Tx Pwr Constraint: 1.0 dBm
-                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
-        Both:
-            HT Capab:
-                A-MPDU
-                    R7000: MPDU Density 4
-                    Simulated: MPDU Density 8
-            HT Info:
-                R7000: RIFS Permitted
-                Simulated: RIFS Prohibited
-            RM Capabilities:
-                R7000:
-                    Beacon Table Measurement: Not Supported
-                    Statistic Measurement: Enabled
-                    AP Channel Report Capability: Enabled
-                Simulated:
-                    Beacon Table Measurement: Supported
-                    Statistic Measurement: Disabled
-                    AP Channel Report Capability: Disabled
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    vht_channel_width = 80
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_LDPC,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-        hostapd_constants.N_CAPABILITY_SGI20,
-    ]
-    # Netgear IE
-    # WPS IE
-    # Epigram, Inc. IE
-    # Broadcom IE
-    vendor_elements = {
-        'vendor_elements':
-        'dd0600146c000000'
-        'dd310050f204104a00011010440001021047001066189606f1e967f9c0102048817a7'
-        '69e103c0001031049000600372a000120'
-        'dd1e00904c0408bf0cb259820feaff0000eaff0000c0050001000000c3020002'
-        'dd090010180200001c0000'
-    }
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        mode = hostapd_constants.MODE_11N_MIXED
-        obss_interval = 300
-        ac_capabilities = None
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        mode = hostapd_constants.MODE_11AC_MIXED
-        n_capabilities += [
-            hostapd_constants.N_CAPABILITY_SGI40,
-        ]
-
-        if hostapd_config.ht40_plus_allowed(channel):
-            n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
-        elif hostapd_config.ht40_minus_allowed(channel):
-            n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_MINUS)
-
-        obss_interval = None
-        ac_capabilities = [
-            hostapd_constants.AC_CAPABILITY_RXLDPC,
-            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7
-        ]
-
-    additional_params = utils.merge_dicts(
-        rates, vendor_elements, qbss,
-        hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-        hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=mode,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=2,
-        short_preamble=False,
-        obss_interval=obss_interval,
-        n_capabilities=n_capabilities,
-        ac_capabilities=ac_capabilities,
-        vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
-    return config
-
-
-def netgear_wndr3400(iface_wlan_2g=None,
-                     iface_wlan_5g=None,
-                     channel=None,
-                     security=None,
-                     ssid=None):
-    # TODO(b/143104825): Permit RIFS on 5GHz once it is supported
-    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
-    """A simulated implementation of what a Netgear WNDR3400 AP
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5GHz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile (None or WPA2).
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-    Differences from real WNDR3400:
-        2.4GHz:
-            Rates:
-                WNDR3400:
-                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                    Extended: 6, 9, 12, 48
-                Simulated:
-                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                    Extended: 24, 36, 48,
-        5GHz:
-            HT Info:
-                WNDR3400: RIFS Permitted
-                Simulated: RIFS Prohibited
-        Both:
-            HT Capab:
-                A-MPDU
-                    WNDR3400: MPDU Density 16
-                    Simulated: MPDU Density 8
-                Info
-                    WNDR3400: Green Field supported
-                    Simulated: Green Field not supported on Whirlwind.
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_SGI40,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
-    ]
-    # WPS IE
-    # Broadcom IE
-    vendor_elements = {
-        'vendor_elements':
-        'dd310050f204104a0001101044000102104700108c403eb883e7e225ab139828703ade'
-        'dc103c0001031049000600372a000120'
-        'dd090010180200f0040000'
-    }
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        obss_interval = 300
-        n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40)
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        obss_interval = None
-        n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
-
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=hostapd_constants.MODE_11N_MIXED,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=2,
-        short_preamble=False,
-        obss_interval=obss_interval,
-        n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
-
-    return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
deleted file mode 100644
index b552b28..0000000
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
-
-
-def securifi_almond(iface_wlan_2g=None, channel=None, security=None,
-                    ssid=None):
-    """A simulated implementation of a Securifi Almond AP
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile (None or WPA2).
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-    Differences from real Almond:
-            Rates:
-                Almond:
-                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                    Extended: 6, 9, 12, 48
-                Simulated:
-                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                    Extended: 24, 36, 48, 54
-            HT Capab:
-                A-MPDU
-                    Almond: MPDU Density 4
-                    Simulated: MPDU Density 8
-            RSN Capab (w/ WPA2):
-                Almond:
-                    RSN PTKSA Replay Counter Capab: 1
-                Simulated:
-                    RSN PTKSA Replay Counter Capab: 16
-    """
-    if channel > 11:
-        raise ValueError('The Securifi Almond does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_HT40_PLUS,
-        hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_SGI40,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_DSSS_CCK_40
-    ]
-
-    rates = utils.merge_dicts(hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-                              hostapd_constants.CCK_AND_OFDM_DATA_RATES)
-
-    # Ralink Technology IE
-    # Country Information IE
-    # AP Channel Report IEs
-    vendor_elements = {
-        'vendor_elements':
-        'dd07000c4307000000'
-        '0706555320010b14'
-        '33082001020304050607'
-        '33082105060708090a0b'
-    }
-
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
-
-    additional_params = utils.merge_dicts(rates, vendor_elements, qbss)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=iface_wlan_2g,
-        mode=hostapd_constants.MODE_11N_MIXED,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=1,
-        short_preamble=True,
-        obss_interval=300,
-        n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
-
-    return config
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py b/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
deleted file mode 100644
index 8911e3e..0000000
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
+++ /dev/null
@@ -1,468 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
-
-
-def tplink_archerc5(iface_wlan_2g=None,
-                    iface_wlan_5g=None,
-                    channel=None,
-                    security=None,
-                    ssid=None):
-    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
-    """A simulated implementation of an TPLink ArcherC5 AP.
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5GHz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile (None or WPA2).
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-    Differences from real ArcherC5:
-        2.4GHz:
-            Rates:
-                ArcherC5:
-                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                    Extended: 6, 9, 12, 48
-                Simulated:
-                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                    Extended: 24, 36, 48, 54
-            HT Capab:
-                Info:
-                    ArcherC5: Green Field supported
-                    Simulated: Green Field not supported on Whirlwind.
-        5GHz:
-            VHT Capab:
-                ArcherC5:
-                    SU Beamformer Supported,
-                    SU Beamformee Supported,
-                    Beamformee STS Capability: 3,
-                    Number of Sounding Dimensions: 3,
-                    VHT Link Adaptation: Both
-                Simulated:
-                    Above are not supported on Whirlwind.
-            VHT Operation Info:
-                ArcherC5: Basic MCS Map (0x0000)
-                Simulated: Basic MCS Map (0xfffc)
-            VHT Tx Power Envelope:
-                ArcherC5: Local Max Tx Pwr Constraint: 1.0 dBm
-                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
-        Both:
-            HT Capab:
-                A-MPDU
-                    ArcherC5: MPDU Density 4
-                    Simulated: MPDU Density 8
-            HT Info:
-                ArcherC5: RIFS Permitted
-                Simulated: RIFS Prohibited
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    vht_channel_width = 20
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
-    ]
-    # WPS IE
-    # Broadcom IE
-    vendor_elements = {
-        'vendor_elements':
-        'dd310050f204104a000110104400010210470010d96c7efc2f8938f1efbd6e5148bfa8'
-        '12103c0001031049000600372a000120'
-        'dd090010180200001c0000'
-    }
-    qbss = {'bss_load_update_period': 50, 'chan_util_avg_period': 600}
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        short_preamble = True
-        mode = hostapd_constants.MODE_11N_MIXED
-        n_capabilities.append(hostapd_constants.N_CAPABILITY_DSSS_CCK_40)
-        ac_capabilities = None
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        short_preamble = False
-        mode = hostapd_constants.MODE_11AC_MIXED
-        n_capabilities.append(hostapd_constants.N_CAPABILITY_LDPC)
-        ac_capabilities = [
-            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-            hostapd_constants.AC_CAPABILITY_RXLDPC,
-            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-        ]
-
-    additional_params = utils.merge_dicts(
-        rates, vendor_elements, qbss,
-        hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-        hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=mode,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=1,
-        short_preamble=short_preamble,
-        n_capabilities=n_capabilities,
-        ac_capabilities=ac_capabilities,
-        vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
-    return config
-
-
-def tplink_archerc7(iface_wlan_2g=None,
-                    iface_wlan_5g=None,
-                    channel=None,
-                    security=None,
-                    ssid=None):
-    # TODO(b/143104825): Permit RIFS once it is supported
-    """A simulated implementation of an TPLink ArcherC7 AP.
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5GHz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile (None or WPA2).
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-    Differences from real ArcherC7:
-        5GHz:
-            Country Code:
-                Simulated: Has two country code IEs, one that matches
-                the actual, and another explicit IE that was required for
-                hostapd's 802.11d to work.
-        Both:
-            HT Info:
-                ArcherC7: RIFS Permitted
-                Simulated: RIFS Prohibited
-            RSN Capabilities (w/ WPA2):
-                ArcherC7:
-                    RSN PTKSA Replay Counter Capab: 1
-                Simulated:
-                    RSN PTKSA Replay Counter Capab: 16
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    vht_channel_width = 80
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_LDPC,
-        hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
-    ]
-    # Atheros IE
-    # WPS IE
-    vendor_elements = {
-        'vendor_elements':
-        'dd0900037f01010000ff7f'
-        'dd180050f204104a00011010440001021049000600372a000120'
-    }
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        short_preamble = True
-        mode = hostapd_constants.MODE_11N_MIXED
-        spectrum_mgmt = False
-        pwr_constraint = {}
-        ac_capabilities = None
-        vht_channel_width = None
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        short_preamble = False
-        mode = hostapd_constants.MODE_11AC_MIXED
-        spectrum_mgmt = True
-        # Country Information IE (w/ individual channel info)
-        vendor_elements['vendor_elements'] += (
-            '074255532024011e28011e2c011e30'
-            '011e3401173801173c01174001176401176801176c0117700117740117840117'
-            '8801178c011795011e99011e9d011ea1011ea5011e')
-        pwr_constraint = {'local_pwr_constraint': 3}
-        n_capabilities += [
-            hostapd_constants.N_CAPABILITY_SGI40,
-            hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
-        ]
-
-        if hostapd_config.ht40_plus_allowed(channel):
-            n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
-        elif hostapd_config.ht40_minus_allowed(channel):
-            n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_MINUS)
-
-        ac_capabilities = [
-            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_RXLDPC,
-            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-            hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-            hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN
-        ]
-
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED,
-                                          pwr_constraint)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=mode,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=1,
-        short_preamble=short_preamble,
-        n_capabilities=n_capabilities,
-        ac_capabilities=ac_capabilities,
-        vht_channel_width=vht_channel_width,
-        spectrum_mgmt_required=spectrum_mgmt,
-        additional_parameters=additional_params)
-    return config
-
-
-def tplink_c1200(iface_wlan_2g=None,
-                 iface_wlan_5g=None,
-                 channel=None,
-                 security=None,
-                 ssid=None):
-    # TODO(b/143104825): Permit RIFS once it is supported
-    # TODO(b/144446076): Address non-whirlwind hardware capabilities.
-    """A simulated implementation of an TPLink C1200 AP.
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        iface_wlan_5g: The 5GHz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile (None or WPA2).
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-    Differences from real C1200:
-        2.4GHz:
-            Rates:
-                C1200:
-                    Supported: 1, 2, 5.5, 11, 18, 24, 36, 54
-                    Extended: 6, 9, 12, 48
-                Simulated:
-                    Supported: 1, 2, 5.5, 11, 6, 9, 12, 18
-                    Extended: 24, 36, 48, 54
-            HT Capab:
-                Info:
-                    C1200: Green Field supported
-                    Simulated: Green Field not supported on Whirlwind.
-        5GHz:
-            VHT Operation Info:
-                C1200: Basic MCS Map (0x0000)
-                Simulated: Basic MCS Map (0xfffc)
-            VHT Tx Power Envelope:
-                C1200: Local Max Tx Pwr Constraint: 7.0 dBm
-                Simulated: Local Max Tx Pwr Constraint: 23.0 dBm
-        Both:
-            HT Info:
-                C1200: RIFS Permitted
-                Simulated: RIFS Prohibited
-    """
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_interface(iface_wlan_5g,
-                                   hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    # Common Parameters
-    rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    vht_channel_width = 20
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1,
-        hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935
-    ]
-    # WPS IE
-    # Broadcom IE
-    vendor_elements = {
-        'vendor_elements':
-        'dd350050f204104a000110104400010210470010000000000000000000000000000000'
-        '00103c0001031049000a00372a00012005022688'
-        'dd090010180200000c0000'
-    }
-
-    # 2.4GHz
-    if channel <= 11:
-        interface = iface_wlan_2g
-        rates.update(hostapd_constants.CCK_AND_OFDM_BASIC_RATES)
-        short_preamble = True
-        mode = hostapd_constants.MODE_11N_MIXED
-        ac_capabilities = None
-
-    # 5GHz
-    else:
-        interface = iface_wlan_5g
-        rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        short_preamble = False
-        mode = hostapd_constants.MODE_11AC_MIXED
-        n_capabilities.append(hostapd_constants.N_CAPABILITY_LDPC)
-        ac_capabilities = [
-            hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-            hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-            hostapd_constants.AC_CAPABILITY_RXLDPC,
-            hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-            hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-            hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-        ]
-
-    additional_params = utils.merge_dicts(
-        rates, vendor_elements, hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-        hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=interface,
-        mode=mode,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=1,
-        short_preamble=short_preamble,
-        n_capabilities=n_capabilities,
-        ac_capabilities=ac_capabilities,
-        vht_channel_width=vht_channel_width,
-        additional_parameters=additional_params)
-    return config
-
-
-def tplink_tlwr940n(iface_wlan_2g=None,
-                    channel=None,
-                    security=None,
-                    ssid=None):
-    # TODO(b/143104825): Permit RIFS once it is supported
-    """A simulated implementation of an TPLink TLWR940N AP.
-    Args:
-        iface_wlan_2g: The 2.4Ghz interface of the test AP.
-        channel: What channel to use.
-        security: A security profile (None or WPA2).
-        ssid: The network name.
-    Returns:
-        A hostapd config.
-    Differences from real TLWR940N:
-        HT Info:
-            TLWR940N: RIFS Permitted
-            Simulated: RIFS Prohibited
-        RSN Capabilities (w/ WPA2):
-            TLWR940N:
-                RSN PTKSA Replay Counter Capab: 1
-            Simulated:
-                RSN PTKSA Replay Counter Capab: 16
-    """
-    if channel > 11:
-        raise ValueError('The mock TP-Link TLWR940N does not support 5Ghz. '
-                         'Invalid channel (%s)' % channel)
-    # Verify interface and security
-    hostapd_utils.verify_interface(iface_wlan_2g,
-                                   hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security,
-                                       [None, hostapd_constants.WPA2])
-    if security:
-        hostapd_utils.verify_cipher(security,
-                                    [hostapd_constants.WPA2_DEFAULT_CIPER])
-
-    n_capabilities = [
-        hostapd_constants.N_CAPABILITY_SGI20,
-        hostapd_constants.N_CAPABILITY_TX_STBC,
-        hostapd_constants.N_CAPABILITY_RX_STBC1
-    ]
-
-    rates = utils.merge_dicts(hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-                              hostapd_constants.CCK_AND_OFDM_DATA_RATES)
-
-    # Atheros Communications, Inc. IE
-    # WPS IE
-    vendor_elements = {
-        'vendor_elements':
-        'dd0900037f01010000ff7f'
-        'dd260050f204104a0001101044000102104900140024e2600200010160000002000160'
-        '0100020001'
-    }
-
-    additional_params = utils.merge_dicts(rates, vendor_elements,
-                                          hostapd_constants.UAPSD_ENABLED)
-
-    config = hostapd_config.HostapdConfig(
-        ssid=ssid,
-        channel=channel,
-        hidden=False,
-        security=security,
-        interface=iface_wlan_2g,
-        mode=hostapd_constants.MODE_11N_MIXED,
-        force_wmm=True,
-        beacon_interval=100,
-        dtim_period=1,
-        short_preamble=True,
-        n_capabilities=n_capabilities,
-        additional_parameters=additional_params)
-
-    return config
diff --git a/src/antlion/controllers/ap_lib/wireless_network_management.py b/src/antlion/controllers/ap_lib/wireless_network_management.py
deleted file mode 100644
index ecd5b3b..0000000
--- a/src/antlion/controllers/ap_lib/wireless_network_management.py
+++ /dev/null
@@ -1,149 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from typing import List, NewType, Optional
-
-from antlion.controllers.ap_lib.radio_measurement import NeighborReportElement
-
-BssTransitionCandidateList = NewType('BssTransitionCandidateList',
-                                     List[NeighborReportElement])
-
-
-class BssTerminationDuration:
-    """Representation of BSS Termination Duration subelement.
-
-    See IEEE 802.11-2020 Figure 9-341.
-    """
-
-    def __init__(self, duration: int):
-        """Create a BSS Termination Duration subelement.
-
-        Args:
-            duration: number of minutes the BSS will be offline.
-        """
-        # Note: hostapd does not currently support setting BSS Termination TSF,
-        # which is the other value held in this subelement.
-        self._duration = duration
-
-    @property
-    def duration(self) -> int:
-        return self._duration
-
-
-class BssTransitionManagementRequest:
-    """Representation of BSS Transition Management request.
-
-    See IEEE 802.11-2020 9.6.13.9.
-    """
-
-    def __init__(
-            self,
-            preferred_candidate_list_included: bool = False,
-            abridged: bool = False,
-            disassociation_imminent: bool = False,
-            ess_disassociation_imminent: bool = False,
-            disassociation_timer: int = 0,
-            validity_interval: int = 1,
-            bss_termination_duration: Optional[BssTerminationDuration] = None,
-            session_information_url: Optional[str] = None,
-            candidate_list: Optional[BssTransitionCandidateList] = None):
-        """Create a BSS Transition Management request.
-
-        Args:
-            preferred_candidate_list_included: whether the candidate list is a
-                preferred candidate list, or (if False) a list of known
-                candidates.
-            abridged: whether a preference value of 0 is assigned to all BSSIDs
-                that do not appear in the candidate list, or (if False) AP has
-                no recommendation for/against anything not in the candidate
-                list.
-            disassociation_imminent: whether the STA is about to be
-                disassociated by the AP.
-            ess_disassociation_imminent: whether the STA will be disassociated
-                from the ESS.
-            disassociation_timer: the number of beacon transmission times
-                (TBTTs) until the AP disassociates this STA (default 0, meaning
-                AP has not determined when it will disassociate this STA).
-            validity_interval: number of TBTTs until the candidate list is no
-                longer valid (default 1).
-            bss_termination_duration: BSS Termination Duration subelement.
-            session_information_url: this URL is included if ESS disassociation
-                is immiment.
-            candidate_list: zero or more neighbor report elements.
-        """
-        # Request mode field, see IEEE 802.11-2020 Figure 9-924.
-        self._preferred_candidate_list_included = preferred_candidate_list_included
-        self._abridged = abridged
-        self._disassociation_imminent = disassociation_imminent
-        self._ess_disassociation_imminent = ess_disassociation_imminent
-
-        # Disassociation Timer, see IEEE 802.11-2020 Figure 9-925
-        self._disassociation_timer = disassociation_timer
-
-        # Validity Interval, see IEEE 802.11-2020 9.6.13.9
-        self._validity_interval = validity_interval
-
-        # BSS Termination Duration, see IEEE 802.11-2020 9.6.13.9 and Figure 9-341
-        self._bss_termination_duration = bss_termination_duration
-
-        # Session Information URL, see IEEE 802.11-2020 Figure 9-926
-        self._session_information_url = session_information_url
-
-        # BSS Transition Candidate List Entries, IEEE 802.11-2020 9.6.13.9.
-        self._candidate_list = candidate_list
-
-    @property
-    def preferred_candidate_list_included(self) -> bool:
-        return self._preferred_candidate_list_included
-
-    @property
-    def abridged(self) -> bool:
-        return self._abridged
-
-    @property
-    def disassociation_imminent(self) -> bool:
-        return self._disassociation_imminent
-
-    @property
-    def bss_termination_included(self) -> bool:
-        return self._bss_termination_duration is not None
-
-    @property
-    def ess_disassociation_imminent(self) -> bool:
-        return self._ess_disassociation_imminent
-
-    @property
-    def disassociation_timer(self) -> Optional[int]:
-        if self.disassociation_imminent:
-            return self._disassociation_timer
-        # Otherwise, field is reserved.
-        return None
-
-    @property
-    def validity_interval(self) -> int:
-        return self._validity_interval
-
-    @property
-    def bss_termination_duration(self) -> Optional[BssTerminationDuration]:
-        return self._bss_termination_duration
-
-    @property
-    def session_information_url(self) -> Optional[str]:
-        return self._session_information_url
-
-    @property
-    def candidate_list(self) -> Optional[BssTransitionCandidateList]:
-        return self._candidate_list
diff --git a/src/antlion/controllers/arduino_wifi_dongle.py b/src/antlion/controllers/arduino_wifi_dongle.py
deleted file mode 100644
index 18f57e9..0000000
--- a/src/antlion/controllers/arduino_wifi_dongle.py
+++ /dev/null
@@ -1,390 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import re
-import subprocess
-import threading
-import time
-from datetime import datetime
-
-from serial import Serial
-
-from antlion import logger
-from antlion import signals
-from antlion import utils
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'ArduinoWifiDongle'
-ACTS_CONTROLLER_REFERENCE_NAME = 'arduino_wifi_dongles'
-
-WIFI_DONGLE_EMPTY_CONFIG_MSG = 'Configuration is empty, abort!'
-WIFI_DONGLE_NOT_LIST_CONFIG_MSG = 'Configuration should be a list, abort!'
-
-DEV = '/dev/'
-IP = 'IP: '
-STATUS = 'STATUS: '
-SSID = 'SSID: '
-RSSI = 'RSSI: '
-PING = 'PING: '
-SCAN_BEGIN = 'Scan Begin'
-SCAN_END = 'Scan End'
-READ_TIMEOUT = 10
-BAUD_RATE = 9600
-TMP_DIR = 'tmp/'
-SSID_KEY = 'SSID'
-PWD_KEY = 'password'
-
-
-class ArduinoWifiDongleError(signals.ControllerError):
-    pass
-
-
-def create(configs):
-    """Creates ArduinoWifiDongle objects.
-
-    Args:
-        configs: A list of dicts or a list of serial numbers, each representing
-                 a configuration of a arduino wifi dongle.
-
-    Returns:
-        A list of Wifi dongle objects.
-    """
-    if not configs:
-        raise ArduinoWifiDongleError(WIFI_DONGLE_EMPTY_CONFIG_MSG)
-    elif not isinstance(configs, list):
-        raise ArduinoWifiDongleError(WIFI_DONGLE_NOT_LIST_CONFIG_MSG)
-    elif isinstance(configs[0], str):
-        # Configs is a list of serials.
-        return get_instances(configs)
-    else:
-        # Configs is a list of dicts.
-        return get_instances_with_configs(configs)
-
-
-def destroy(wcs):
-    for wc in wcs:
-        wc.clean_up()
-
-
-def get_instances(configs):
-    wcs = []
-    for s in configs:
-        wcs.append(ArduinoWifiDongle(s))
-    return wcs
-
-
-def get_instances_with_configs(configs):
-    wcs = []
-    for c in configs:
-        try:
-            s = c.pop('serial')
-        except KeyError:
-            raise ArduinoWifiDongleError(
-                '"serial" is missing for ArduinoWifiDongle config %s.' % c)
-        wcs.append(ArduinoWifiDongle(s))
-    return wcs
-
-
-class ArduinoWifiDongle(object):
-    """Class representing an arduino wifi dongle.
-
-    Each object of this class represents one wifi dongle in ACTS.
-
-    Attribtues:
-        serial: Short serial number of the wifi dongle in string.
-        port: The terminal port the dongle is connected to in string.
-        log: A logger adapted from root logger with added token specific to an
-             ArduinoWifiDongle instance.
-        log_file_fd: File handle of the log file.
-        set_logging: Logging for the dongle is enabled when this param is set
-        lock: Lock to acquire and release set_logging variable
-        ssid: SSID of the wifi network the dongle is connected to.
-        ip_addr: IP address on the wifi interface.
-        scan_results: Most recent scan results.
-        ping: Ping status in bool - ping to www.google.com
-    """
-
-    def __init__(self, serial):
-        """Initializes the ArduinoWifiDongle object.
-
-        Args:
-            serial: The serial number for the wifi dongle.
-        """
-        if not serial:
-            raise ArduinoWifiDongleError(
-                'The ArduinoWifiDongle serial number must not be empty.')
-        self.serial = serial
-        self.port = self._get_serial_port()
-        self.log = logger.create_tagged_trace_logger(
-            'ArduinoWifiDongle|%s' % self.serial)
-        log_path_base = getattr(logging, 'log_path', '/tmp/logs')
-        self.log_file_path = os.path.join(
-            log_path_base, 'ArduinoWifiDongle_%s_serial_log.txt' % self.serial)
-        self.log_file_fd = open(self.log_file_path, 'a')
-
-        self.set_logging = True
-        self.lock = threading.Lock()
-        self.start_controller_log()
-
-        self.ssid = None
-        self.ip_addr = None
-        self.status = 0
-        self.scan_results = []
-        self.scanning = False
-        self.ping = False
-
-        os.makedirs(TMP_DIR, exist_ok=True)
-
-    def clean_up(self):
-        """Cleans up the controller and releases any resources it claimed."""
-        self.stop_controller_log()
-        self.log_file_fd.close()
-
-    def _get_serial_port(self):
-        """Get the serial port for a given ArduinoWifiDongle serial number.
-
-        Returns:
-            Serial port in string if the dongle is attached.
-        """
-        cmd = 'ls %s' % DEV
-        serial_ports = utils.exe_cmd(cmd).decode('utf-8', 'ignore').split('\n')
-        for port in serial_ports:
-            if 'USB' not in port:
-                continue
-            tty_port = '%s%s' % (DEV, port)
-            cmd = 'udevadm info %s' % tty_port
-            udev_output = utils.exe_cmd(cmd).decode('utf-8', 'ignore')
-            result = re.search('ID_SERIAL_SHORT=(.*)\n', udev_output)
-            if self.serial == result.group(1):
-                logging.info('Found wifi dongle %s at serial port %s' %
-                             (self.serial, tty_port))
-                return tty_port
-        raise ArduinoWifiDongleError('Wifi dongle %s is specified in config'
-                                     ' but is not attached.' % self.serial)
-
-    def write(self, arduino, file_path, network=None):
-        """Write an ino file to the arduino wifi dongle.
-
-        Args:
-            arduino: path of the arduino executable.
-            file_path: path of the ino file to flash onto the dongle.
-            network: wifi network to connect to.
-
-        Returns:
-            True: if the write is sucessful.
-            False: if not.
-        """
-        return_result = True
-        self.stop_controller_log('Flashing %s\n' % file_path)
-        cmd = arduino + file_path + ' --upload --port ' + self.port
-        if network:
-            cmd = self._update_ino_wifi_network(arduino, file_path, network)
-        self.log.info('Command is %s' % cmd)
-        proc = subprocess.Popen(cmd,
-                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                                shell=True)
-        _, _ = proc.communicate()
-        return_code = proc.returncode
-        if return_code != 0:
-            self.log.error('Failed to write file %s' % return_code)
-            return_result = False
-        self.start_controller_log('Flashing complete\n')
-        return return_result
-
-    def _update_ino_wifi_network(self, arduino, file_path, network):
-        """Update wifi network in the ino file.
-
-        Args:
-            arduino: path of the arduino executable.
-            file_path: path of the ino file to flash onto the dongle
-            network: wifi network to update the ino file with
-
-        Returns:
-            cmd: arduino command to run to flash the .ino file
-        """
-        tmp_file = '%s%s' % (TMP_DIR, file_path.split('/')[-1])
-        utils.exe_cmd('cp %s %s' % (file_path, tmp_file))
-        ssid = network[SSID_KEY]
-        pwd = network[PWD_KEY]
-        sed_cmd = 'sed -i \'s/"wifi_tethering_test"/"%s"/\' %s' % (
-            ssid, tmp_file)
-        utils.exe_cmd(sed_cmd)
-        sed_cmd = 'sed -i  \'s/"password"/"%s"/\' %s' % (pwd, tmp_file)
-        utils.exe_cmd(sed_cmd)
-        cmd = "%s %s --upload --port %s" % (arduino, tmp_file, self.port)
-        return cmd
-
-    def start_controller_log(self, msg=None):
-        """Reads the serial port and writes the data to ACTS log file.
-
-        This method depends on the logging enabled in the .ino files. The logs
-        are read from the serial port and are written to the ACTS log after
-        adding a timestamp to the data.
-
-        Args:
-            msg: Optional param to write to the log file.
-        """
-        if msg:
-            curr_time = str(datetime.now())
-            self.log_file_fd.write(curr_time + ' INFO: ' + msg)
-        t = threading.Thread(target=self._start_log)
-        t.daemon = True
-        t.start()
-
-    def stop_controller_log(self, msg=None):
-        """Stop the controller log.
-
-        Args:
-            msg: Optional param to write to the log file.
-        """
-        with self.lock:
-            self.set_logging = False
-        if msg:
-            curr_time = str(datetime.now())
-            self.log_file_fd.write(curr_time + ' INFO: ' + msg)
-
-    def _start_log(self):
-        """Target method called by start_controller_log().
-
-        This method is called as a daemon thread, which continuously reads the
-        serial port. Stops when set_logging is set to False or when the test
-        ends.
-        """
-        self.set_logging = True
-        ser = Serial(self.port, BAUD_RATE)
-        while True:
-            curr_time = str(datetime.now())
-            data = ser.readline().decode('utf-8', 'ignore')
-            self._set_vars(data)
-            with self.lock:
-                if not self.set_logging:
-                    break
-            self.log_file_fd.write(curr_time + " " + data)
-
-    def _set_vars(self, data):
-        """Sets the variables by reading from the serial port.
-
-        Wifi dongle data such as wifi status, ip address, scan results
-        are read from the serial port and saved inside the class.
-
-        Args:
-            data: New line from the serial port.
-        """
-        # 'data' represents each line retrieved from the device's serial port.
-        # since we depend on the serial port logs to get the attributes of the
-        # dongle, every line has the format of {ino_file: method: param: value}.
-        # We look for the attribute in the log and retrieve its value.
-        # Ex: data = "connect_wifi: loop(): STATUS: 3" then val = "3"
-        # Similarly, we check when the scan has begun and ended and get all the
-        # scan results in between.
-        if data.count(':') != 3:
-            return
-        val = data.split(':')[-1].lstrip().rstrip()
-        if SCAN_BEGIN in data:
-            self.scan_results = []
-            self.scanning = True
-        elif SCAN_END in data:
-            self.scanning = False
-        elif self.scanning:
-            self.scan_results.append(data)
-        elif IP in data:
-            self.ip_addr = None if val == '0.0.0.0' else val
-        elif SSID in data:
-            self.ssid = val
-        elif STATUS in data:
-            self.status = int(val)
-        elif PING in data:
-            self.ping = int(val) != 0
-
-    def ip_address(self, exp_result=True, timeout=READ_TIMEOUT):
-        """Get the ip address of the wifi dongle.
-
-        Args:
-            exp_result: True if IP address is expected (wifi connected).
-            timeout: Optional param that specifies the wait time for the IP
-                     address to come up on the dongle.
-
-        Returns:
-            IP: addr in string, if wifi connected.
-                None if not connected.
-        """
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.ip_addr) or (
-                    not exp_result and not self.ip_addr):
-                break
-            time.sleep(1)
-        return self.ip_addr
-
-    def wifi_status(self, exp_result=True, timeout=READ_TIMEOUT):
-        """Get wifi status on the dongle.
-
-        Returns:
-            True: if wifi is connected.
-            False: if not connected.
-        """
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.status == 3) or (
-                    not exp_result and not self.status):
-                break
-            time.sleep(1)
-        return self.status == 3
-
-    def wifi_scan(self, exp_result=True, timeout=READ_TIMEOUT):
-        """Get the wifi scan results.
-
-        Args:
-            exp_result: True if scan results are expected.
-            timeout: Optional param that specifies the wait time for the scan
-                     results to come up on the dongle.
-
-        Returns:
-            list of dictionaries each with SSID and RSSI of the network
-            found in the scan.
-        """
-        scan_networks = []
-        d = {}
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.scan_results) or (
-                    not exp_result and not self.scan_results):
-                break
-            time.sleep(1)
-        for i in range(len(self.scan_results)):
-            if SSID in self.scan_results[i]:
-                d.clear()
-                d[SSID] = self.scan_results[i].split(':')[-1].rstrip()
-            elif RSSI in self.scan_results[i]:
-                d[RSSI] = self.scan_results[i].split(':')[-1].rstrip()
-                scan_networks.append(d)
-
-        return scan_networks
-
-    def ping_status(self, exp_result=True, timeout=READ_TIMEOUT):
-        """ Get ping status on the dongle.
-
-        Returns:
-            True: if ping is successful
-            False: if not successful
-        """
-        curr_time = time.time()
-        while time.time() < curr_time + timeout:
-            if (exp_result and self.ping) or (not exp_result and not self.ping):
-                break
-            time.sleep(1)
-        return self.ping
diff --git a/src/antlion/controllers/asus_axe11000_ap.py b/src/antlion/controllers/asus_axe11000_ap.py
deleted file mode 100644
index d4372ac..0000000
--- a/src/antlion/controllers/asus_axe11000_ap.py
+++ /dev/null
@@ -1,763 +0,0 @@
-"""Controller for Asus AXE11000 access point."""
-
-import time
-from antlion import logger
-from selenium import webdriver
-from selenium.common.exceptions import NoSuchElementException
-from selenium.webdriver.chrome.options import Options
-from selenium.webdriver.support.ui import Select
-
-MOBLY_CONTROLLER_CONFIG_NAME = "AsusAXE11000AP"
-ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
-
-# Access point UI parameters
-USERNAME = "login_username"
-PASSWORD = "login_passwd"
-SIGN_IN_ID = "button"
-APPLY_BUTTON = "apply_btn"
-APPLY_BUTTON_ID = "applyButton"
-WIRELESS_SETTINGS = "Advanced_Wireless_Content_menu"
-GENERAL_TAB = "Advanced_Wireless_Content_tab"
-PROFESSIONAL_TAB = "Advanced_WAdvanced_Content_tab"
-HE_MODE_ID = "he_mode_field"
-WL_UNIT = "wl_unit"
-WL_11AX = "wl_11ax"
-WL_RADIO = "wl_radio"
-WL_CLOSED = "wl_closed"
-RADIO = "radio"
-BAND_2G_CHANNEL = "band0_channel"
-BAND_5G_CHANNEL = "band1_channel"
-BAND_6G_CHANNEL = "band2_channel"
-BAND_2G_AUTH = "band0_auth_mode_x"
-BAND_5G_AUTH = "band1_auth_mode_x"
-BAND_6G_AUTH = "band2_auth_mode_x"
-BAND_2G_SSID = "band0_ssid"
-BAND_5G_SSID = "band1_ssid"
-BAND_6G_SSID = "band2_ssid"
-BAND_2G_PSK = "band0_wpa_psk"
-BAND_5G_PSK = "band1_wpa_psk"
-BAND_6G_PSK = "band2_wpa_psk"
-BAND_2G_RAD_IP = "band0_radius_ipaddr"
-BAND_5G_RAD_IP = "band1_radius_ipaddr"
-BAND_2G_RAD_PORT = "band0_radius_port"
-BAND_5G_RAD_PORT = "band1_radius_port"
-BAND_2G_RAD_KEY = "band0_radius_key"
-BAND_5G_RAD_KEY = "band1_radius_key"
-SMART_CONNECT = "smartcon_enable_field"
-BROWSER_WAIT_SHORT_TIMEOUT = 6
-BROWSER_WAIT_TIMEOUT = 15
-BROWSER_WAIT_LONG_TIMEOUT = 90
-BROWSER_WAIT_VERY_LONG_TIMEOUT = 180
-
-# Access point supported modes, channels
-VALID_BANDS = ["2g", "5g", "6g"]
-WL_BAND_VALUE = {"2g": "0", "5g": "1", "6g": "2"}
-CHANNELS_2G = {
-    0: "0",
-    1: "1",
-    2: "2",
-    3: "3",
-    4: "4",
-    5: "5",
-    6: "6",
-    7: "7",
-    8: "8",
-    9: "9",
-    10: "10",
-    11: "11"
-}
-CHANNELS_5G = {
-    0: "0",
-    36: "36/160",
-    40: "40/160",
-    44: "44/160",
-    48: "48/160",
-    52: "52/160",
-    56: "56/160",
-    60: "60/160",
-    64: "64/160",
-    100: "100/160",
-    104: "104/160",
-    108: "108/160",
-    112: "112/160",
-    116: "116/160",
-    120: "120/160",
-    124: "124/160",
-    128: "128/160",
-    132: "132/80",
-    136: "136/80",
-    140: "140/80",
-    144: "144/80",
-    149: "149/80",
-    153: "153/80",
-    157: "157/80",
-    161: "161/80",
-    165: "165"
-}
-CHANNELS_6G = {
-    0: "0",
-    37: "6g37/160",
-    53: "6g53/160",
-    69: "6g69/160",
-    85: "6g85/160",
-    101: "6g101/160",
-    117: "6g117/160",
-    133: "6g133/160",
-    149: "6g149/160",
-    165: "6g165/160",
-    181: "6g181/160",
-    197: "6g197/160",
-    213: "6g213/160"
-}
-
-
-def create(configs):
-  """Creates ap controllers from a json config."""
-  return [AsusAXE11000AP(c) for c in configs]
-
-
-def destroy(aps):
-  """Destroys a list of ap controllers."""
-  for ap in aps:
-    ap.reset_to_default_ap_settings()
-    ap.driver.quit()
-
-
-class AsusAXE11000AP(object):
-  """Asus AXE11000 AccessPoint controller.
-
-  Controller class for Asus AXE11000 6GHz AP. This class provides methods to
-  configure the AP with different settings required for 11ax and 6GHz testing.
-  The controller uses chrome webdriver to communicate with the AP.
-
-  The controller object is initiated in the test class. The ACTS test runner
-  calls this controller using the 'AsusAXE11000AP' keyword in the ACTS config
-  file. The AP is reset to default settings and this is handled during the
-  test teardown.
-
-  Attributes:
-    ip: IP address to reach the AP.
-    port: Port numnber to reach the AP.
-    protocol: Protcol to reach the AP (http/https).
-    username: Username to login to the AP.
-    password: Password to login to the AP.
-    config_page: web url to login to the AP.
-    ap_settings: AP settings configured at any given point.
-    default_ap_settings: Default AP settings before running the tests.
-    driver: chrome webdriver object to update the settings.
-  """
-
-  def __init__(self, config):
-    """Initialize AP.
-
-    Creates a chrome webdriver object based on the router parameters.
-    The webdriver will login to the router and goes to the wireless settings
-    page. This object will be used to change the router settings required for
-    the test cases. Required parameters are <ip_address>, <port>, <protocol>,
-    <admin_username> and <admin_password>.
-
-    Url: <procotol>://<ip_address>:<port>/Main_Login.asp
-    Login: <admin_username>/<admin_password>
-
-    Args:
-      config: dict, dictionary of router parameters required for webdriver.
-    """
-    self.ip = config["ip_address"]
-    self.port = config["port"]
-    self.protocol = config["protocol"]
-    self.username = config["admin_username"]
-    self.password = config["admin_password"]
-    lambda_msg = lambda msg: "[AsusAXE11000AP|%s] %s" % (self.ip, msg)
-    self.log = logger.create_logger(lambda_msg)
-    self.ap_settings = {"2g": {}, "5g": {}, "6g": {},}
-    self.config_page = (
-        "{protocol}://{ip_address}:{port}/Main_Login.asp").format(
-            protocol=self.protocol, ip_address=self.ip, port=self.port)
-    self.chrome_options = Options()
-    self.chrome_options.add_argument("--headless")
-    self.chrome_options.add_argument("--no-sandbox")
-    self.driver = webdriver.Chrome(options=self.chrome_options)
-    self.driver.implicitly_wait(BROWSER_WAIT_TIMEOUT*2)
-    self.driver.get(self.config_page)
-    self.driver.find_element_by_name(USERNAME).send_keys(self.username)
-    self.driver.find_element_by_name(PASSWORD).send_keys(self.password)
-    self.driver.find_element_by_id(SIGN_IN_ID).click()
-    self._wait_for_web_element(self.driver.find_element_by_id,
-                               WIRELESS_SETTINGS)
-    self.driver.find_element_by_id(WIRELESS_SETTINGS).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-    self._update_ap_settings()
-    self.default_ap_settings = self.ap_settings.copy()
-
-  ### Helper methods ###
-
-  def _wait_for_web_element(self,
-                            find_element,
-                            element,
-                            attribute=None,
-                            value=None):
-    """Verifies click actions/selections work.
-
-    Args:
-      find_element: func(), webdriver method to call
-      element: str, web element to look for. Ex: id, class, name
-      attribute: str, attribute to get from a webelement
-      value: str, verify attribute is set to the correct value
-
-    Raises:
-      ValueError: An error occurred if expected attribute not found.
-    """
-    curr_time = time.time()
-    while time.time() < curr_time + BROWSER_WAIT_TIMEOUT*4:
-      time.sleep(2)
-      try:
-        x = find_element(element)
-        if attribute and str(value) not in x.get_attribute(attribute):
-          raise ValueError("Attribute is not set to the right value")
-        return
-      except NoSuchElementException:
-        pass
-    raise ValueError("Failed to find web element: %s" % element)
-
-  def _update_ap_settings_2g_band(self):
-    """Read settings configured on 2g band.
-
-    Parameters Updated:
-      security type: open, wpa2-psk, wpa3-sae or wpa2-ent.
-      ssid: SSID of the wifi network.
-      password: password of the wifi network (if psk or sae network).
-      radius server ip: Radius server IP addr (if ent network).
-      radius server port: Radius server Port number (if ent network).
-      radius server secret: Radius server secret (if ent network).
-      channel: 2G band channel.
-    """
-    dict_2g = {}
-    dict_2g["security"] = self.driver.find_element_by_name(
-        BAND_2G_AUTH).get_attribute("value")
-    dict_2g["SSID"] = self.driver.find_element_by_name(
-        BAND_2G_SSID).get_attribute("value")
-    if dict_2g["security"] == "psk2" or dict_2g["security"] == "sae":
-      dict_2g["password"] = self.driver.find_element_by_name(
-          BAND_2G_PSK).get_attribute("value")
-    elif dict_2g["security"] == "wpa2":
-      dict_2g["radius_ip_addr"] = self.driver.find_element_by_name(
-          BAND_2G_RAD_IP).get_attribute("value")
-      dict_2g["radius_port"] = self.driver.find_element_by_name(
-          BAND_2G_RAD_PORT).get_attribute("value")
-      dict_2g["radius_secret"] = self.driver.find_element_by_name(
-          BAND_2G_RAD_KEY).get_attribute("value")
-    channel_field = self._get_webdriver_elements_for_channels("2g")
-    ch_val = self.driver.find_element_by_name(channel_field).get_attribute(
-        "value")
-    channel = 0
-    for key, val in CHANNELS_2G.items():
-      if val == ch_val:
-        channel = key
-        break
-    self.ap_settings["2g"] = dict_2g.copy()
-    self.ap_settings["2g"]["channel"] = channel
-
-  def _update_ap_settings_5g_band(self):
-    """Read settings configured on 5g band.
-
-    Parameters Updated:
-      security type: open, wpa2-psk, wpa3-sae or wpa2-ent.
-      ssid: SSID of the wifi network.
-      password: password of the wifi network (if psk or sae network).
-      radius server ip: Radius server IP addr (if ent network).
-      radius server port: Radius server Port number (if ent network).
-      radius server secret: Radius server secret (if ent network).
-      channel: 5G band channel.
-    """
-    dict_5g = {}
-    dict_5g["security"] = self.driver.find_element_by_name(
-        BAND_5G_AUTH).get_attribute("value")
-    dict_5g["SSID"] = self.driver.find_element_by_name(
-        BAND_5G_SSID).get_attribute("value")
-    if dict_5g["security"] == "psk2" or dict_5g["security"] == "sae":
-      dict_5g["password"] = self.driver.find_element_by_name(
-          BAND_5G_PSK).get_attribute("value")
-    elif dict_5g["security"] == "wpa2":
-      dict_5g["radius_ip_addr"] = self.driver.find_element_by_name(
-          BAND_5G_RAD_IP).get_attribute("value")
-      dict_5g["radius_port"] = self.driver.find_element_by_name(
-          BAND_5G_RAD_PORT).get_attribute("value")
-      dict_5g["radius_secret"] = self.driver.find_element_by_name(
-          BAND_5G_RAD_KEY).get_attribute("value")
-    channel_field = self._get_webdriver_elements_for_channels("5g")
-    ch_val = self.driver.find_element_by_name(channel_field).get_attribute(
-        "value")
-    channel = 0
-    for key, val in CHANNELS_5G.items():
-      if val == ch_val:
-        channel = key
-        break
-    self.ap_settings["5g"] = dict_5g.copy()
-    self.ap_settings["5g"]["channel"] = channel
-
-  def _update_ap_settings_6g_band(self):
-    """Read settings configured on 6g band.
-
-    Parameters Updated:
-      security type: wpa3-owe, wpa3-sae.
-      ssid: SSID of the wifi network.
-      password: password of the wifi network (if sae network).
-      channel: 6G band channel.
-    """
-    dict_6g = {}
-    dict_6g["security"] = self.driver.find_element_by_name(
-        BAND_6G_AUTH).get_attribute("value")
-    dict_6g["SSID"] = self.driver.find_element_by_name(
-        BAND_6G_SSID).get_attribute("value")
-    if dict_6g["security"] == "sae":
-      dict_6g["password"] = self.driver.find_element_by_name(
-          BAND_6G_PSK).get_attribute("value")
-    channel_field = self._get_webdriver_elements_for_channels("6g")
-    ch_val = self.driver.find_element_by_name(channel_field).get_attribute(
-        "value")
-    channel = 0
-    for key, val in CHANNELS_6G.items():
-      if val == ch_val:
-        channel = key
-        break
-    self.ap_settings["6g"] = dict_6g.copy()
-    self.ap_settings["6g"]["channel"] = channel
-
-  def _update_ap_settings(self):
-    """Read AP settings of 2G, 5G and 6G bands.
-
-    This method reads the wifi network currently configured on any particular
-    band. The settings are updated to self.ap_settings object.
-    """
-    self.driver.refresh()
-    self._update_ap_settings_2g_band()
-    self._update_ap_settings_5g_band()
-    self._update_ap_settings_6g_band()
-
-  def _get_webdriver_elements_for_channels(self, band):
-    """Return webdriver elements for the band to configure channel.
-
-    Args:
-      band: str, Wifi band to configure. Ex: 2g, 5g, 6g.
-
-    Returns:
-      channel field for the specific band.
-    """
-    channel_field = BAND_2G_CHANNEL
-    if band == "5g":
-      channel_field = BAND_5G_CHANNEL
-    elif band == "6g":
-      channel_field = BAND_6G_CHANNEL
-    return channel_field
-
-  def _set_channel(self, band, channel):
-    """Configure channel on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      channel: int, Channel to set.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if (band == "2g" and channel not in CHANNELS_2G) or (
-        band == "5g" and
-        channel not in CHANNELS_5G) or (band == "6g" and
-                                        channel not in CHANNELS_6G):
-      raise ValueError("Channel %s is not supported in band %s" %
-                       (channel, band))
-    channel_field = self._get_webdriver_elements_for_channels(band)
-    channels_val_dict = CHANNELS_6G
-    if band == "2g":
-      channels_val_dict = CHANNELS_2G
-    elif band == "5g":
-      channels_val_dict = CHANNELS_5G
-    channel = channels_val_dict[channel]
-
-    # Set channel
-    if self.driver.find_element_by_name(channel_field).get_attribute(
-        "value") != channel:
-      css_selector = "select[name=%s]" % channel_field
-      Select(self.driver.find_element_by_css_selector(
-          css_selector)).select_by_value(channel)
-      time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-
-  def _configure_personal_network(self, band, auth, ssid=None, password=None):
-    """Configure wpa3 sae/wpa2 psk network on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      auth: str, WPA2 PSK or WPA3 SAE security.
-      ssid: str, ssid of the wifi network.
-      password: str, password of the wifi network.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if band == "6g" and auth == "psk2":
-      raise ValueError("AP doesn't support WPA2 PSK on 6g band.")
-    (auth_field, ssid_field,
-     psk_field) = self._get_webdriver_elements_for_personal_auth(band)
-
-    # configure personal network
-    css_selector = "select[name=%s]" % auth_field
-    Select(self.driver.find_element_by_css_selector(
-        css_selector)).select_by_value(auth)
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-    if ssid:
-      self.driver.find_element_by_name(ssid_field).clear()
-      self.driver.find_element_by_name(ssid_field).send_keys(ssid)
-    if password:
-      self.driver.find_element_by_name(psk_field).clear()
-      self.driver.find_element_by_name(psk_field).send_keys(password)
-
-  def _configure_open_owe_network(self, band, auth, ssid=None):
-    """Configure wpa3 owe/open network on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      auth: str, WPA2 PSK or WPA3 SAE security.
-      ssid: str, ssid of the wifi network.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if band == "6g" and auth == "open":
-      raise ValueError("AP doesn't support open network on 6g band.")
-    if (band == "2g" or band == "5g") and auth == "owe":
-      raise ValueError("AP doesn't support OWE on 2g and 5g bands.")
-    (auth_field, ssid_field,
-     _) = self._get_webdriver_elements_for_personal_auth(band)
-
-    # Configure wifi network
-    css_selector = "select[name=%s]" % auth_field
-    Select(self.driver.find_element_by_css_selector(
-        css_selector)).select_by_value(auth)
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-    if ssid:
-      self.driver.find_element_by_name(ssid_field).clear()
-      self.driver.find_element_by_name(ssid_field).send_keys(ssid)
-
-  def _configure_wpa2_ent_network(self, band, radius_ip, radius_port,
-                                  radius_secret, ssid=None):
-    """Configure wpa2 ent network on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g.
-      radius_ip: str, radius server ip addr.
-      radius_port: str, radius server port number.
-      radius_secret: str, radius server secret.
-      ssid: str, ssid of the wifi network.
-
-    Raises:
-      ValueError: An error occurred due to invalid band or configuration.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    if band == "6g":
-      raise ValueError("6GHz doesn't support enterprise network on this AP.")
-    (auth_field, ssid_field,
-     _) = self._get_webdriver_elements_for_personal_auth(band)
-    (rad_ip_field, rad_port_field,
-     rad_key_field) = self._get_webdriver_elements_for_ent_auth(band)
-
-    # Set enterprise network
-    css_selector = "select[name=%s]" % auth_field
-    Select(self.driver.find_element_by_css_selector(
-        css_selector)).select_by_value("wpa2")
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-    if ssid:
-      self.driver.find_element_by_name(ssid_field).clear()
-      self.driver.find_element_by_name(ssid_field).send_keys(ssid)
-    self.driver.find_element_by_name(rad_ip_field).clear()
-    self.driver.find_element_by_name(rad_ip_field).send_keys(radius_ip)
-    self.driver.find_element_by_name(rad_port_field).clear()
-    self.driver.find_element_by_name(rad_port_field).send_keys(radius_port)
-    self.driver.find_element_by_name(rad_key_field).clear()
-    self.driver.find_element_by_name(rad_key_field).send_keys(radius_secret)
-
-  def _get_webdriver_elements_for_personal_auth(self, band):
-    """Return webdriver elements for the band to configure personal auth.
-
-    Args:
-      band: str, Wifi band to configure. Ex: 2g, 5g, 6g.
-
-    Returns:
-      tuple of auth, ssid, psk field for the band.
-    """
-    auth_field = BAND_2G_AUTH
-    ssid_field = BAND_2G_SSID
-    psk_field = BAND_2G_PSK
-    if band == "5g":
-      auth_field = BAND_5G_AUTH
-      ssid_field = BAND_5G_SSID
-      psk_field = BAND_5G_PSK
-    elif band == "6g":
-      auth_field = BAND_6G_AUTH
-      ssid_field = BAND_6G_SSID
-      psk_field = BAND_6G_PSK
-    return (auth_field, ssid_field, psk_field)
-
-  def _get_webdriver_elements_for_ent_auth(self, band):
-    """Return webdriver elements for the band to configure ent auth.
-
-    Args:
-      band: str, Wifi band to configure. Ex: 2g, 5g, 6g.
-
-    Returns:
-      tuple of radius server IP, port, secret for the band.
-    """
-    rad_ip_field = BAND_2G_RAD_IP
-    rad_port_field = BAND_2G_RAD_PORT
-    rad_key_field = BAND_2G_RAD_KEY
-    if band == "5g":
-      rad_ip_field = BAND_5G_RAD_IP
-      rad_port_field = BAND_5G_RAD_PORT
-      rad_key_field = BAND_5G_RAD_KEY
-    return (rad_ip_field, rad_port_field, rad_key_field)
-
-  ### Methods to configure AP ###
-
-  def set_channel_and_apply(self, band, channel):
-    """Set channel for specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      channel: int, Channel to set.
-    """
-    # Go back to General tab in advanced settings
-    self.driver.find_element_by_id(GENERAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-    channel_field = self._get_webdriver_elements_for_channels(band)
-    self._set_channel(band, channel)
-    self.driver.find_element_by_id(APPLY_BUTTON_ID).click()
-    time.sleep(BROWSER_WAIT_LONG_TIMEOUT)
-    self._wait_for_web_element(self.driver.find_element_by_name,
-                               channel_field, "value", channel)
-    self._update_ap_settings()
-
-  def get_configured_channel(self, band):
-    """Get the channel configured on specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: eg, 5g, 6g.
-
-    Returns:
-      Channel configured on the band.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    return self.ap_settings[band]["channel"]
-
-  def configure_ap(self, network_dict):
-    """Configure AP with settings for different bands.
-
-    Args:
-      network_dict: dict, dictionary that holds configuration for each band.
-    """
-    # Go back to General tab in advanced settings
-    self.driver.refresh()
-    self.driver.find_element_by_id(GENERAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-    # configure wireless settings
-    self.log.info("Network dictionary: %s" % network_dict)
-    for band in network_dict:
-      security = network_dict[band]["security"]
-      ssid = network_dict[band]["SSID"] if "SSID" in network_dict[
-          band] else None
-      password = network_dict[band]["password"] if "password" in network_dict[
-          band] else None
-      if security == "open" or security == "owe":
-        self._configure_open_owe_network(band, security, ssid)
-      elif security == "psk2" or security == "sae":
-        self._configure_personal_network(band, security, ssid, password)
-      elif network_dict[band]["security"] == "wpa2":
-        self._configure_wpa2_ent_network(
-            band,
-            network_dict[band]["radius_server_ip"],
-            network_dict[band]["radius_server_port"],
-            network_dict[band]["radius_server_secret"],
-            ssid)
-
-    for band in network_dict:
-      if "channel" in network_dict[band]:
-        self._set_channel(band, network_dict[band]["channel"])
-    self.driver.find_element_by_id(APPLY_BUTTON_ID).click()
-    time.sleep(BROWSER_WAIT_LONG_TIMEOUT)
-
-    # update ap settings
-    self._update_ap_settings()
-
-    # configure hidden or 11ax mode
-    for band in network_dict:
-      apply_settings = False
-      if "hidden" in network_dict[band]:
-        res = self._configure_hidden_network(band, network_dict[band]["hidden"])
-        apply_settings = apply_settings or res
-      if "11ax" in network_dict[band]:
-        res = self._configure_11ax_mode(band, network_dict[band]["11ax"])
-        apply_settings = apply_settings or res
-      if apply_settings:
-        self.driver.find_element_by_id(APPLY_BUTTON).click()
-        time.sleep(BROWSER_WAIT_VERY_LONG_TIMEOUT)
-
-  def get_wifi_network(self, band):
-    """Get wifi network configured on the AP for the specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-
-    Returns:
-      Wifi network as a dictionary.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-    wifi_network = {}
-    wifi_network["SSID"] = self.ap_settings[band]["SSID"]
-    if "password" in self.ap_settings[band]:
-      wifi_network["password"] = self.ap_settings[band]["password"]
-    security = self.ap_settings[band]["security"]
-    if security == "sae" or security == "owe":
-      wifi_network["security"] = security
-    return wifi_network
-
-  def _configure_hidden_network(self, band, val):
-    """Configure hidden network for a specific band.
-
-    Args:
-      band: str, Wifi band to configure hidden network.
-      val: str, String value to configure.
-
-    Returns:
-      True if settings applied, False if not.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-
-    # Go to Professional tab in advanced settings
-    self.driver.find_element_by_id(PROFESSIONAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, HE_MODE_ID)
-
-    # Select the requested band from the drop down menu
-    css_selector = "select[name=%s]" % WL_UNIT
-    Select(
-        self.driver.find_element_by_css_selector(css_selector)).select_by_value(
-            WL_BAND_VALUE[band])  # (TODO: gmoturu@) find if selection worked
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-
-    # Configure hidden network
-    state = True if val == "1" else False
-    return_result = False
-    if self.driver.find_element_by_name(WL_CLOSED).is_selected() != state:
-      css_selector = "input[name='%s'][value='%s']" % (WL_CLOSED, val)
-      self.driver.find_element_by_css_selector(css_selector).click()
-      time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-      return_result = True
-
-    return return_result
-
-  def configure_hidden_network_and_apply(self, band, state=True):
-    """Configure hidden network for a specific band.
-
-    Args:
-      band: str, Wifi band to configure hidden network.
-      state: bool, Set the wifi network as hidden if True, False if not.
-    """
-    val = "1" if state else "0"
-    if self._configure_hidden_network(band, val):
-      self.driver.find_element_by_id(APPLY_BUTTON).click()
-      time.sleep(BROWSER_WAIT_VERY_LONG_TIMEOUT)
-      if self.driver.find_element_by_name(WL_CLOSED).is_selected() != state:
-        raise ValueError("Failed to configure hidden network on band: %s" % band)
-
-      # Go back to General tab in advanced settings
-      self.driver.find_element_by_id(GENERAL_TAB).click()
-      self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-  def _configure_11ax_mode(self, band, val):
-    """Configure 11ax mode on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      val: str, String value to configure.
-
-    Returns:
-      True if settings are applied, False if not.
-
-    Raises:
-      ValueError: An error occurred due to invalid band.
-    """
-    band = band.lower()
-    if band not in VALID_BANDS:
-      raise ValueError("Band %s is not valid" % band)
-
-    # Go to Professional tab in advanced settings
-    self.driver.find_element_by_id(PROFESSIONAL_TAB).click()
-    self._wait_for_web_element(self.driver.find_element_by_id, HE_MODE_ID)
-
-    # Select the requested band from the drop down menu
-    css_selector = "select[name=%s]" % WL_UNIT
-    Select(
-        self.driver.find_element_by_css_selector(css_selector)).select_by_value(
-            WL_BAND_VALUE[band])  # (TODO: gmoturu@) find if selection worked
-    time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-
-    # Configure 11ax
-    return_result = False
-    if self.driver.find_element_by_name(WL_11AX).get_attribute(
-        "value") != val:
-      css_selector = "select[name=%s]" % WL_11AX
-      Select(self.driver.find_element_by_css_selector(
-          css_selector)).select_by_value(val)
-      time.sleep(BROWSER_WAIT_SHORT_TIMEOUT)
-      return_result = True
-
-    return return_result
-
-  def configure_11ax_mode_and_apply(self, band, state=True):
-    """Configure 11ax mode on a specific band.
-
-    Args:
-      band: str, Wifi band to check. Ex: 2g, 5g, 6g.
-      state: bool, Enable 11ax if True, disable if False
-    """
-    val = "1" if state else "0"
-    if self._configure_11ax_mode(band, val):
-      self.driver.find_element_by_id(APPLY_BUTTON).click()
-      time.sleep(BROWSER_WAIT_VERY_LONG_TIMEOUT)
-      self._wait_for_web_element(self.driver.find_element_by_name, WL_11AX,
-                                 "value", val)
-
-      # Go back to General tab in advanced settings
-      self.driver.find_element_by_id(GENERAL_TAB).click()
-      self._wait_for_web_element(self.driver.find_element_by_id, SMART_CONNECT)
-
-  def reset_to_default_ap_settings(self):
-    """Reset AP to the default settings."""
-    if self.default_ap_settings != self.ap_settings:
-      self.configure_ap(self.default_ap_settings)
-
diff --git a/src/antlion/controllers/attenuator.py b/src/antlion/controllers/attenuator.py
deleted file mode 100644
index ff68f5b..0000000
--- a/src/antlion/controllers/attenuator.py
+++ /dev/null
@@ -1,415 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import importlib
-import logging
-
-from antlion.keys import Config
-from antlion.libs.proc import job
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'Attenuator'
-ACTS_CONTROLLER_REFERENCE_NAME = 'attenuators'
-_ATTENUATOR_OPEN_RETRIES = 3
-
-
-def create(configs):
-    objs = []
-    for c in configs:
-        attn_model = c['Model']
-        # Default to telnet.
-        protocol = c.get('Protocol', 'telnet')
-        module_name = 'antlion.controllers.attenuator_lib.%s.%s' % (attn_model,
-                                                                 protocol)
-        module = importlib.import_module(module_name)
-        inst_cnt = c['InstrumentCount']
-        attn_inst = module.AttenuatorInstrument(inst_cnt)
-        attn_inst.model = attn_model
-
-        ip_address = c[Config.key_address.value]
-        port = c[Config.key_port.value]
-
-        for attempt_number in range(1, _ATTENUATOR_OPEN_RETRIES + 1):
-            try:
-                attn_inst.open(ip_address, port)
-            except Exception as e:
-                logging.error('Attempt %s to open connection to attenuator '
-                              'failed: %s' % (attempt_number, e))
-                if attempt_number == _ATTENUATOR_OPEN_RETRIES:
-                    ping_output = job.run('ping %s -c 1 -w 1' % ip_address,
-                                          ignore_status=True)
-                    if ping_output.exit_status == 1:
-                        logging.error('Unable to ping attenuator at %s' %
-                                      ip_address)
-                    else:
-                        logging.error('Able to ping attenuator at %s' %
-                                      ip_address)
-                        job.run('echo "q" | telnet %s %s' % (ip_address, port),
-                                ignore_status=True)
-                    raise
-        for i in range(inst_cnt):
-            attn = Attenuator(attn_inst, idx=i)
-            if 'Paths' in c:
-                try:
-                    setattr(attn, 'path', c['Paths'][i])
-                except IndexError:
-                    logging.error('No path specified for attenuator %d.', i)
-                    raise
-            objs.append(attn)
-    return objs
-
-
-def get_info(attenuators):
-    """Get information on a list of Attenuator objects.
-
-    Args:
-        attenuators: A list of Attenuator objects.
-
-    Returns:
-        A list of dict, each representing info for Attenuator objects.
-    """
-    device_info = []
-    for attenuator in attenuators:
-        info = {
-            "Address": attenuator.instrument.address,
-            "Attenuator_Port": attenuator.idx
-        }
-        device_info.append(info)
-    return device_info
-
-
-def destroy(objs):
-    for attn in objs:
-        attn.instrument.close()
-
-
-def get_attenuators_for_device(device_attenuator_configs, attenuators,
-                               attenuator_key):
-    """Gets the list of attenuators associated to a specified device and builds
-    a list of the attenuator objects associated to the ip address in the
-    device's section of the ACTS config and the Attenuator's IP address.  In the
-    example below the access point object has an attenuator dictionary with
-    IP address associated to an attenuator object.  The address is the only
-    mandatory field and the 'attenuator_ports_wifi_2g' and
-    'attenuator_ports_wifi_5g' are the attenuator_key specified above.  These
-    can be anything and is sent in as a parameter to this function.  The numbers
-    in the list are ports that are in the attenuator object.  Below is an
-    standard Access_Point object and the link to a standard Attenuator object.
-    Notice the link is the IP address, which is why the IP address is mandatory.
-
-    "AccessPoint": [
-        {
-          "ssh_config": {
-            "user": "root",
-            "host": "192.168.42.210"
-          },
-          "Attenuator": [
-            {
-              "Address": "192.168.42.200",
-              "attenuator_ports_wifi_2g": [
-                0,
-                1,
-                3
-              ],
-              "attenuator_ports_wifi_5g": [
-                0,
-                1
-              ]
-            }
-          ]
-        }
-      ],
-      "Attenuator": [
-        {
-          "Model": "minicircuits",
-          "InstrumentCount": 4,
-          "Address": "192.168.42.200",
-          "Port": 23
-        }
-      ]
-    Args:
-        device_attenuator_configs: A list of attenuators config information in
-            the acts config that are associated a particular device.
-        attenuators: A list of all of the available attenuators objects
-            in the testbed.
-        attenuator_key: A string that is the key to search in the device's
-            configuration.
-
-    Returns:
-        A list of attenuator objects for the specified device and the key in
-        that device's config.
-    """
-    attenuator_list = []
-    for device_attenuator_config in device_attenuator_configs:
-        for attenuator_port in device_attenuator_config[attenuator_key]:
-            for attenuator in attenuators:
-                if (attenuator.instrument.address ==
-                        device_attenuator_config['Address']
-                        and attenuator.idx is attenuator_port):
-                    attenuator_list.append(attenuator)
-    return attenuator_list
-
-
-"""Classes for accessing, managing, and manipulating attenuators.
-
-Users will instantiate a specific child class, but almost all operation should
-be performed on the methods and data members defined here in the base classes
-or the wrapper classes.
-"""
-
-
-class AttenuatorError(Exception):
-    """Base class for all errors generated by Attenuator-related modules."""
-
-
-class InvalidDataError(AttenuatorError):
-    """"Raised when an unexpected result is seen on the transport layer.
-
-    When this exception is seen, closing an re-opening the link to the
-    attenuator instrument is probably necessary. Something has gone wrong in
-    the transport.
-    """
-
-
-class InvalidOperationError(AttenuatorError):
-    """Raised when the attenuator's state does not allow the given operation.
-
-    Certain methods may only be accessed when the instance upon which they are
-    invoked is in a certain state. This indicates that the object is not in the
-    correct state for a method to be called.
-    """
-
-
-class AttenuatorInstrument(object):
-    """Defines the primitive behavior of all attenuator instruments.
-
-    The AttenuatorInstrument class is designed to provide a simple low-level
-    interface for accessing any step attenuator instrument comprised of one or
-    more attenuators and a controller. All AttenuatorInstruments should override
-    all the methods below and call AttenuatorInstrument.__init__ in their
-    constructors. Outside of setup/teardown, devices should be accessed via
-    this generic "interface".
-    """
-    model = None
-    INVALID_MAX_ATTEN = 999.9
-
-    def __init__(self, num_atten=0):
-        """This is the Constructor for Attenuator Instrument.
-
-        Args:
-            num_atten: The number of attenuators contained within the
-                instrument. In some instances setting this number to zero will
-                allow the driver to auto-determine the number of attenuators;
-                however, this behavior is not guaranteed.
-
-        Raises:
-            NotImplementedError if initialization is called from this class.
-        """
-
-        if type(self) is AttenuatorInstrument:
-            raise NotImplementedError(
-                'Base class should not be instantiated directly!')
-
-        self.num_atten = num_atten
-        self.max_atten = AttenuatorInstrument.INVALID_MAX_ATTEN
-        self.properties = None
-
-    def set_atten(self, idx, value, strict=True, retry=False):
-        """Sets the attenuation given its index in the instrument.
-
-        Args:
-            idx: A zero based index used to identify a particular attenuator in
-                an instrument.
-            value: a floating point value for nominal attenuation to be set.
-            strict: if True, function raises an error when given out of
-                bounds attenuation values, if false, the function sets out of
-                bounds values to 0 or max_atten.
-            retry: if True, command will be retried if possible
-        """
-        raise NotImplementedError('Base class should not be called directly!')
-
-    def get_atten(self, idx, retry=False):
-        """Returns the current attenuation of the attenuator at index idx.
-
-        Args:
-            idx: A zero based index used to identify a particular attenuator in
-                an instrument.
-            retry: if True, command will be retried if possible
-
-        Returns:
-            The current attenuation value as a floating point value
-        """
-        raise NotImplementedError('Base class should not be called directly!')
-
-
-class Attenuator(object):
-    """An object representing a single attenuator in a remote instrument.
-
-    A user wishing to abstract the mapping of attenuators to physical
-    instruments should use this class, which provides an object that abstracts
-    the physical implementation and allows the user to think only of attenuators
-    regardless of their location.
-    """
-
-    def __init__(self, instrument, idx=0, offset=0):
-        """This is the constructor for Attenuator
-
-        Args:
-            instrument: Reference to an AttenuatorInstrument on which the
-                Attenuator resides
-            idx: This zero-based index is the identifier for a particular
-                attenuator in an instrument.
-            offset: A power offset value for the attenuator to be used when
-                performing future operations. This could be used for either
-                calibration or to allow group operations with offsets between
-                various attenuators.
-
-        Raises:
-            TypeError if an invalid AttenuatorInstrument is passed in.
-            IndexError if the index is out of range.
-        """
-        if not isinstance(instrument, AttenuatorInstrument):
-            raise TypeError('Must provide an Attenuator Instrument Ref')
-        self.model = instrument.model
-        self.instrument = instrument
-        self.idx = idx
-        self.offset = offset
-
-        if self.idx >= instrument.num_atten:
-            raise IndexError(
-                'Attenuator index out of range for attenuator instrument')
-
-    def set_atten(self, value, strict=True, retry=False):
-        """Sets the attenuation.
-
-        Args:
-            value: A floating point value for nominal attenuation to be set.
-            strict: if True, function raises an error when given out of
-                bounds attenuation values, if false, the function sets out of
-                bounds values to 0 or max_atten.
-            retry: if True, command will be retried if possible
-
-        Raises:
-            ValueError if value + offset is greater than the maximum value.
-        """
-        if value + self.offset > self.instrument.max_atten and strict:
-            raise ValueError(
-                'Attenuator Value+Offset greater than Max Attenuation!')
-
-        self.instrument.set_atten(self.idx,
-                                  value + self.offset,
-                                  strict=strict,
-                                  retry=retry)
-
-    def get_atten(self, retry=False):
-        """Returns the attenuation as a float, normalized by the offset."""
-        return self.instrument.get_atten(self.idx, retry) - self.offset
-
-    def get_max_atten(self):
-        """Returns the max attenuation as a float, normalized by the offset."""
-        if self.instrument.max_atten == AttenuatorInstrument.INVALID_MAX_ATTEN:
-            raise ValueError('Invalid Max Attenuator Value')
-
-        return self.instrument.max_atten - self.offset
-
-
-class AttenuatorGroup(object):
-    """An abstraction for groups of attenuators that will share behavior.
-
-    Attenuator groups are intended to further facilitate abstraction of testing
-    functions from the physical objects underlying them. By adding attenuators
-    to a group, it is possible to operate on functional groups that can be
-    thought of in a common manner in the test. This class is intended to provide
-    convenience to the user and avoid re-implementation of helper functions and
-    small loops scattered throughout user code.
-    """
-
-    def __init__(self, name=''):
-        """This constructor for AttenuatorGroup
-
-        Args:
-            name: An optional parameter intended to further facilitate the
-                passing of easily tracked groups of attenuators throughout code.
-                It is left to the user to use the name in a way that meets their
-                needs.
-        """
-        self.name = name
-        self.attens = []
-        self._value = 0
-
-    def add_from_instrument(self, instrument, indices):
-        """Adds an AttenuatorInstrument to the group.
-
-        This function will create Attenuator objects for all of the indices
-        passed in and add them to the group.
-
-        Args:
-            instrument: the AttenuatorInstrument to pull attenuators from.
-                indices: The index or indices to add to the group. Either a
-                range, a list, or a single integer.
-
-        Raises
-        ------
-        TypeError
-            Requires a valid AttenuatorInstrument to be passed in.
-        """
-        if not instrument or not isinstance(instrument, AttenuatorInstrument):
-            raise TypeError('Must provide an Attenuator Instrument Ref')
-
-        if type(indices) is range or type(indices) is list:
-            for i in indices:
-                self.attens.append(Attenuator(instrument, i))
-        elif type(indices) is int:
-            self.attens.append(Attenuator(instrument, indices))
-
-    def add(self, attenuator):
-        """Adds an already constructed Attenuator object to this group.
-
-        Args:
-            attenuator: An Attenuator object.
-
-        Raises:
-            TypeError if the attenuator parameter is not an Attenuator.
-        """
-        if not isinstance(attenuator, Attenuator):
-            raise TypeError('Must provide an Attenuator')
-
-        self.attens.append(attenuator)
-
-    def synchronize(self):
-        """Sets all grouped attenuators to the group's attenuation value."""
-        self.set_atten(self._value)
-
-    def is_synchronized(self):
-        """Returns true if all attenuators have the synchronized value."""
-        for att in self.attens:
-            if att.get_atten() != self._value:
-                return False
-        return True
-
-    def set_atten(self, value):
-        """Sets the attenuation value of all attenuators in the group.
-
-        Args:
-            value: A floating point value for nominal attenuation to be set.
-        """
-        value = float(value)
-        for att in self.attens:
-            att.set_atten(value)
-        self._value = value
-
-    def get_atten(self):
-        """Returns the current attenuation setting of AttenuatorGroup."""
-        return float(self._value)
diff --git a/src/antlion/controllers/attenuator_lib/_tnhelper.py b/src/antlion/controllers/attenuator_lib/_tnhelper.py
deleted file mode 100644
index 643ccdf..0000000
--- a/src/antlion/controllers/attenuator_lib/_tnhelper.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""A helper module to communicate over telnet with AttenuatorInstruments.
-
-User code shouldn't need to directly access this class.
-"""
-
-import logging
-import telnetlib
-import re
-from antlion.controllers import attenuator
-from antlion.libs.proc import job
-
-
-def _ascii_string(uc_string):
-    return str(uc_string).encode('ASCII')
-
-
-class _TNHelper(object):
-    """An internal helper class for Telnet+SCPI command-based instruments.
-
-    It should only be used by those implementation control libraries and not by
-    any user code directly.
-    """
-    def __init__(self,
-                 tx_cmd_separator='\n',
-                 rx_cmd_separator='\n',
-                 prompt=''):
-        self._tn = None
-        self._ip_address = None
-        self._port = None
-
-        self.tx_cmd_separator = tx_cmd_separator
-        self.rx_cmd_separator = rx_cmd_separator
-        self.prompt = prompt
-
-    def open(self, host, port=23):
-        self._ip_address = host
-        self._port = port
-        if self._tn:
-            self._tn.close()
-        logging.debug("Telnet Server IP = %s" % host)
-        self._tn = telnetlib.Telnet()
-        self._tn.open(host, port, 10)
-
-    def is_open(self):
-        return bool(self._tn)
-
-    def close(self):
-        if self._tn:
-            self._tn.close()
-            self._tn = None
-
-    def diagnose_telnet(self):
-        """Function that diagnoses telnet connections.
-
-        This function diagnoses telnet connections and can be used in case of
-        command failures. The function checks if the devices is still reachable
-        via ping, and whether or not it can close and reopen the telnet
-        connection.
-
-        Returns:
-            False when telnet server is unreachable or unresponsive
-            True when telnet server is reachable and telnet connection has been
-            successfully reopened
-        """
-        logging.debug('Diagnosing telnet connection')
-        try:
-            job_result = job.run('ping {} -c 5 -i 0.2'.format(
-                self._ip_address))
-        except:
-            logging.error("Unable to ping telnet server.")
-            return False
-        ping_output = job_result.stdout
-        if not re.search(r' 0% packet loss', ping_output):
-            logging.error('Ping Packets Lost. Result: {}'.format(ping_output))
-            return False
-        try:
-            self.close()
-        except:
-            logging.error('Cannot close telnet connection.')
-            return False
-        try:
-            self.open(self._ip_address, self._port)
-        except:
-            logging.error('Cannot reopen telnet connection.')
-            return False
-        logging.debug('Telnet connection likely recovered')
-        return True
-
-    def cmd(self, cmd_str, wait_ret=True, retry=False):
-        if not isinstance(cmd_str, str):
-            raise TypeError('Invalid command string', cmd_str)
-
-        if not self.is_open():
-            raise attenuator.InvalidOperationError(
-                'Telnet connection not open for commands')
-
-        cmd_str.strip(self.tx_cmd_separator)
-        self._tn.read_until(_ascii_string(self.prompt), 2)
-        self._tn.write(_ascii_string(cmd_str + self.tx_cmd_separator))
-
-        if wait_ret is False:
-            return None
-
-        match_idx, match_val, ret_text = self._tn.expect(
-            [_ascii_string('\S+' + self.rx_cmd_separator)], 1)
-
-        logging.debug('Telnet Command: {}'.format(cmd_str))
-        logging.debug('Telnet Reply: ({},{},{})'.format(
-            match_idx, match_val, ret_text))
-
-        if match_idx == -1:
-            telnet_recovered = self.diagnose_telnet()
-            if telnet_recovered and retry:
-                logging.debug('Retrying telnet command once.')
-                return self.cmd(cmd_str, wait_ret, retry=False)
-            else:
-                raise attenuator.InvalidDataError(
-                    'Telnet command failed to return valid data')
-
-        ret_text = ret_text.decode()
-        ret_text = ret_text.strip(self.tx_cmd_separator +
-                                  self.rx_cmd_separator + self.prompt)
-
-        return ret_text
diff --git a/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py b/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py
deleted file mode 100644
index 9d38214..0000000
--- a/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py
+++ /dev/null
@@ -1,131 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Class for Telnet control of Aeroflex 832X and 833X Series Attenuator Modules
-
-This class provides a wrapper to the Aeroflex attenuator modules for purposes
-of simplifying and abstracting control down to the basic necessities. It is
-not the intention of the module to expose all functionality, but to allow
-interchangeable HW to be used.
-
-See http://www.aeroflex.com/ams/weinschel/PDFILES/IM-608-Models-8320-&-8321-preliminary.pdf
-"""
-
-from antlion.controllers import attenuator
-from antlion.controllers.attenuator_lib import _tnhelper
-
-
-class AttenuatorInstrument(attenuator.AttenuatorInstrument):
-
-    def __init__(self, num_atten=0):
-        super(AttenuatorInstrument, self).__init__(num_atten)
-
-        self._tnhelper = _tnhelper._TNHelper(tx_cmd_separator='\r\n',
-                                             rx_cmd_separator='\r\n',
-                                             prompt='>')
-        self.properties = None
-        self.address = None
-
-    def open(self, host, port=23):
-        """Opens a telnet connection to the desired AttenuatorInstrument and
-        queries basic information.
-
-        Args:
-            host: A valid hostname (IP address or DNS-resolvable name) to an
-            MC-DAT attenuator instrument.
-            port: An optional port number (defaults to telnet default 23)
-        """
-        self._tnhelper.open(host, port)
-
-        # work around a bug in IO, but this is a good thing to do anyway
-        self._tnhelper.cmd('*CLS', False)
-        self.address = host
-
-        if self.num_atten == 0:
-            self.num_atten = int(self._tnhelper.cmd('RFCONFIG? CHAN'))
-
-        configstr = self._tnhelper.cmd('RFCONFIG? ATTN 1')
-
-        self.properties = dict(zip(['model', 'max_atten', 'min_step',
-                                    'unknown', 'unknown2', 'cfg_str'],
-                                   configstr.split(", ", 5)))
-
-        self.max_atten = float(self.properties['max_atten'])
-
-    def is_open(self):
-        """Returns True if the AttenuatorInstrument has an open connection."""
-        return bool(self._tnhelper.is_open())
-
-    def close(self):
-        """Closes the telnet connection.
-
-        This should be called as part of any teardown procedure prior to the
-        attenuator instrument leaving scope.
-        """
-        self._tnhelper.close()
-
-    def set_atten(self, idx, value, **_):
-        """This function sets the attenuation of an attenuator given its index
-        in the instrument.
-
-        Args:
-            idx: A zero-based index that identifies a particular attenuator in
-                an instrument. For instruments that only have one channel, this
-                is ignored by the device.
-            value: A floating point value for nominal attenuation to be set.
-
-        Raises:
-            InvalidOperationError if the telnet connection is not open.
-            IndexError if the index is not valid for this instrument.
-            ValueError if the requested set value is greater than the maximum
-                attenuation value.
-        """
-        if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
-
-        if idx >= self.num_atten:
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
-
-        if value > self.max_atten:
-            raise ValueError('Attenuator value out of range!', self.max_atten,
-                             value)
-
-        self._tnhelper.cmd('ATTN ' + str(idx + 1) + ' ' + str(value), False)
-
-    def get_atten(self, idx, **_):
-        """Returns the current attenuation of the attenuator at the given index.
-
-        Args:
-            idx: The index of the attenuator.
-
-        Raises:
-            InvalidOperationError if the telnet connection is not open.
-
-        Returns:
-            the current attenuation value as a float
-        """
-        if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
-
-        #       Potentially redundant safety check removed for the moment
-        #       if idx >= self.num_atten:
-        #           raise IndexError("Attenuator index out of range!", self.num_atten, idx)
-
-        atten_val = self._tnhelper.cmd('ATTN? ' + str(idx + 1))
-
-        return float(atten_val)
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/http.py b/src/antlion/controllers/attenuator_lib/minicircuits/http.py
deleted file mode 100644
index c84f64b..0000000
--- a/src/antlion/controllers/attenuator_lib/minicircuits/http.py
+++ /dev/null
@@ -1,159 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Class for HTTP control of Mini-Circuits RCDAT series attenuators
-
-This class provides a wrapper to the MC-RCDAT attenuator modules for purposes
-of simplifying and abstracting control down to the basic necessities. It is
-not the intention of the module to expose all functionality, but to allow
-interchangeable HW to be used.
-
-See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf
-"""
-
-import urllib
-from antlion.controllers import attenuator
-
-
-class AttenuatorInstrument(attenuator.AttenuatorInstrument):
-    """A specific HTTP-controlled implementation of AttenuatorInstrument for
-    Mini-Circuits RC-DAT attenuators.
-
-    With the exception of HTTP-specific commands, all functionality is defined
-    by the AttenuatorInstrument class.
-    """
-
-    def __init__(self, num_atten=1):
-        super(AttenuatorInstrument, self).__init__(num_atten)
-        self._ip_address = None
-        self._port = None
-        self._timeout = None
-        self.address = None
-
-    def open(self, host, port=80, timeout=2):
-        """Initializes the AttenuatorInstrument and queries basic information.
-
-        Args:
-            host: A valid hostname (IP address or DNS-resolvable name) to an
-            MC-DAT attenuator instrument.
-            port: An optional port number (defaults to http default 80)
-            timeout: An optional timeout for http requests
-        """
-        self._ip_address = host
-        self._port = port
-        self._timeout = timeout
-        self.address = host
-
-        att_req = urllib.request.urlopen('http://{}:{}/MN?'.format(
-            self._ip_address, self._port))
-        config_str = att_req.read().decode('utf-8').strip()
-        if not config_str.startswith('MN='):
-            raise attenuator.InvalidDataError(
-                'Attenuator returned invalid data. Attenuator returned: {}'.
-                format(config_str))
-
-        config_str = config_str[len('MN='):]
-        self.properties = dict(
-            zip(['model', 'max_freq', 'max_atten'], config_str.split('-', 2)))
-        self.max_atten = float(self.properties['max_atten'])
-
-    def is_open(self):
-        """Returns True if the AttenuatorInstrument has an open connection.
-
-        Since this controller is based on HTTP requests, there is no connection
-        required and the attenuator is always ready to accept requests.
-        """
-        return True
-
-    def close(self):
-        """Closes the connection to the attenuator.
-
-        Since this controller is based on HTTP requests, there is no connection
-        teardowns required.
-        """
-
-    def set_atten(self, idx, value, strict=True, retry=False, **_):
-        """This function sets the attenuation of an attenuator given its index
-        in the instrument.
-
-        Args:
-            idx: A zero-based index that identifies a particular attenuator in
-                an instrument. For instruments that only have one channel, this
-                is ignored by the device.
-            value: A floating point value for nominal attenuation to be set.
-            strict: if True, function raises an error when given out of
-                bounds attenuation values, if false, the function sets out of
-                bounds values to 0 or max_atten.
-            retry: if True, command will be retried if possible
-
-        Raises:
-            InvalidDataError if the attenuator does not respond with the
-            expected output.
-        """
-        if not (0 <= idx < self.num_atten):
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
-
-        if value > self.max_atten and strict:
-            raise ValueError('Attenuator value out of range!', self.max_atten,
-                             value)
-        # The actual device uses one-based index for channel numbers.
-        adjusted_value = min(max(0, value), self.max_atten)
-        att_req = urllib.request.urlopen(
-            'http://{}:{}/CHAN:{}:SETATT:{}'.format(self._ip_address,
-                                                    self._port, idx + 1,
-                                                    adjusted_value),
-            timeout=self._timeout)
-        att_resp = att_req.read().decode('utf-8').strip()
-        if att_resp != '1':
-            if retry:
-                self.set_atten(idx, value, strict, retry=False)
-            else:
-                raise attenuator.InvalidDataError(
-                    'Attenuator returned invalid data. Attenuator returned: {}'
-                    .format(att_resp))
-
-    def get_atten(self, idx, retry=False, **_):
-        """Returns the current attenuation of the attenuator at the given index.
-
-        Args:
-            idx: The index of the attenuator.
-            retry: if True, command will be retried if possible
-
-        Raises:
-            InvalidDataError if the attenuator does not respond with the
-            expected outpu
-
-        Returns:
-            the current attenuation value as a float
-        """
-        if not (0 <= idx < self.num_atten):
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
-        att_req = urllib.request.urlopen(
-            'http://{}:{}/CHAN:{}:ATT?'.format(self._ip_address, self.port, idx + 1),
-            timeout=self._timeout)
-        att_resp = att_req.read().decode('utf-8').strip()
-        try:
-            atten_val = float(att_resp)
-        except:
-            if retry:
-                self.get_atten(idx, retry=False)
-            else:
-                raise attenuator.InvalidDataError(
-                    'Attenuator returned invalid data. Attenuator returned: {}'
-                    .format(att_resp))
-        return atten_val
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py b/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py
deleted file mode 100644
index 538532f..0000000
--- a/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Class for Telnet control of Mini-Circuits RCDAT series attenuators
-
-This class provides a wrapper to the MC-RCDAT attenuator modules for purposes
-of simplifying and abstracting control down to the basic necessities. It is
-not the intention of the module to expose all functionality, but to allow
-interchangeable HW to be used.
-
-See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf
-"""
-
-from antlion.controllers import attenuator
-from antlion.controllers.attenuator_lib import _tnhelper
-
-
-class AttenuatorInstrument(attenuator.AttenuatorInstrument):
-    """A specific telnet-controlled implementation of AttenuatorInstrument for
-    Mini-Circuits RC-DAT attenuators.
-
-    With the exception of telnet-specific commands, all functionality is defined
-    by the AttenuatorInstrument class. Because telnet is a stateful protocol,
-    the functionality of AttenuatorInstrument is contingent upon a telnet
-    connection being established.
-    """
-
-    def __init__(self, num_atten=0):
-        super(AttenuatorInstrument, self).__init__(num_atten)
-        self._tnhelper = _tnhelper._TNHelper(tx_cmd_separator='\r\n',
-                                             rx_cmd_separator='\r\n',
-                                             prompt='')
-        self.address = None
-
-    def __del__(self):
-        if self.is_open():
-            self.close()
-
-    def open(self, host, port=23):
-        """Opens a telnet connection to the desired AttenuatorInstrument and
-        queries basic information.
-
-        Args:
-            host: A valid hostname (IP address or DNS-resolvable name) to an
-            MC-DAT attenuator instrument.
-            port: An optional port number (defaults to telnet default 23)
-        """
-        self._tnhelper.open(host, port)
-        self.address = host
-
-        if self.num_atten == 0:
-            self.num_atten = 1
-
-        config_str = self._tnhelper.cmd('MN?')
-
-        if config_str.startswith('MN='):
-            config_str = config_str[len('MN='):]
-
-        self.properties = dict(
-            zip(['model', 'max_freq', 'max_atten'], config_str.split('-', 2)))
-        self.max_atten = float(self.properties['max_atten'])
-
-    def is_open(self):
-        """Returns True if the AttenuatorInstrument has an open connection."""
-        return bool(self._tnhelper.is_open())
-
-    def close(self):
-        """Closes the telnet connection.
-
-        This should be called as part of any teardown procedure prior to the
-        attenuator instrument leaving scope.
-        """
-        self._tnhelper.close()
-
-    def set_atten(self, idx, value, strict=True, retry=False):
-        """This function sets the attenuation of an attenuator given its index
-        in the instrument.
-
-        Args:
-            idx: A zero-based index that identifies a particular attenuator in
-                an instrument. For instruments that only have one channel, this
-                is ignored by the device.
-            value: A floating point value for nominal attenuation to be set.
-            strict: if True, function raises an error when given out of
-                bounds attenuation values, if false, the function sets out of
-                bounds values to 0 or max_atten.
-            retry: if True, command will be retried if possible
-
-        Raises:
-            InvalidOperationError if the telnet connection is not open.
-            IndexError if the index is not valid for this instrument.
-            ValueError if the requested set value is greater than the maximum
-                attenuation value.
-        """
-
-        if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
-
-        if idx >= self.num_atten:
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
-
-        if value > self.max_atten and strict:
-            raise ValueError('Attenuator value out of range!', self.max_atten,
-                             value)
-        # The actual device uses one-based index for channel numbers.
-        adjusted_value = min(max(0, value), self.max_atten)
-        self._tnhelper.cmd('CHAN:%s:SETATT:%s' % (idx + 1, adjusted_value),
-                           retry=retry)
-
-    def get_atten(self, idx, retry=False):
-        """Returns the current attenuation of the attenuator at the given index.
-
-        Args:
-            idx: The index of the attenuator.
-            retry: if True, command will be retried if possible
-
-        Raises:
-            InvalidOperationError if the telnet connection is not open.
-
-        Returns:
-            the current attenuation value as a float
-        """
-        if not self.is_open():
-            raise attenuator.InvalidOperationError('Connection not open!')
-
-        if idx >= self.num_atten or idx < 0:
-            raise IndexError('Attenuator index out of range!', self.num_atten,
-                             idx)
-
-        if self.num_atten == 1:
-            atten_val_str = self._tnhelper.cmd(':ATT?', retry=retry)
-        else:
-            atten_val_str = self._tnhelper.cmd('CHAN:%s:ATT?' % (idx + 1),
-                                               retry=retry)
-        atten_val = float(atten_val_str)
-        return atten_val
diff --git a/src/antlion/controllers/bits.py b/src/antlion/controllers/bits.py
deleted file mode 100644
index 0a9ed21..0000000
--- a/src/antlion/controllers/bits.py
+++ /dev/null
@@ -1,470 +0,0 @@
-"""Module managing the required definitions for using the bits power monitor"""
-
-import logging
-import os
-import time
-import uuid
-
-from antlion import context
-from antlion.controllers import power_metrics
-from antlion.controllers import power_monitor
-from antlion.controllers.bits_lib import bits_client
-from antlion.controllers.bits_lib import bits_service
-from antlion.controllers.bits_lib import bits_service_config as bsc
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'Bits'
-ACTS_CONTROLLER_REFERENCE_NAME = 'bitses'
-
-
-def create(configs):
-    return [Bits(index, config) for (index, config) in enumerate(configs)]
-
-
-def destroy(bitses):
-    for bits in bitses:
-        bits.teardown()
-
-
-def get_info(bitses):
-    return [bits.config for bits in bitses]
-
-
-class BitsError(Exception):
-    pass
-
-
-class _BitsCollection(object):
-    """Object that represents a bits collection
-
-    Attributes:
-        name: The name given to the collection.
-        markers_buffer: An array of un-flushed markers, each marker is
-        represented by a bi-dimensional tuple with the format
-        (<nanoseconds_since_epoch or datetime>, <text>).
-        monsoon_output_path: A path to store monsoon-like data if possible, Bits
-        uses this path to attempt data extraction in monsoon format, if this
-        parameter is left as None such extraction is not attempted.
-    """
-
-    def __init__(self, name, monsoon_output_path=None):
-        self.monsoon_output_path = monsoon_output_path
-        self.name = name
-        self.markers_buffer = []
-
-    def add_marker(self, timestamp, marker_text):
-        self.markers_buffer.append((timestamp, marker_text))
-
-
-def _transform_name(bits_metric_name):
-    """Transform bits metrics names to a more succinct version.
-
-    Examples of bits_metrics_name as provided by the client:
-    - default_device.slider.C1_30__PP0750_L1S_VDD_G3D_M_P:mA,
-    - default_device.slider.C1_30__PP0750_L1S_VDD_G3D_M_P:mW,
-    - default_device.Monsoon.Monsoon:mA,
-    - default_device.Monsoon.Monsoon:mW,
-    - <device>.<collector>.<rail>:<unit>
-
-    Args:
-        bits_metric_name: A bits metric name.
-
-    Returns:
-        For monsoon metrics, and for backwards compatibility:
-          Monsoon:mA -> avg_current,
-          Monsoon:mW -> avg_power,
-
-        For everything else:
-          <rail>:mW -> <rail/rail>_avg_current
-          <rail>:mW -> <rail/rail>_avg_power
-          ...
-    """
-    prefix, unit = bits_metric_name.split(':')
-    rail = prefix.split('.')[-1]
-
-    if 'mW' == unit:
-        suffix = 'avg_power'
-    elif 'mA' == unit:
-        suffix = 'avg_current'
-    elif 'mV' == unit:
-        suffix = 'avg_voltage'
-    else:
-        logging.warning('unknown unit type for unit %s' % unit)
-        suffix = ''
-
-    if 'Monsoon' == rail:
-        return suffix
-    elif suffix == '':
-        return rail
-    else:
-        return '%s_%s' % (rail, suffix)
-
-
-def _raw_data_to_metrics(raw_data_obj):
-    data = raw_data_obj['data']
-    metrics = []
-    for sample in data:
-        unit = sample['unit']
-        if 'Msg' == unit:
-            continue
-        elif 'mW' == unit:
-            unit_type = 'power'
-        elif 'mA' == unit:
-            unit_type = 'current'
-        elif 'mV' == unit:
-            unit_type = 'voltage'
-        else:
-            logging.warning('unknown unit type for unit %s' % unit)
-            continue
-
-        name = _transform_name(sample['name'])
-        avg = sample['avg']
-        metrics.append(power_metrics.Metric(avg, unit_type, unit, name=name))
-
-    return metrics
-
-
-def _get_single_file(registry, key):
-    if key not in registry:
-        return None
-    entry = registry[key]
-    if isinstance(entry, str):
-        return entry
-    if isinstance(entry, list):
-        return None if len(entry) == 0 else entry[0]
-    raise ValueError('registry["%s"] is of unsupported type %s for this '
-                     'operation. Supported types are str and list.' % (
-                         key, type(entry)))
-
-
-class Bits(object):
-
-    ROOT_RAIL_KEY = 'RootRail'
-    ROOT_RAIL_DEFAULT_VALUE = 'Monsoon:mA'
-
-    def __init__(self, index, config):
-        """Creates an instance of a bits controller.
-
-        Args:
-            index: An integer identifier for this instance, this allows to
-                tell apart different instances in the case where multiple
-                bits controllers are being used concurrently.
-            config: The config as defined in the ACTS  BiTS controller config.
-                Expected format is:
-                {
-                    // optional
-                    'Monsoon':   {
-                        'serial_num': <serial number:int>,
-                        'monsoon_voltage': <voltage:double>
-                    }
-                    // optional
-                    'Kibble': [
-                        {
-                            'board': 'BoardName1',
-                            'connector': 'A',
-                            'serial': 'serial_1'
-                        },
-                        {
-                            'board': 'BoardName2',
-                            'connector': 'D',
-                            'serial': 'serial_2'
-                        }
-                    ]
-                    // optional
-                    'RootRail': 'Monsoon:mA'
-                }
-        """
-        self.index = index
-        self.config = config
-        self._service = None
-        self._client = None
-        self._active_collection = None
-        self._collections_counter = 0
-        self._root_rail = config.get(self.ROOT_RAIL_KEY,
-                                     self.ROOT_RAIL_DEFAULT_VALUE)
-
-    def setup(self, *_, registry=None, **__):
-        """Starts a bits_service in the background.
-
-        This function needs to be called with either a registry or after calling
-        power_monitor.update_registry, and it needs to be called before any other
-        method in this class.
-
-        Args:
-            registry: A dictionary with files used by bits. Format:
-                {
-                    // required, string or list of strings
-                    bits_service: ['/path/to/bits_service']
-
-                    // required, string or list of strings
-                    bits_client: ['/path/to/bits.par']
-
-                    // needed for monsoon, string or list of strings
-                    lvpm_monsoon: ['/path/to/lvpm_monsoon.par']
-
-                    // needed for monsoon, string or list of strings
-                    hvpm_monsoon: ['/path/to/hvpm_monsoon.par']
-
-                    // needed for kibble, string or list of strings
-                    kibble_bin: ['/path/to/kibble.par']
-
-                    // needed for kibble, string or list of strings
-                    kibble_board_file: ['/path/to/phone_s.board']
-
-                    // optional, string or list of strings
-                    vm_file: ['/path/to/file.vm']
-                }
-
-                All fields in this dictionary can be either a string or a list
-                of strings. If lists are passed, only their first element is
-                taken into account. The reason for supporting lists but only
-                acting on their first element is for easier integration with
-                harnesses that handle resources as lists.
-        """
-        if registry is None:
-            registry = power_monitor.get_registry()
-        if 'bits_service' not in registry:
-            raise ValueError('No bits_service binary has been defined in the '
-                             'global registry.')
-        if 'bits_client' not in registry:
-            raise ValueError('No bits_client binary has been defined in the '
-                             'global registry.')
-
-        bits_service_binary = _get_single_file(registry, 'bits_service')
-        bits_client_binary = _get_single_file(registry, 'bits_client')
-        lvpm_monsoon_bin = _get_single_file(registry, 'lvpm_monsoon')
-        hvpm_monsoon_bin = _get_single_file(registry, 'hvpm_monsoon')
-        kibble_bin = _get_single_file(registry, 'kibble_bin')
-        kibble_board_file = _get_single_file(registry, 'kibble_board_file')
-        vm_file = _get_single_file(registry, 'vm_file')
-        config = bsc.BitsServiceConfig(self.config,
-                                       lvpm_monsoon_bin=lvpm_monsoon_bin,
-                                       hvpm_monsoon_bin=hvpm_monsoon_bin,
-                                       kibble_bin=kibble_bin,
-                                       kibble_board_file=kibble_board_file,
-                                       virtual_metrics_file=vm_file)
-        output_log = os.path.join(
-            context.get_current_context().get_full_output_path(),
-            'bits_service_out_%s.txt' % self.index)
-        service_name = 'bits_config_%s' % self.index
-
-        self._active_collection = None
-        self._collections_counter = 0
-        self._service = bits_service.BitsService(config,
-                                                 bits_service_binary,
-                                                 output_log,
-                                                 name=service_name,
-                                                 timeout=3600 * 24)
-        self._service.start()
-        self._client = bits_client.BitsClient(bits_client_binary,
-                                              self._service,
-                                              config)
-        # this call makes sure that the client can interact with the server.
-        devices = self._client.list_devices()
-        logging.debug(devices)
-
-    def disconnect_usb(self, *_, **__):
-        self._client.disconnect_usb()
-
-    def connect_usb(self, *_, **__):
-        self._client.connect_usb()
-
-    def measure(self, *_, measurement_args=None,
-                measurement_name=None, monsoon_output_path=None,
-                **__):
-        """Blocking function that measures power through bits for the specified
-        duration. Results need to be consulted through other methods such as
-        get_metrics or post processing files like the ones
-        generated at monsoon_output_path after calling `release_resources`.
-
-        Args:
-            measurement_args: A dictionary with the following structure:
-                {
-                   'duration': <seconds to measure for>
-                   'hz': <samples per second>
-                   'measure_after_seconds': <sleep time before measurement>
-                }
-                The actual number of samples per second is limited by the
-                bits configuration. The value of hz is defaulted to 1000.
-            measurement_name: A name to give to the measurement (which is also
-                used as the Bits collection name. Bits collection names (and
-                therefore measurement names) need to be unique within the
-                context of a Bits object.
-            monsoon_output_path: If provided this path will be used to generate
-                a monsoon like formatted file at the release_resources step.
-        """
-        if measurement_args is None:
-            raise ValueError('measurement_args can not be left undefined')
-
-        duration = measurement_args.get('duration')
-        if duration is None:
-            raise ValueError(
-                'duration can not be left undefined within measurement_args')
-
-        hz = measurement_args.get('hz', 1000)
-
-        # Delay the start of the measurement if an offset is required
-        measure_after_seconds = measurement_args.get('measure_after_seconds')
-        if measure_after_seconds:
-            time.sleep(measure_after_seconds)
-
-        if self._active_collection:
-            raise BitsError(
-                'Attempted to start a collection while there is still an '
-                'active one. Active collection: %s',
-                self._active_collection.name)
-
-        self._collections_counter = self._collections_counter + 1
-        # The name gets a random 8 characters salt suffix because the Bits
-        # client has a bug where files with the same name are considered to be
-        # the same collection and it won't load two files with the same name.
-        # b/153170987 b/153944171
-        if not measurement_name:
-            measurement_name = 'bits_collection_%s_%s' % (
-                str(self._collections_counter), str(uuid.uuid4())[0:8])
-
-        self._active_collection = _BitsCollection(measurement_name,
-                                                  monsoon_output_path)
-        self._client.start_collection(self._active_collection.name,
-                                      default_sampling_rate=hz)
-        time.sleep(duration)
-
-    def get_metrics(self, *_, timestamps=None, **__):
-        """Gets metrics for the segments delimited by the timestamps dictionary.
-
-        Must be called before releasing resources, otherwise it will fail adding
-        markers to the collection.
-
-        Args:
-            timestamps: A dictionary of the shape:
-                {
-                    'segment_name': {
-                        'start' : <milliseconds_since_epoch> or <datetime>
-                        'end': <milliseconds_since_epoch> or <datetime>
-                    }
-                    'another_segment': {
-                        'start' : <milliseconds_since_epoch> or <datetime>
-                        'end': <milliseconds_since_epoch> or <datetime>
-                    }
-                }
-        Returns:
-            A dictionary of the shape:
-                {
-                    'segment_name': <list of power_metrics.Metric>
-                    'another_segment': <list of power_metrics.Metric>
-                }
-        """
-        if timestamps is None:
-            raise ValueError('timestamps dictionary can not be left undefined')
-
-        metrics = {}
-
-        for segment_name, times in timestamps.items():
-            if 'start' not in times or 'end' not in times:
-                continue
-
-            start = times['start']
-            end = times['end']
-
-            # bits accepts nanoseconds only, but since this interface needs to
-            # backwards compatible with monsoon which works with milliseconds we
-            # require to do a conversion from milliseconds to nanoseconds.
-            # The preferred way for new calls to this function should be using
-            # datetime instead which is unambiguous
-            if isinstance(start, (int, float)):
-                start = start * 1e6
-            if isinstance(end, (int, float)):
-                end = end * 1e6
-
-            raw_metrics = self._client.get_metrics(self._active_collection.name,
-                                                   start=start, end=end)
-            self._add_marker(start, 'start - %s' % segment_name)
-            self._add_marker(end, 'end - %s' % segment_name)
-            metrics[segment_name] = _raw_data_to_metrics(raw_metrics)
-        return metrics
-
-    def _add_marker(self, timestamp, marker_text):
-        if not self._active_collection:
-            raise BitsError(
-                'markers can not be added without an active collection')
-        self._active_collection.add_marker(timestamp, marker_text)
-
-    def release_resources(self):
-        """Performs all the cleanup and export tasks.
-
-        In the way that Bits' is interfaced several tasks can not be performed
-        while a collection is still active (like exporting the data) and others
-        can only take place while the collection is still active (like adding
-        markers to a collection).
-
-        To workaround this unique workflow, the collections that are started
-        with the 'measure' method are not really stopped after the method
-        is unblocked, it is only stopped after this method is called.
-
-        All the export files (.7z.bits and monsoon-formatted file) are also
-        generated in this method.
-        """
-        if not self._active_collection:
-            raise BitsError(
-                'Attempted to stop a collection without starting one')
-        self._client.add_markers(self._active_collection.name,
-                                 self._active_collection.markers_buffer)
-        self._client.stop_collection(self._active_collection.name)
-
-        export_file = os.path.join(
-            context.get_current_context().get_full_output_path(),
-            '%s.7z.bits' % self._active_collection.name)
-        self._client.export(self._active_collection.name, export_file)
-        if self._active_collection.monsoon_output_path:
-            self._attempt_monsoon_format()
-        self._active_collection = None
-
-    def _attempt_monsoon_format(self):
-        """Attempts to create a monsoon-formatted file.
-
-        In the case where there is not enough information to retrieve a
-        monsoon-like file, this function will do nothing.
-        """
-        available_channels = self._client.list_channels(
-            self._active_collection.name)
-        milli_amps_channel = None
-
-        for channel in available_channels:
-            if channel.endswith(self._root_rail):
-                milli_amps_channel = self._root_rail
-                break
-
-        if milli_amps_channel is None:
-            logging.debug('No monsoon equivalent channels were found when '
-                          'attempting to recreate monsoon file format. '
-                          'Available channels were: %s',
-                          str(available_channels))
-            return
-
-        logging.debug('Recreating monsoon file format from channel: %s',
-                      milli_amps_channel)
-        self._client.export_as_monsoon_format(
-            self._active_collection.monsoon_output_path,
-            self._active_collection.name,
-            milli_amps_channel)
-
-    def get_waveform(self, file_path=None):
-        """Parses a file generated in release_resources.
-
-        Args:
-            file_path: Path to a waveform file.
-
-        Returns:
-            A list of tuples in which the first element is a timestamp and the
-            second element is the sampled current at that time.
-        """
-        if file_path is None:
-            raise ValueError('file_path can not be None')
-
-        return list(power_metrics.import_raw_data(file_path))
-
-    def teardown(self):
-        if self._service is None:
-            return
-
-        if self._service.service_state == bits_service.BitsServiceStates.STARTED:
-            self._service.stop()
diff --git a/src/antlion/controllers/bits_lib/__init__.py b/src/antlion/controllers/bits_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/bits_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/bits_lib/bits_client.py b/src/antlion/controllers/bits_lib/bits_client.py
deleted file mode 100644
index c68aafd..0000000
--- a/src/antlion/controllers/bits_lib/bits_client.py
+++ /dev/null
@@ -1,335 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import csv
-from datetime import datetime
-import logging
-import tempfile
-
-from antlion.libs.proc import job
-import yaml
-
-
-class BitsClientError(Exception):
-    pass
-
-
-# An arbitrary large number of seconds.
-ONE_YEAR = str(3600 * 24 * 365)
-EPOCH = datetime.utcfromtimestamp(0)
-
-
-def _to_ns(timestamp):
-    """Returns the numerical value of a timestamp in nanoseconds since epoch.
-
-    Args:
-        timestamp: Either a number or a datetime.
-
-    Returns:
-        Rounded timestamp if timestamp is numeric, number of nanoseconds since
-        epoch if timestamp is instance of datetime.datetime.
-    """
-    if isinstance(timestamp, datetime):
-        return int((timestamp - EPOCH).total_seconds() * 1e9)
-    elif isinstance(timestamp, (float, int)):
-        return int(timestamp)
-    raise ValueError('%s can not be converted to a numerical representation of '
-                     'nanoseconds.' % type(timestamp))
-
-
-class BitsClient(object):
-    """Helper class to issue bits' commands"""
-
-    def __init__(self, binary, service, service_config):
-        """Constructs a BitsClient.
-
-        Args:
-            binary: The location of the bits.par client binary.
-            service: A bits_service.BitsService object. The service is expected
-              to be previously setup.
-            service_config: The bits_service_config.BitsService object used to
-              start the service on service_port.
-        """
-        self._log = logging.getLogger()
-        self._binary = binary
-        self._service = service
-        self._server_config = service_config
-
-    def _acquire_monsoon(self):
-        """Gets hold of a Monsoon so no other processes can use it.
-        Only works if there is a monsoon."""
-        self._log.debug('acquiring monsoon')
-        self.run_cmd('--collector',
-                     'Monsoon',
-                     '--collector_cmd',
-                     'acquire_monsoon', timeout=10)
-
-    def _release_monsoon(self):
-        self._log.debug('releasing monsoon')
-        self.run_cmd('--collector',
-                     'Monsoon',
-                     '--collector_cmd',
-                     'release_monsoon', timeout=10)
-
-    def run_cmd(self, *args, timeout=60):
-        """Executes a generic bits.par command.
-
-        Args:
-            args: A bits.par command as a tokenized array. The path to the
-              binary and the service port are provided by default, cmd should
-              only contain the remaining tokens of the desired command.
-            timeout: Number of seconds to wait for the command to finish before
-              forcibly killing it.
-        """
-        result = job.run([self._binary, '--port',
-                          self._service.port] + [str(arg) for arg in args],
-                         timeout=timeout)
-        return result.stdout
-
-    def export(self, collection_name, path):
-        """Exports a collection to its bits persistent format.
-
-        Exported files can be shared and opened through the Bits UI.
-
-        Args:
-            collection_name: Collection to be exported.
-            path: Where the resulting file should be created. Bits requires that
-            the resulting file ends in .7z.bits.
-        """
-        if not path.endswith('.7z.bits'):
-            raise BitsClientError('Bits\' collections can only be exported to '
-                                  'files ending in .7z.bits, got %s' % path)
-        self._log.debug('exporting collection %s to %s',
-                        collection_name,
-                        path)
-        self.run_cmd('--name',
-                     collection_name,
-                     '--ignore_gaps',
-                     '--export',
-                     '--export_path',
-                     path,
-                     timeout=600)
-
-    def export_as_csv(self, channels, collection_name, output_file):
-        """Export bits data as CSV.
-
-        Writes the selected channel data to the given output_file. Note that
-        the first line of the file contains headers.
-
-        Args:
-          channels: A list of string pattern matches for the channel to be
-            retrieved. For example, ":mW" will export all power channels,
-            ":mV" will export all voltage channels, "C1_01__" will export
-            power/voltage/current for the first fail of connector 1.
-          collection_name: A string for a collection that is sampling.
-          output_file: A string file path where the CSV will be written.
-        """
-        channels_arg = ','.join(channels)
-        cmd = ['--csvfile',
-               output_file,
-               '--name',
-               collection_name,
-               '--ignore_gaps',
-               '--csv_rawtimestamps',
-               '--channels',
-               channels_arg]
-        if self._server_config.has_virtual_metrics_file:
-            cmd = cmd + ['--vm_file', 'default']
-        self._log.debug(
-            'exporting csv for collection %s to %s, with channels %s',
-            collection_name, output_file, channels_arg)
-        self.run_cmd(*cmd, timeout=600)
-
-    def add_markers(self, collection_name, markers):
-        """Appends markers to a collection.
-
-        These markers are displayed in the Bits UI and are useful to label
-        important test events.
-
-        Markers can only be added to collections that have not been
-        closed / stopped. Markers need to be added in chronological order,
-        this function ensures that at least the markers added in each
-        call are sorted in chronological order, but if this function
-        is called multiple times, then is up to the user to ensure that
-        the subsequent batches of markers are for timestamps higher (newer)
-        than all the markers passed in previous calls to this function.
-
-        Args:
-            collection_name: The name of the collection to add markers to.
-            markers: A list of tuples of the shape:
-
-             [(<nano_seconds_since_epoch or datetime>, <marker text>),
-              (<nano_seconds_since_epoch or datetime>, <marker text>),
-              (<nano_seconds_since_epoch or datetime>, <marker text>),
-              ...
-            ]
-        """
-        # sorts markers in chronological order before adding them. This is
-        # required by go/pixel-bits
-        for ts, marker in sorted(markers, key=lambda x: _to_ns(x[0])):
-            self._log.debug('Adding marker at %s: %s', str(ts), marker)
-            self.run_cmd('--name',
-                         collection_name,
-                         '--log_ts',
-                         str(_to_ns(ts)),
-                         '--log',
-                         marker,
-                         timeout=10)
-
-    def get_metrics(self, collection_name, start=None, end=None):
-        """Extracts metrics for a period of time.
-
-        Args:
-            collection_name: The name of the collection to get metrics from
-            start: Numerical nanoseconds since epoch until the start of the
-            period of interest or datetime. If not provided, start will be the
-            beginning of the collection.
-            end: Numerical nanoseconds since epoch until the end of the
-            period of interest or datetime. If not provided, end will be the
-            end of the collection.
-        """
-        with tempfile.NamedTemporaryFile(prefix='bits_metrics') as tf:
-            cmd = ['--name',
-                   collection_name,
-                   '--ignore_gaps',
-                   '--aggregates_yaml_path',
-                   tf.name]
-
-            if start is not None:
-                cmd = cmd + ['--abs_start_time', str(_to_ns(start))]
-            if end is not None:
-                cmd = cmd + ['--abs_stop_time', str(_to_ns(end))]
-            if self._server_config.has_virtual_metrics_file:
-                cmd = cmd + ['--vm_file', 'default']
-
-            self.run_cmd(*cmd)
-            with open(tf.name) as mf:
-                self._log.debug(
-                    'bits aggregates for collection %s [%s-%s]: %s' % (
-                        collection_name, start, end,
-                        mf.read()))
-
-            with open(tf.name) as mf:
-                return yaml.safe_load(mf)
-
-    def disconnect_usb(self):
-        """Disconnects the monsoon's usb. Only works if there is a monsoon"""
-        self._log.debug('disconnecting monsoon\'s usb')
-        self.run_cmd('--collector',
-                     'Monsoon',
-                     '--collector_cmd',
-                     'usb_disconnect', timeout=10)
-
-    def start_collection(self, collection_name, default_sampling_rate=1000):
-        """Indicates Bits to start a collection.
-
-        Args:
-            collection_name: Name to give to the collection to be started.
-            Collection names must be unique at Bits' service level. If multiple
-            collections must be taken within the context of the same Bits'
-            service, ensure that each collection is given a different one.
-            default_sampling_rate: Samples per second to be collected
-        """
-
-        cmd = ['--name',
-               collection_name,
-               '--non_blocking',
-               '--time',
-               ONE_YEAR,
-               '--default_sampling_rate',
-               str(default_sampling_rate)]
-
-        if self._server_config.has_kibbles:
-            cmd = cmd + ['--disk_space_saver']
-
-        self._log.debug('starting collection %s', collection_name)
-        self.run_cmd(*cmd, timeout=10)
-
-    def connect_usb(self):
-        """Connects the monsoon's usb. Only works if there is a monsoon."""
-        cmd = ['--collector',
-               'Monsoon',
-               '--collector_cmd',
-               'usb_connect']
-        self._log.debug('connecting monsoon\'s usb')
-        self.run_cmd(*cmd, timeout=10)
-
-    def stop_collection(self, collection_name):
-        """Stops the active collection."""
-        self._log.debug('stopping collection %s', collection_name)
-        self.run_cmd('--name',
-                     collection_name,
-                     '--stop')
-        self._log.debug('stopped collection %s', collection_name)
-
-    def list_devices(self):
-        """Lists devices managed by the bits_server this client is connected
-        to.
-
-        Returns:
-            bits' output when called with --list devices.
-        """
-        self._log.debug('listing devices')
-        result = self.run_cmd('--list', 'devices', timeout=20)
-        return result
-
-    def list_channels(self, collection_name):
-        """Finds all the available channels in a given collection.
-
-        Args:
-            collection_name: The name of the collection to get channels from.
-        """
-        metrics = self.get_metrics(collection_name)
-        return [channel['name'] for channel in metrics['data']]
-
-    def export_as_monsoon_format(self, dest_path, collection_name,
-                                 channel_pattern):
-        """Exports data from a collection in monsoon style.
-
-        This function exists because there are tools that have been built on
-        top of the monsoon format. To be able to leverage such tools we need
-        to make the data compliant with the format.
-
-        The monsoon format is:
-
-        <time_since_epoch_in_secs> <amps>
-
-        Args:
-            dest_path: Path where the resulting file will be generated.
-            collection_name: The name of the Bits' collection to export data
-            from.
-            channel_pattern: A regex that matches the Bits' channel to be used
-            as source of data. If there are multiple matching channels, only the
-            first one will be used. The channel is always assumed to be
-            expressed en milli-amps, the resulting format requires amps, so the
-            values coming from the first matching channel will always be
-            multiplied by 1000.
-        """
-        with tempfile.NamedTemporaryFile(prefix='bits_csv_') as tmon:
-            self.export_as_csv([channel_pattern], collection_name, tmon.name)
-
-            self._log.debug(
-                'massaging bits csv to monsoon format for collection'
-                ' %s', collection_name)
-            with open(tmon.name) as csv_file:
-                reader = csv.reader(csv_file)
-                headers = next(reader)
-                self._log.debug('csv headers %s', headers)
-                with open(dest_path, 'w') as dest:
-                    for row in reader:
-                        ts = float(row[0]) / 1e9
-                        amps = float(row[1]) / 1e3
-                        dest.write('%.7f %.12f\n' % (ts, amps))
diff --git a/src/antlion/controllers/bits_lib/bits_service.py b/src/antlion/controllers/bits_lib/bits_service.py
deleted file mode 100644
index ad2f660..0000000
--- a/src/antlion/controllers/bits_lib/bits_service.py
+++ /dev/null
@@ -1,223 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import atexit
-import json
-import logging
-import os
-import re
-import signal
-import tempfile
-import time
-
-from enum import Enum
-
-from antlion import context
-from antlion.libs.proc import job
-from antlion.libs.proc import process
-
-
-class BitsServiceError(Exception):
-    pass
-
-
-class BitsServiceStates(Enum):
-    NOT_STARTED = 'not-started'
-    STARTED = 'started'
-    STOPPED = 'stopped'
-
-
-class BitsService(object):
-    """Helper class to start and stop a bits service
-
-    Attributes:
-        port: When the service starts the port it was assigned to is made
-        available for external agents to reference to the background service.
-        config: The BitsServiceConfig used to configure this service.
-        name: A free form string.
-        service_state: A BitsServiceState that represents the service state.
-    """
-
-    def __init__(self, config, binary, output_log_path,
-                 name='bits_service_default',
-                 timeout=None):
-        """Creates a BitsService object.
-
-        Args:
-            config: A BitsServiceConfig.
-            described in go/pixel-bits/user-guide/service/configuration.md
-            binary: Path to a bits_service binary.
-            output_log_path: Full path to where the resulting logs should be
-            stored.
-            name: Optional string to identify this service by. This
-            is used as reference in logs to tell this service apart from others
-            running in parallel.
-            timeout: Maximum time in seconds the service should be allowed
-            to run in the background after start. If left undefined the service
-            in the background will not time out.
-        """
-        self.name = name
-        self.port = None
-        self.config = config
-        self.service_state = BitsServiceStates.NOT_STARTED
-        self._timeout = timeout
-        self._binary = binary
-        self._log = logging.getLogger()
-        self._process = None
-        self._output_log = open(output_log_path, 'w')
-        self._collections_dir = tempfile.TemporaryDirectory(
-            prefix='bits_service_collections_dir_')
-        self._cleaned_up = False
-        atexit.register(self._atexit_cleanup)
-
-    def _atexit_cleanup(self):
-        if not self._cleaned_up:
-            self._log.error('Cleaning up bits_service %s at exit.', self.name)
-            self._cleanup()
-
-    def _write_extra_debug_logs(self):
-        dmesg_log = '%s.dmesg.txt' % self._output_log.name
-        dmesg = job.run(['dmesg', '-e'], ignore_status=True)
-        with open(dmesg_log, 'w') as f:
-            f.write(dmesg.stdout)
-
-        free_log = '%s.free.txt' % self._output_log.name
-        free = job.run(['free', '-m'], ignore_status=True)
-        with open(free_log, 'w') as f:
-            f.write(free.stdout)
-
-        df_log = '%s.df.txt' % self._output_log.name
-        df = job.run(['df', '-h'], ignore_status=True)
-        with open(df_log, 'w') as f:
-            f.write(df.stdout)
-
-    def _cleanup(self):
-        self._write_extra_debug_logs()
-        self.port = None
-        self._collections_dir.cleanup()
-        if self._process and self._process.is_running():
-            self._process.signal(signal.SIGINT)
-            self._log.debug('SIGINT sent to bits_service %s.' % self.name)
-            self._process.wait(kill_timeout=60.0)
-            self._log.debug('bits_service %s has been stopped.' % self.name)
-        self._output_log.close()
-        if self.config.has_monsoon:
-            job.run([self.config.monsoon_config.monsoon_binary,
-                     '--serialno',
-                     str(self.config.monsoon_config.serial_num),
-                     '--usbpassthrough',
-                     'on'],
-                    timeout=10)
-        self._cleaned_up = True
-
-    def _service_started_listener(self, line):
-        if self.service_state is BitsServiceStates.STARTED:
-            return
-        if 'Started server!' in line and self.port is not None:
-            self.service_state = BitsServiceStates.STARTED
-
-    PORT_PATTERN = re.compile(r'.*Server listening on .*:(\d+)\.$')
-
-    def _service_port_listener(self, line):
-        if self.port is not None:
-            return
-        match = self.PORT_PATTERN.match(line)
-        if match:
-            self.port = match.group(1)
-
-    def _output_callback(self, line):
-        self._output_log.write(line)
-        self._output_log.write('\n')
-        self._service_port_listener(line)
-        self._service_started_listener(line)
-
-    def _trigger_background_process(self, binary):
-        config_path = os.path.join(
-            context.get_current_context().get_full_output_path(),
-            '%s.config.json' % self.name)
-        with open(config_path, 'w') as f:
-            f.write(json.dumps(self.config.config_dic, indent=2))
-
-        cmd = [binary,
-               '--port',
-               '0',
-               '--collections_folder',
-               self._collections_dir.name,
-               '--collector_config_file',
-               config_path]
-
-        # bits_service only works on linux systems, therefore is safe to assume
-        # that 'timeout' will be available.
-        if self._timeout:
-            cmd = ['timeout',
-                   '--signal=SIGTERM',
-                   '--kill-after=60',
-                   str(self._timeout)] + cmd
-
-        self._process = process.Process(cmd)
-        self._process.set_on_output_callback(self._output_callback)
-        self._process.set_on_terminate_callback(self._on_terminate)
-        self._process.start()
-
-    def _on_terminate(self, *_):
-        self._log.error('bits_service %s stopped unexpectedly.', self.name)
-        self._cleanup()
-
-    def start(self):
-        """Starts the bits service in the background.
-
-        This function blocks until the background service signals that it has
-        successfully started. A BitsServiceError is raised if the signal is not
-        received.
-        """
-        if self.service_state is BitsServiceStates.STOPPED:
-            raise BitsServiceError(
-                'bits_service %s was already stopped. A stopped'
-                ' service can not be started again.' % self.name)
-
-        if self.service_state is BitsServiceStates.STARTED:
-            raise BitsServiceError(
-                'bits_service %s has already been started.' % self.name)
-
-        self._log.info('starting bits_service %s', self.name)
-        self._trigger_background_process(self._binary)
-
-        # wait 40 seconds for the service to be ready.
-        max_startup_wait = time.time() + 40
-        while time.time() < max_startup_wait:
-            if self.service_state is BitsServiceStates.STARTED:
-                self._log.info('bits_service %s started on port %s', self.name,
-                               self.port)
-                return
-            time.sleep(0.1)
-
-        self._log.error('bits_service %s did not start on time, starting '
-                        'service teardown and raising a BitsServiceError.')
-        self._cleanup()
-        raise BitsServiceError(
-            'bits_service %s did not start successfully' % self.name)
-
-    def stop(self):
-        """Stops the bits service."""
-        if self.service_state is BitsServiceStates.STOPPED:
-            raise BitsServiceError(
-                'bits_service %s has already been stopped.' % self.name)
-        port = self.port
-        self._log.info('stopping bits_service %s on port %s', self.name, port)
-        self.service_state = BitsServiceStates.STOPPED
-        self._cleanup()
-        self._log.info('bits_service %s on port %s was stopped', self.name,
-                       port)
diff --git a/src/antlion/controllers/bits_lib/bits_service_config.py b/src/antlion/controllers/bits_lib/bits_service_config.py
deleted file mode 100644
index 1900869..0000000
--- a/src/antlion/controllers/bits_lib/bits_service_config.py
+++ /dev/null
@@ -1,249 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-
-DEFAULT_MONSOON_CONFIG_DICT = {
-    'enabled': 1,
-    'type': 'monsooncollector',
-    'monsoon_reset': 0,
-    # maximum monsoon sample rate that works best for both lvpm and hvpm
-    'sampling_rate': 1000,
-}
-
-
-class _BitsMonsoonConfig(object):
-    """Helper object to construct a bits_service config from a monsoon config as
-    defined for the bits controller config and required additional resources,
-    such as paths to executables.
-
-    The format for the bits_service's monsoon configuration is explained at:
-    http://go/pixel-bits/user-guide/service/collectors/monsoon
-
-    Attributes:
-        config_dic: A bits_service's monsoon configuration as a python
-        dictionary.
-    """
-
-    def __init__(self, monsoon_config, lvpm_monsoon_bin=None,
-                 hvpm_monsoon_bin=None):
-        """Constructs _BitsServiceMonsoonConfig.
-
-        Args:
-            monsoon_config: The monsoon config as defined in the
-                ACTS Bits controller config. Expected format is:
-                  { 'serial_num': <serial number:int>,
-                    'monsoon_voltage': <voltage:double> }
-            lvpm_monsoon_bin: Binary file to interact with low voltage monsoons.
-                Needed if the monsoon is a lvpm monsoon (serial number lower
-                than 20000).
-            hvpm_monsoon_bin: Binary file to interact with high voltage
-                monsoons. Needed if the monsoon is a hvpm monsoon (serial number
-                greater than 20000).
-        """
-        if 'serial_num' not in monsoon_config:
-            raise ValueError(
-                'Monsoon serial_num can not be undefined. Received '
-                'config was: %s' % monsoon_config)
-        if 'monsoon_voltage' not in monsoon_config:
-            raise ValueError('Monsoon voltage can not be undefined. Received '
-                             'config was: %s' % monsoon_config)
-
-        self.serial_num = int(monsoon_config['serial_num'])
-        self.monsoon_voltage = float(monsoon_config['monsoon_voltage'])
-
-        self.config_dic = copy.deepcopy(DEFAULT_MONSOON_CONFIG_DICT)
-        if float(self.serial_num) >= 20000:
-            self.config_dic['hv_monsoon'] = 1
-            if hvpm_monsoon_bin is None:
-                raise ValueError('hvpm_monsoon binary is needed but was None. '
-                                 'Received config was: %s' % monsoon_config)
-            self.monsoon_binary = hvpm_monsoon_bin
-        else:
-            self.config_dic['hv_monsoon'] = 0
-            if lvpm_monsoon_bin is None:
-                raise ValueError('lvpm_monsoon binary is needed but was None. '
-                                 'Received config was: %s' % monsoon_config)
-            self.monsoon_binary = lvpm_monsoon_bin
-
-        self.config_dic['monsoon_binary_path'] = self.monsoon_binary
-        self.config_dic['monsoon_voltage'] = self.monsoon_voltage
-        self.config_dic['serial_num'] = self.serial_num
-
-
-DEFAULT_KIBBLES_BOARD_CONFIG = {
-    'enabled': 1,
-    'type': 'kibblecollector',
-    'attached_kibbles': {}
-}
-
-DEFAULT_KIBBLE_CONFIG = {
-    'ultra_channels_current_hz': 976.5625,
-    'ultra_channels_voltage_hz': 976.5625,
-    'high_channels_current_hz': 976.5625,
-    'high_channels_voltage_hz': 976.5625
-}
-
-
-class _BitsKibblesConfig(object):
-    def __init__(self, kibbles_config, kibble_bin, kibble_board_file):
-        """Constructs _BitsKibblesConfig.
-
-        Args:
-            kibbles_config: A list of compacted kibble boards descriptions.
-                Expected format is:
-                    [{
-                        'board': 'BoardName1',
-                        'connector': 'A',
-                        'serial': 'serial_1'
-                     },
-                    {
-                        'board': 'BoardName2',
-                        'connector': 'D',
-                        'serial': 'serial_2'
-                    }]
-                More details can be found at go/acts-bits.
-            kibble_bin: Binary file to interact with kibbles.
-            kibble_board_file: File describing the distribution of rails on a
-                kibble. go/kibble#setting-up-bits-board-files
-        """
-
-        if not isinstance(kibbles_config, list):
-            raise ValueError(
-                'kibbles_config must be a list. Got %s.' % kibbles_config)
-
-        if kibble_bin is None:
-            raise ValueError('Kibbles were present in the config but no '
-                             'kibble_bin was provided')
-        if kibble_board_file is None:
-            raise ValueError('Kibbles were present in the config but no '
-                             'kibble_board_file was provided')
-
-        self.boards_configs = {}
-
-        for kibble in kibbles_config:
-            if 'board' not in kibble:
-                raise ValueError('An individual kibble config must have a '
-                                 'board')
-            if 'connector' not in kibble:
-                raise ValueError('An individual kibble config must have a '
-                                 'connector')
-            if 'serial' not in kibble:
-                raise ValueError('An individual kibble config must have a '
-                                 'serial')
-
-            board = kibble['board']
-            connector = kibble['connector']
-            serial = kibble['serial']
-            if board not in self.boards_configs:
-                self.boards_configs[board] = copy.deepcopy(
-                    DEFAULT_KIBBLES_BOARD_CONFIG)
-                self.boards_configs[board][
-                    'board_file'] = kibble_board_file
-                self.boards_configs[board]['kibble_py'] = kibble_bin
-            kibble_config = copy.deepcopy(DEFAULT_KIBBLE_CONFIG)
-            kibble_config['connector'] = connector
-            self.boards_configs[board]['attached_kibbles'][
-                serial] = kibble_config
-
-
-DEFAULT_SERVICE_CONFIG_DICT = {
-    'devices': {
-        'default_device': {
-            'enabled': 1,
-            'collectors': {}
-        }
-    }
-}
-
-
-class BitsServiceConfig(object):
-    """Helper object to construct a bits_service config from a bits controller
-    config and required additional resources, such as paths to executables.
-
-    The format for bits_service's configuration is explained in:
-    go/pixel-bits/user-guide/service/configuration.md
-
-    Attributes:
-        config_dic: A bits_service configuration as a python dictionary.
-    """
-
-    def __init__(self, controller_config, lvpm_monsoon_bin=None,
-                 hvpm_monsoon_bin=None, kibble_bin=None,
-                 kibble_board_file=None, virtual_metrics_file=None):
-        """Creates a BitsServiceConfig.
-
-        Args:
-            controller_config: The config as defined in the ACTS  BiTS
-                controller config. Expected format is:
-                {
-                    // optional
-                    'Monsoon':   {
-                        'serial_num': <serial number:int>,
-                        'monsoon_voltage': <voltage:double>
-                    }
-                    // optional
-                    'Kibble': [
-                        {
-                            'board': 'BoardName1',
-                            'connector': 'A',
-                            'serial': 'serial_1'
-                        },
-                        {
-                            'board': 'BoardName2',
-                            'connector': 'D',
-                            'serial': 'serial_2'
-                        }
-                    ]
-                }
-            lvpm_monsoon_bin: Binary file to interact with low voltage monsoons.
-                Needed if the monsoon is a lvpm monsoon (serial number lower
-                than 20000).
-            hvpm_monsoon_bin: Binary file to interact with high voltage
-                monsoons. Needed if the monsoon is a hvpm monsoon (serial number
-                greater than 20000).
-            kibble_bin: Binary file to interact with kibbles.
-            kibble_board_file: File describing the distribution of rails on a
-                kibble. go/kibble#setting-up-bits-board-files
-            virtual_metrics_file: A list of virtual metrics files to add
-                data aggregates on top of regular channel aggregates.
-                go/pixel-bits/user-guide/virtual-metrics
-        """
-        self.config_dic = copy.deepcopy(DEFAULT_SERVICE_CONFIG_DICT)
-        self.has_monsoon = False
-        self.has_kibbles = False
-        self.has_virtual_metrics_file = False
-        self.monsoon_config = None
-        self.kibbles_config = None
-        if 'Monsoon' in controller_config:
-            self.has_monsoon = True
-            self.monsoon_config = _BitsMonsoonConfig(
-                controller_config['Monsoon'],
-                lvpm_monsoon_bin,
-                hvpm_monsoon_bin)
-            self.config_dic['devices']['default_device']['collectors'][
-                'Monsoon'] = self.monsoon_config.config_dic
-        if 'Kibbles' in controller_config:
-            self.has_kibbles = True
-            self.kibbles_config = _BitsKibblesConfig(
-                controller_config['Kibbles'],
-                kibble_bin, kibble_board_file)
-            self.config_dic['devices']['default_device']['collectors'].update(
-                self.kibbles_config.boards_configs)
-            if virtual_metrics_file is not None:
-                self.config_dic['devices']['default_device'][
-                    'vm_files'] = [virtual_metrics_file]
-                self.has_virtual_metrics_file = True
diff --git a/src/antlion/controllers/bluetooth_pts_device.py b/src/antlion/controllers/bluetooth_pts_device.py
deleted file mode 100644
index 1dbe5c7..0000000
--- a/src/antlion/controllers/bluetooth_pts_device.py
+++ /dev/null
@@ -1,764 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Prerequisites:
-    Windows 10
-    Bluetooth PTS installed
-    Recommended: Running cmder as Admin: https://cmder.net/
-
-### BEGIN SETUP STEPS###
-1. Install latest version of Python for windows:
-    https://www.python.org/downloads/windows/
-
-Tested successfully on Python 3.7.3.:
-    https://www.python.org/ftp/python/3.7.3/python-3.7.3.exe
-
-2. Launch Powershell and setup PATH:
-Setx PATH “%PATH%;C:/Users/<username>/AppData/Local/Programs/Python/Python37-32/Scripts”
-
-3. Launch Cmder as Admin before running any PTS related ACTS tests.
-
-
-### END SETUP STEPS###
-
-
-Bluetooth PTS controller.
-Mandatory parameters are log_directory and sig_root_directory.
-
-ACTS Config setup:
-"BluetoothPtsDevice": {
-    "log_directory": "C:\\Users\\fsbtt\\Documents\\Profile Tuning Suite\\Test_Dir",
-    "sig_root_directory": "C:\\Program Files (x86)\\Bluetooth SIG"
-}
-
-"""
-from antlion import signals
-from datetime import datetime
-
-import ctypes
-import logging
-import os
-import subprocess
-import time
-import xml.etree.ElementTree as ET
-
-from xml.dom import minidom
-from xml.etree.ElementTree import Element
-
-
-class BluetoothPtsDeviceConfigError(signals.ControllerError):
-    pass
-
-
-class BluetoothPtsSnifferError(signals.ControllerError):
-    pass
-
-
-MOBLY_CONTROLLER_CONFIG_NAME = "BluetoothPtsDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "bluetooth_pts_device"
-
-# Prefix to identify final verdict string. This is a PTS specific log String.
-VERDICT = 'VERDICT/'
-
-# Verdict strings that are specific to PTS.
-VERDICT_STRINGS = {
-    'RESULT_PASS': 'PASS',
-    'RESULT_FAIL': 'FAIL',
-    'RESULT_INCONC': 'INCONC',
-    'RESULT_INCOMP':
-    'INCOMP',  # Initial final verdict meaning that test has not completed yet.
-    'RESULT_NONE':
-    'NONE',  # Error verdict usually indicating internal PTS error.
-}
-
-# Sniffer ready log message.
-SNIFFER_READY = 'SNIFFER/Save and clear complete'
-
-# PTS Log Types as defined by PTS:
-LOG_TYPE_GENERAL_TEXT = 0
-LOG_TYPE_FIRST = 1
-LOG_TYPE_START_TEST_CASE = 1
-LOG_TYPE_TEST_CASE_ENDED = 2
-LOG_TYPE_START_DEFAULT = 3
-LOG_TYPE_DEFAULT_ENDED = 4
-LOG_TYPE_FINAL_VERDICT = 5
-LOG_TYPE_PRELIMINARY_VERDICT = 6
-LOG_TYPE_TIMEOUT = 7
-LOG_TYPE_ASSIGNMENT = 8
-LOG_TYPE_START_TIMER = 9
-LOG_TYPE_STOP_TIMER = 10
-LOG_TYPE_CANCEL_TIMER = 11
-LOG_TYPE_READ_TIMER = 12
-LOG_TYPE_ATTACH = 13
-LOG_TYPE_IMPLICIT_SEND = 14
-LOG_TYPE_GOTO = 15
-LOG_TYPE_TIMED_OUT_TIMER = 16
-LOG_TYPE_ERROR = 17
-LOG_TYPE_CREATE = 18
-LOG_TYPE_DONE = 19
-LOG_TYPE_ACTIVATE = 20
-LOG_TYPE_MESSAGE = 21
-LOG_TYPE_LINE_MATCHED = 22
-LOG_TYPE_LINE_NOT_MATCHED = 23
-LOG_TYPE_SEND_EVENT = 24
-LOG_TYPE_RECEIVE_EVENT = 25
-LOG_TYPE_OTHERWISE_EVENT = 26
-LOG_TYPE_RECEIVED_ON_PCO = 27
-LOG_TYPE_MATCH_FAILED = 28
-LOG_TYPE_COORDINATION_MESSAGE = 29
-
-PTS_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
-
-
-def create(config):
-    if not config:
-        raise errors.PTS_DEVICE_EMPTY_CONFIG_MSG
-    return get_instance(config)
-
-
-def destroy(pts):
-    try:
-        pts[0].clean_up()
-    except:
-        pts[0].log.error("Failed to clean up properly.")
-
-
-def get_info(pts_devices):
-    """Get information from the BluetoothPtsDevice object.
-
-    Args:
-        pts_devices: A list of BluetoothPtsDevice objects although only one
-        will ever be specified.
-
-    Returns:
-        A dict, representing info for BluetoothPtsDevice object.
-    """
-    return {
-        "address": pts_devices[0].address,
-        "sniffer_ready": pts_devices[0].sniffer_ready,
-        "ets_manager_library": pts_devices[0].ets_manager_library,
-        "log_directory": pts_devices[0].log_directory,
-        "pts_installation_directory":
-        pts_devices[0].pts_installation_directory,
-    }
-
-
-def get_instance(config):
-    """Create BluetoothPtsDevice instance from a dictionary containing
-    information related to PTS. Namely the SIG root directory as
-    sig_root_directory and the log directory represented by the log_directory.
-
-    Args:
-        config: A dict that contains BluetoothPtsDevice device info.
-
-    Returns:
-        A list of BluetoothPtsDevice objects.
-    """
-    result = []
-    try:
-        log_directory = config.pop("log_directory")
-    except KeyError:
-        raise BluetoothPtsDeviceConfigError(
-            "Missing mandatory log_directory in config.")
-    try:
-        sig_root_directory = config.pop("sig_root_directory")
-    except KeyError:
-        example_path = \
-            "C:\\\\Program Files (x86)\\\\Bluetooth SIG"
-        raise BluetoothPtsDeviceConfigError(
-            "Missing mandatory sig_root_directory in config. Example path: {}".
-            format(example_path))
-
-    # "C:\\Program Files (x86)\\Bluetooth SIG\\Bluetooth PTS\\bin\\ETSManager.dll"
-    ets_manager_library = "{}\\Bluetooth PTS\\bin\\ETSManager.dll".format(
-        sig_root_directory)
-    # "C:\\Program Files (x86)\\Bluetooth SIG\\Bluetooth PTS\\bin"
-    pts_installation_directory = "{}\\Bluetooth PTS\\bin".format(
-        sig_root_directory)
-    # "C:\\Program Files (x86)\\Bluetooth SIG\\Bluetooth Protocol Viewer"
-    pts_sniffer_directory = "{}\\Bluetooth Protocol Viewer".format(
-        sig_root_directory)
-    result.append(
-        BluetoothPtsDevice(ets_manager_library, log_directory,
-                           pts_installation_directory, pts_sniffer_directory))
-    return result
-
-
-class BluetoothPtsDevice:
-    """Class representing an Bluetooth PTS device and associated functions.
-
-    Each object of this class represents one BluetoothPtsDevice in ACTS.
-    """
-
-    _next_action = -1
-    _observers = []
-    address = ""
-    current_implicit_send_description = ""
-    devices = []
-    extra_answers = []
-    log_directory = ""
-    log = None
-    ics = None
-    ixit = None
-    profile_under_test = None
-    pts_library = None
-    pts_profile_mmi_request = ""
-    pts_test_result = VERDICT_STRINGS['RESULT_INCOMP']
-    sniffer_ready = False
-    test_log_directory = ""
-    test_log_prefix = ""
-
-    def __init__(self, ets_manager_library, log_directory,
-                 pts_installation_directory, pts_sniffer_directory):
-        self.log = logging.getLogger()
-        if ets_manager_library is not None:
-            self.ets_manager_library = ets_manager_library
-        self.log_directory = log_directory
-        if pts_installation_directory is not None:
-            self.pts_installation_directory = pts_installation_directory
-        if pts_sniffer_directory is not None:
-            self.pts_sniffer_directory = pts_sniffer_directory
-        # Define callback functions
-        self.USEAUTOIMPLSENDFUNC = ctypes.CFUNCTYPE(ctypes.c_bool)
-        self.use_auto_impl_send_func = self.USEAUTOIMPLSENDFUNC(
-            self.UseAutoImplicitSend)
-
-        self.DONGLE_MSG_FUNC = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_char_p)
-        self.dongle_msg_func = self.DONGLE_MSG_FUNC(self.DongleMsg)
-
-        self.DEVICE_SEARCH_MSG_FUNC = ctypes.CFUNCTYPE(ctypes.c_bool,
-                                                       ctypes.c_char_p,
-                                                       ctypes.c_char_p,
-                                                       ctypes.c_char_p)
-        self.dev_search_msg_func = self.DEVICE_SEARCH_MSG_FUNC(
-            self.DeviceSearchMsg)
-
-        self.LOGFUNC = ctypes.CFUNCTYPE(ctypes.c_bool, ctypes.c_char_p,
-                                        ctypes.c_char_p, ctypes.c_char_p,
-                                        ctypes.c_int, ctypes.c_void_p)
-        self.log_func = self.LOGFUNC(self.Log)
-
-        self.ONIMPLSENDFUNC = ctypes.CFUNCTYPE(ctypes.c_char_p,
-                                               ctypes.c_char_p, ctypes.c_int)
-        self.onimplsend_func = self.ONIMPLSENDFUNC(self.ImplicitSend)
-
-        # Helps with PTS reliability.
-        os.chdir(self.pts_installation_directory)
-        # Load EtsManager
-        self.pts_library = ctypes.cdll.LoadLibrary(self.ets_manager_library)
-        self.log.info("ETS Manager library {0:s} has been loaded".format(
-            self.ets_manager_library))
-        # If post-logging is turned on all callbacks to LPLOG-type function
-        # will be executed after test execution is complete. It is recommended
-        # that post-logging is turned on to avoid simultaneous invocations of
-        # LPLOG and LPAUTOIMPLICITSEND callbacks.
-        self.pts_library.SetPostLoggingEx(True)
-
-        self.xml_root = Element("ARCHIVE")
-        version = Element("VERSION")
-        version.text = "2.0"
-        self.xml_root.append(version)
-        self.xml_pts_pixit = Element("PicsPixit")
-        self.xml_pts_pixit.text = ""
-        self.xml_pts_running_log = Element("LOG")
-        self.xml_pts_running_log.text = ""
-        self.xml_pts_running_summary = Element("SUMMARY")
-        self.xml_pts_running_summary.text = ""
-
-    def clean_up(self):
-        # Since we have no insight to the actual PTS library,
-        # catch all Exceptions and log them.
-        try:
-            self.log.info("Cleaning up Stack...")
-            self.pts_library.ExitStackEx(self.profile_under_test)
-        except Exception as err:
-            self.log.error(
-                "Failed to clean up BluetoothPtsDevice: {}".format(err))
-        try:
-            self.log.info("Unregistering Profile...")
-            self.pts_library.UnregisterProfileEx.argtypes = [ctypes.c_char_p]
-            self.pts_library.UnregisterProfileEx(
-                self.profile_under_test.encode())
-            self.pts_library.UnRegisterGetDevInfoEx()
-        except Exception as err:
-            self.log.error(
-                "Failed to clean up BluetoothPtsDevice: {}".format(err))
-        try:
-            self.log.info("Cleaning up Sniffer")
-            self.pts_library.SnifferTerminateEx()
-        except Exception as err:
-            self.log.error(
-                "Failed to clean up BluetoothPtsDevice: {}".format(err))
-        self.log.info("Cleanup Done.")
-
-    def write_xml_pts_pixit_values_for_current_test(self):
-        """ Writes the current PICS and IXIT values to the XML result.
-        """
-        self.xml_pts_pixit.text = "ICS VALUES:\n\n"
-        for key, value in self.ics.items():
-            self.xml_pts_pixit.text += "{} {}\n".format(
-                key.decode(), value.decode())
-        self.xml_pts_pixit.text += "\nIXIT VALUES:\n\n"
-        for key, (_, value) in self.ixit.items():
-            self.xml_pts_pixit.text += "{} {}\n".format(
-                key.decode(), value.decode())
-
-    def set_ics_and_ixit(self, ics, ixit):
-        self.ics = ics
-        self.ixit = ixit
-
-    def set_profile_under_test(self, profile):
-        self.profile_under_test = profile
-
-    def setup_pts(self):
-        """Prepares PTS to run tests. This needs to be called in test classes
-        after ICS, IXIT, and setting Profile under test.
-        Specifically BluetoothPtsDevice functions:
-            set_profile_under_test
-            set_ics_and_ixit
-        """
-
-        # Register layer to test with callbacks
-        self.pts_library.RegisterProfileWithCallbacks.argtypes = [
-            ctypes.c_char_p, self.USEAUTOIMPLSENDFUNC, self.ONIMPLSENDFUNC,
-            self.LOGFUNC, self.DEVICE_SEARCH_MSG_FUNC, self.DONGLE_MSG_FUNC
-        ]
-        res = self.pts_library.RegisterProfileWithCallbacks(
-            self.profile_under_test.encode(), self.use_auto_impl_send_func,
-            self.onimplsend_func, self.log_func, self.dev_search_msg_func,
-            self.dongle_msg_func)
-
-        self.log.info(
-            "Profile has been registered with result {0:d}".format(res))
-
-        # GetDeviceInfo module is for discovering devices and PTS Dongle address
-        # Initialize GetDeviceInfo and register it with callbacks
-        # First parameter is PTS executable directory
-        self.pts_library.InitGetDevInfoWithCallbacks.argtypes = [
-            ctypes.c_char_p, self.DEVICE_SEARCH_MSG_FUNC, self.DONGLE_MSG_FUNC
-        ]
-        res = self.pts_library.InitGetDevInfoWithCallbacks(
-            self.pts_installation_directory.encode(), self.dev_search_msg_func,
-            self.dongle_msg_func)
-        self.log.info(
-            "GetDevInfo has been initialized with result {0:d}".format(res))
-        # Initialize PTS dongle
-        res = self.pts_library.VerifyDongleEx()
-        self.log.info(
-            "PTS dongle has been initialized with result {0:d}".format(res))
-
-        # Find PTS dongle address
-        self.pts_library.GetDongleBDAddress.restype = ctypes.c_ulonglong
-        self.address = self.pts_library.GetDongleBDAddress()
-        self.address_str = "{0:012X}".format(self.address)
-        self.log.info("PTS BD Address 0x{0:s}".format(self.address_str))
-
-        # Initialize Bluetooth Protocol Viewer communication module
-        self.pts_library.SnifferInitializeEx()
-
-        # If Bluetooth Protocol Viewer is not running, start it
-        if not self.is_sniffer_running():
-            self.log.info("Starting Protocol Viewer")
-            args = [
-                "{}\Executables\Core\FTS.exe".format(
-                    self.pts_sniffer_directory),
-                '/PTS Protocol Viewer=Generic',
-                '/OEMTitle=Bluetooth Protocol Viewer', '/OEMKey=Virtual'
-            ]
-            subprocess.Popen(args)
-            sniffer_timeout = 10
-            while not self.is_sniffer_running():
-                time.sleep(sniffer_timeout)
-
-        # Register to recieve Bluetooth Protocol Viewer notofications
-        self.pts_library.SnifferRegisterNotificationEx()
-        self.pts_library.SetParameterEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p
-        ]
-
-        for ics_name in self.ics:
-            res = self.pts_library.SetParameterEx(
-                ics_name, b'BOOLEAN', self.ics[ics_name],
-                self.profile_under_test.encode())
-            if res:
-                self.log.info("ICS {0:s} set successfully".format(
-                    str(ics_name)))
-            else:
-                self.log.error("Setting ICS {0:s} value failed".format(
-                    str(ics_name)))
-
-        for ixit_name in self.ixit:
-            res = self.pts_library.SetParameterEx(
-                ixit_name, (self.ixit[ixit_name])[0],
-                (self.ixit[ixit_name])[1], self.profile_under_test.encode())
-            if res:
-                self.log.info("IXIT {0:s} set successfully".format(
-                    str(ixit_name)))
-            else:
-                self.log.error("Setting IXIT {0:s} value failed".format(
-                    str(ixit_name)))
-
-        # Prepare directory to store Bluetooth Protocol Viewer output
-        if not os.path.exists(self.log_directory):
-            os.makedirs(self.log_directory)
-
-        address_b = self.address_str.encode("utf-8")
-        self.pts_library.InitEtsEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p
-        ]
-
-        implicit_send_path = "{}\\implicit_send3.dll".format(
-            self.pts_installation_directory).encode()
-        res = self.pts_library.InitEtsEx(self.profile_under_test.encode(),
-                                         self.log_directory.encode(),
-                                         implicit_send_path, address_b)
-        self.log.info("ETS has been initialized with result {0:s}".format(
-            str(res)))
-
-        # Initialize Host Stack DLL
-        self.pts_library.InitStackEx.argtypes = [ctypes.c_char_p]
-        res = self.pts_library.InitStackEx(self.profile_under_test.encode())
-        self.log.info("Stack has been initialized with result {0:s}".format(
-            str(res)))
-
-        # Select to receive Log messages after test is done
-        self.pts_library.SetPostLoggingEx.argtypes = [
-            ctypes.c_bool, ctypes.c_char_p
-        ]
-        self.pts_library.SetPostLoggingEx(True,
-                                          self.profile_under_test.encode())
-
-        # Clear Bluetooth Protocol Viewer. Dongle message callback will update
-        # sniffer_ready automatically. No need to fail setup if the timeout
-        # is exceeded since the logs will still be available just not starting
-        # from a clean slate. Just post a warning.
-        self.sniffer_ready = False
-        self.pts_library.SnifferClearEx()
-        end_time = time.time() + 10
-        while not self.sniffer_ready and time.time() < end_time:
-            time.sleep(1)
-        if not self.sniffer_ready:
-            self.log.warning("Sniffer not cleared. Continuing.")
-
-    def is_sniffer_running(self):
-        """ Looks for running Bluetooth Protocol Viewer process
-
-        Returns:
-            Returns True if finds one, False otherwise.
-        """
-        prog = [
-            line.split()
-            for line in subprocess.check_output("tasklist").splitlines()
-        ]
-        [prog.pop(e) for e in [0, 1, 2]]
-        for task in prog:
-            task_name = task[0].decode("utf-8")
-            if task_name == "Fts.exe":
-                self.log.info("Found FTS process successfully.")
-                # Sleep recommended by PTS.
-                time.sleep(1)
-                return True
-        return False
-
-    def UseAutoImplicitSend(self):
-        """Callback method that defines Which ImplicitSend will be used.
-
-        Returns:
-            True always to inform PTS to use the local implementation.
-        """
-        return True
-
-    def DongleMsg(self, msg_str):
-        """ Receives PTS dongle messages.
-
-        Specifically this receives the Bluetooth Protocol Viewer completed
-        save/clear operations.
-
-        Returns:
-            True if sniffer is ready, False otherwise.
-        """
-        msg = (ctypes.c_char_p(msg_str).value).decode("utf-8")
-        self.log.info(msg)
-        # Sleep recommended by PTS.
-        time.sleep(1)
-        if SNIFFER_READY in msg:
-            self.sniffer_ready = True
-        return True
-
-    def DeviceSearchMsg(self, addr_str, name_str, cod_str):
-        """ Receives device search messages
-
-        Each device may return multiple messages
-        Each message will contain device address and may contain device name and
-        COD.
-
-        Returns:
-            True always and reports to the callback appropriately.
-        """
-        addr = (ctypes.c_char_p(addr_str).value).replace(b'\xed',
-                                                         b' ').decode("utf-8")
-        name = (ctypes.c_char_p(name_str).value).replace(b'\xed',
-                                                         b' ').decode("utf-8")
-        cod = (ctypes.c_char_p(cod_str).value).replace(b'\xed',
-                                                       b' ').decode("utf-8")
-        self.devices.append(
-            "Device address = {0:s} name = {1:s} cod = {2:s}".format(
-                addr, name, cod))
-        return True
-
-    def Log(self, log_time_str, log_descr_str, log_msg_str, log_type, project):
-        """ Receives PTS log messages.
-
-        Returns:
-            True always and reports to the callback appropriately.
-        """
-        log_time = (ctypes.c_char_p(log_time_str).value).decode("utf-8")
-        log_descr = (ctypes.c_char_p(log_descr_str).value).decode("utf-8")
-        log_msg = (ctypes.c_char_p(log_msg_str).value).decode("utf-8")
-        if "Verdict Description" in log_descr:
-            self.xml_pts_running_summary.text += "\t- {}".format(log_msg)
-        if "Final Verdict" in log_descr:
-            self.xml_pts_running_summary.text += "{}{}\n".format(
-                log_descr.strip(), log_msg.strip())
-        full_log_msg = "{}{}{}".format(log_time, log_descr, log_msg)
-        self.xml_pts_running_log.text += "{}\n".format(str(full_log_msg))
-
-        if ctypes.c_int(log_type).value == LOG_TYPE_FINAL_VERDICT:
-            indx = log_msg.find(VERDICT)
-            if indx == 0:
-                if self.pts_test_result == VERDICT_STRINGS['RESULT_INCOMP']:
-                    if VERDICT_STRINGS['RESULT_INCONC'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_INCONC']
-                    elif VERDICT_STRINGS['RESULT_FAIL'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_FAIL']
-                    elif VERDICT_STRINGS['RESULT_PASS'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_PASS']
-                    elif VERDICT_STRINGS['RESULT_NONE'] in log_msg:
-                        self.pts_test_result = VERDICT_STRINGS['RESULT_NONE']
-        return True
-
-    def ImplicitSend(self, description, style):
-        """ ImplicitSend callback
-
-        Implicit Send Styles:
-            MMI_Style_Ok_Cancel1 =     0x11041, Simple prompt           | OK, Cancel buttons      | Default: OK
-            MMI_Style_Ok_Cancel2 =     0x11141, Simple prompt           | Cancel button           | Default: Cancel
-            MMI_Style_Ok1 =            0x11040, Simple prompt           | OK button               | Default: OK
-            MMI_Style_Yes_No1 =        0x11044, Simple prompt           | Yes, No buttons         | Default: Yes
-            MMI_Style_Yes_No_Cancel1 = 0x11043, Simple prompt           | Yes, No buttons         | Default: Yes
-            MMI_Style_Abort_Retry1 =   0x11042, Simple prompt           | Abort, Retry buttons    | Default: Abort
-            MMI_Style_Edit1 =          0x12040, Request for data input  | OK, Cancel buttons      | Default: OK
-            MMI_Style_Edit2 =          0x12140, Select item from a list | OK, Cancel buttons      | Default: OK
-
-        Handling
-            MMI_Style_Ok_Cancel1
-                OK = return "OK"
-                Cancel = return 0
-
-            MMI_Style_Ok_Cancel2
-                OK = return "OK"
-                Cancel = return 0
-
-            MMI_Style_Ok1
-                OK = return "OK", this version should not return 0
-
-            MMI_Style_Yes_No1
-                Yes = return "OK"
-                No = return 0
-
-            MMI_Style_Yes_No_Cancel1
-                Yes = return "OK"
-                No = return 0
-                Cancel = has been deprecated
-
-            MMI_Style_Abort_Retry1
-                Abort = return 0
-                Retry = return "OK"
-
-            MMI_Style_Edit1
-                OK = return expected string
-                Cancel = return 0
-
-            MMI_Style_Edit2
-                OK = return expected string
-                Cancel = return 0
-
-        Receives ImplicitSend messages
-        Description format is as following:
-        {MMI_ID,Test Name,Layer Name}MMI Action\n\nDescription: MMI Description
-        """
-        descr_str = (ctypes.c_char_p(description).value).decode("utf-8")
-        # Sleep recommended by PTS.
-        time.sleep(1)
-        indx = descr_str.find('}')
-        implicit_send_info = descr_str[1:(indx)]
-        self.current_implicit_send_description = descr_str[(indx + 1):]
-        items = implicit_send_info.split(',')
-        implicit_send_info_id = items[0]
-        implicit_send_info_test_case = items[1]
-        self.pts_profile_mmi_request = items[2]
-        self.log.info(
-            "OnImplicitSend() has been called with the following parameters:\n"
-        )
-        self.log.info("\t\tproject_name = {0:s}".format(
-            self.pts_profile_mmi_request))
-        self.log.info("\t\tid = {0:s}".format(implicit_send_info_id))
-        self.log.info(
-            "\t\ttest_case = {0:s}".format(implicit_send_info_test_case))
-        self.log.info("\t\tdescription = {0:s}".format(
-            self.current_implicit_send_description))
-        self.log.info("\t\tstyle = {0:#X}".format(ctypes.c_int(style).value))
-        self.log.info("")
-        try:
-            self.next_action = int(implicit_send_info_id)
-        except Exception as err:
-            self.log.error(
-                "Setting verdict to RESULT_FAIL, exception found: {}".format(
-                    err))
-            self.pts_test_result = VERDICT_STRINGS['RESULT_FAIL']
-        res = b'OK'
-        if len(self.extra_answers) > 0:
-            res = self.extra_answers.pop(0).encode()
-        self.log.info("Sending Response: {}".format(res))
-        return res
-
-    def log_results(self, test_name):
-        """Log results.
-
-        Saves the sniffer results in cfa format and clears the sniffer.
-
-        Args:
-            test_name: string, name of the test run.
-        """
-        self.pts_library.SnifferCanSaveEx.restype = ctypes.c_bool
-        canSave = ctypes.c_bool(self.pts_library.SnifferCanSaveEx()).value
-        self.pts_library.SnifferCanSaveAndClearEx.restype = ctypes.c_bool
-        canSaveClear = ctypes.c_bool(
-            self.pts_library.SnifferCanSaveAndClearEx()).value
-        file_name = "\\{}.cfa".format(self.test_log_prefix).encode()
-        path = self.test_log_directory.encode() + file_name
-
-        if canSave == True:
-            self.pts_library.SnifferSaveEx.argtypes = [ctypes.c_char_p]
-            self.pts_library.SnifferSaveEx(path)
-        else:
-            self.pts_library.SnifferSaveAndClearEx.argtypes = [ctypes.c_char_p]
-            self.pts_library.SnifferSaveAndClearEx(path)
-        end_time = time.time() + 60
-        while self.sniffer_ready == False and end_time > time.time():
-            self.log.info("Waiting for sniffer to be ready...")
-            time.sleep(1)
-        if self.sniffer_ready == False:
-            raise BluetoothPtsSnifferError(
-                "Sniffer not ready after 60 seconds.")
-
-    def execute_test(self, test_name, test_timeout=60):
-        """Execute the input test name.
-
-        Preps PTS to run the test and waits up to 2 minutes for all steps
-        in the execution to finish. Cleanup of PTS related objects follows
-        any test verdict.
-
-        Args:
-            test_name: string, name of the test to execute.
-        """
-        today = datetime.now()
-        self.write_xml_pts_pixit_values_for_current_test()
-        # TODO: Find out how to grab the PTS version. Temporarily
-        # hardcoded to v.7.4.1.2.
-        self.xml_pts_pixit.text = (
-            "Test Case Started: {} v.7.4.1.2, {} started on {}\n\n{}".format(
-                self.profile_under_test, test_name,
-                today.strftime("%A, %B %d, %Y, %H:%M:%S"),
-                self.xml_pts_pixit.text))
-
-        self.xml_pts_running_summary.text += "Test case : {} started\n".format(
-            test_name)
-        log_time_formatted = "{:%Y_%m_%d_%H_%M_%S}".format(datetime.now())
-        formatted_test_name = test_name.replace('/', '_')
-        formatted_test_name = formatted_test_name.replace('-', '_')
-        self.test_log_prefix = "{}_{}".format(formatted_test_name,
-                                              log_time_formatted)
-        self.test_log_directory = "{}\\{}\\{}".format(self.log_directory,
-                                                      self.profile_under_test,
-                                                      self.test_log_prefix)
-        os.makedirs(self.test_log_directory)
-        curr_test = test_name.encode()
-
-        self.pts_library.StartTestCaseEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p, ctypes.c_bool
-        ]
-        res = self.pts_library.StartTestCaseEx(
-            curr_test, self.profile_under_test.encode(), True)
-        self.log.info("Test has been started with result {0:s}".format(
-            str(res)))
-
-        # Wait till verdict is received
-        self.log.info("Begin Test Execution... waiting for verdict.")
-        end_time = time.time() + test_timeout
-        while self.pts_test_result == VERDICT_STRINGS[
-                'RESULT_INCOMP'] and time.time() < end_time:
-            time.sleep(1)
-        self.log.info("End Test Execution... Verdict {}".format(
-            self.pts_test_result))
-
-        # Clean up after test is done
-        self.pts_library.TestCaseFinishedEx.argtypes = [
-            ctypes.c_char_p, ctypes.c_char_p
-        ]
-        res = self.pts_library.TestCaseFinishedEx(
-            curr_test, self.profile_under_test.encode())
-
-        self.log_results(test_name)
-        self.xml_pts_running_summary.text += "{} finished\n".format(test_name)
-        # Add the log results to the XML output
-        self.xml_root.append(self.xml_pts_pixit)
-        self.xml_root.append(self.xml_pts_running_log)
-        self.xml_root.append(self.xml_pts_running_summary)
-        rough_string = ET.tostring(self.xml_root,
-                                   encoding='utf-8',
-                                   method='xml')
-        reparsed = minidom.parseString(rough_string)
-        with open(
-                "{}\\{}.xml".format(self.test_log_directory,
-                                    self.test_log_prefix), "w") as writter:
-            writter.write(
-                reparsed.toprettyxml(indent="  ", encoding="utf-8").decode())
-
-        if self.pts_test_result is VERDICT_STRINGS['RESULT_PASS']:
-            return True
-        return False
-
-    """Observer functions"""
-
-    def bind_to(self, callback):
-        """ Callbacks to add to the observer.
-        This is used for DUTS automatic responses (ImplicitSends local
-        implementation).
-        """
-        self._observers.append(callback)
-
-    @property
-    def next_action(self):
-        return self._next_action
-
-    @next_action.setter
-    def next_action(self, action):
-        self._next_action = action
-        for callback in self._observers:
-            callback(self._next_action)
-
-    """End Observer functions"""
diff --git a/src/antlion/controllers/buds_controller.py b/src/antlion/controllers/buds_controller.py
deleted file mode 100644
index 0040d85..0000000
--- a/src/antlion/controllers/buds_controller.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""This is the controller module for Pixel Buds devices.
-
-For the device definition, see buds_lib.apollo_lib.
-"""
-
-from antlion.controllers.buds_lib.apollo_lib import ParentDevice
-
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'BudsDevice'
-ACTS_CONTROLLER_REFERENCE_NAME = 'buds_devices'
-
-
-class ConfigError(Exception):
-    """Raised when the configuration is malformatted."""
-
-
-def create(configs):
-    """Creates a Pixel Buds device for each config found within the configs.
-
-    Args:
-        configs: The configs can be structured in the following ways:
-
-                    ['serial1', 'serial2', ... ]
-
-                    [
-                        {
-                            'serial': 'serial1',
-                            'label': 'some_info',
-                            ...
-                        },
-                        {
-                            'serial': 'serial2',
-                            'label': 'other_info',
-                            ...
-                        }
-                    ]
-    """
-    created_controllers = []
-
-    if not isinstance(configs, list):
-        raise ConfigError('Malformatted config %s. Must be a list.' % configs)
-
-    for config in configs:
-        if isinstance(config, str):
-            created_controllers.append(ParentDevice(config))
-        elif isinstance(config, dict):
-            serial = config.get('serial', None)
-            if not serial:
-                raise ConfigError('Buds Device %s is missing entry "serial".' %
-                                  config)
-            created_controllers.append(ParentDevice(serial))
-        else:
-            raise ConfigError('Malformatted config: "%s". Must be a string or '
-                              'dict' % config)
-    return created_controllers
-
-
-def destroy(buds_device_list):
-    pass
-
-
-def get_info(buds_device_list):
-    device_infos = []
-    for buds_device in buds_device_list:
-        device_infos.append({'serial': buds_device.serial_number,
-                             'name': buds_device.device_name})
-    return device_infos
diff --git a/src/antlion/controllers/buds_lib/__init__.py b/src/antlion/controllers/buds_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/buds_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/buds_lib/apollo_lib.py b/src/antlion/controllers/buds_lib/apollo_lib.py
deleted file mode 100644
index 1a63c62..0000000
--- a/src/antlion/controllers/buds_lib/apollo_lib.py
+++ /dev/null
@@ -1,1514 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Apollo Commander through USB/UART interface.
-
-It uses python serial lib to communicate to a Apollo device.
-Some of the commander may not work yet, pending on the final version of the
-commander implementation.
-
-Typical usage examples:
-
-    To get a list of all apollo devices:
-    >>> devices = apollo_lib.get_devices()
-
-    To work with a specific apollo device:
-    >>> apollo = apollo_lib.Device(serial_number='ABCDEF0123456789',
-    >>> commander_port='/dev/ttyACM0')
-
-    To send a single command:
-    >>> apollo.cmd('PowOff')
-
-    To send a list of commands:
-    >>> apollo.cmd(['PowOff', 'PowOn', 'VolUp', 'VolDown']
-"""
-import atexit
-import os
-import re
-import subprocess
-import time
-from logging import Logger
-
-import serial
-from tenacity import retry, stop_after_attempt, wait_exponential
-
-from antlion.controllers.buds_lib import tako_trace_logger
-from antlion.controllers.buds_lib import logserial
-from antlion.controllers.buds_lib.b29_lib import B29Device
-from antlion.controllers.buds_lib.dev_utils import apollo_log_decoder
-from antlion.controllers.buds_lib.dev_utils import apollo_log_regex
-from antlion.controllers.buds_lib.dev_utils import apollo_sink_events
-
-logging = tako_trace_logger.TakoTraceLogger(Logger('apollo'))
-
-BAUD_RATE = 115200
-BYTE_SIZE = 8
-PARITY = 'N'
-STOP_BITS = 1
-DEFAULT_TIMEOUT = 3
-WRITE_TO_FLASH_WAIT = 30  # wait 30 sec when writing to external flash.
-LOG_REGEX = re.compile(r'(?P<time_stamp>\d+)\s(?P<msg>.*)')
-STATUS_REGEX = r'(?P<time_stamp>\d+)\s(?P<key>.+?): (?P<value>.+)'
-APOLLO_CHIP = '_Apollo_'
-DEVICE_REGEX = (
-    r'_(?P<device_serial>[A-Z0-9]+)-(?P<interface>\w+)'
-    r'\s->\s(\.\./){2}(?P<port>\w+)'
-)
-OTA_VERIFICATION_FAILED = 'OTA verification failed. corrupt image?'
-OTA_ERASING_PARTITION = 'INFO OTA eras ptns'
-OTA_RECEIVE_CSR_REGEX = r'INFO OTA CSR rcv begin'
-CODEC_REGEX = r'(?P<time_stamp>\d+)\s(?P<codec>\w+) codec is used.'
-BUILD_REGEX = r'\d+\.\d+\.(?P<build>\d+)-?(?P<psoc_build>\d*)-?(?P<debug>\w*)'
-
-
-class Error(Exception):
-    """Module Level Error."""
-
-
-class ResponseError(Error):
-    """cmd Response Error."""
-
-
-class DeviceError(Error):
-    """Device Error."""
-
-
-class ConnectError(Error):
-    """Connection Error."""
-
-
-def get_devices():
-    """Get all available Apollo devices.
-
-    Returns:
-        (list) A list of available devices or empty list if none found
-
-    Raises:
-        Error: raises Error if no Apollo devices or wrong interfaces were found.
-    """
-    devices = []
-    result = os.popen('ls -l /dev/serial/by-id/*%s*' % APOLLO_CHIP).read()
-    if not result:
-        raise Error('No Apollo Devices found.')
-    for line in result.splitlines():
-        match = re.search(DEVICE_REGEX, line)
-        interface = match.group('interface')
-        # TODO: The commander port will always be None.
-        commander_port = None
-        if interface == 'if00':
-            commander_port = '/dev/' + match.group('port')
-            continue
-        elif interface == 'if02':
-            log_port = '/dev/' + match.group('port')
-        else:
-            raise Error('Wrong interface found.')
-        device_serial = match.group('device_serial')
-
-        device = {
-            'commander_port': commander_port,
-            'log_port': log_port,
-            'serial_number': device_serial
-        }
-        devices.append(device)
-    return devices
-
-
-class BudsDevice(object):
-    """Provides a simple class to interact with Apollo."""
-
-    def __init__(self, serial_number, commander_port=None, log_port=None,
-                 serial_logger=None):
-        """Establish a connection to a Apollo.
-
-        Open a connection to a device with a specific serial number.
-
-        Raises:
-            ConnectError: raises ConnectError if cannot open the device.
-        """
-        self.set_log = False
-        self.connection_handle = None
-        self.device_closed = False
-        if serial_logger:
-            self.set_logger(serial_logger)
-        self.pc = logserial.PortCheck()
-        self.serial_number = serial_number
-        # TODO (kselvakumaran): move this to an interface device class that
-        # apollo_lib.BudsDevice should derive from
-        if not commander_port and not log_port:
-            self.get_device_ports(self.serial_number)
-        if commander_port:
-            self.commander_port = commander_port
-        if log_port:
-            self.log_port = log_port
-        self.apollo_log = None
-        self.cmd_log = None
-        self.apollo_log_regex = apollo_log_regex
-        self.dut_type = 'apollo'
-
-        # TODO (kselvakumaran): move this to an interface device class that
-        # apollo_lib.BudsDevice should derive from
-
-        try:  # Try to open the device
-            self.connection_handle = logserial.LogSerial(
-                self.commander_port, BAUD_RATE, flush_output=False,
-                serial_logger=logging)
-            self.wait_for_commander()
-        except (serial.SerialException, AssertionError, ConnectError) as e:
-            logging.error(
-                'error opening device {}: {}'.format(serial_number, e))
-            raise ConnectError('Error open the device.')
-        # disable sleep on idle
-        self.stay_connected_state = 1
-        atexit.register(self.close)
-
-    def set_logger(self, serial_logger):
-        global logging
-        logging = serial_logger
-        self.set_log = True
-        if self.connection_handle:
-            self.connection_handle.set_logger(serial_logger)
-
-    def get_device_ports(self, serial_number):
-        commander_query = {'ID_SERIAL_SHORT': serial_number,
-                           'ID_USB_INTERFACE_NUM': '00'}
-        log_query = {'ID_SERIAL_SHORT': serial_number,
-                     'ID_USB_INTERFACE_NUM': '02'}
-        self.commander_port = self.pc.search_port_by_property(commander_query)
-        self.log_port = self.pc.search_port_by_property(log_query)
-        if not self.commander_port and not self.log_port:
-            raise ConnectError(
-                'BudsDevice serial number %s not found' % serial_number)
-        else:
-            if not self.commander_port:
-                raise ConnectError('No devices found')
-            self.commander_port = self.commander_port[0]
-            self.log_port = self.log_port[0]
-
-    def get_all_log(self):
-        return self.connection_handle.get_all_log()
-
-    def query_log(self, from_timestamp, to_timestamp):
-        return self.connection_handle.query_serial_log(
-            from_timestamp=from_timestamp, to_timestamp=to_timestamp)
-
-    def send(self, cmd):
-        """Sends the command to serial port.
-
-        It does not care about whether the cmd is successful or not.
-
-        Args:
-            cmd: The passed command
-
-        Returns:
-            The number of characters written
-        """
-        logging.debug(cmd)
-        # with self._lock:
-        self.connection_handle.write(cmd)
-        result = self.connection_handle.read()
-        return result
-
-    def cmd(self, cmds, wait=None):
-        """Sends the commands and check responses.
-
-        Valid cmd will return something like '585857269 running cmd VolUp'.
-        Invalid cmd will log an error and return something like '585826369 No
-        command vol exists'.
-
-        Args:
-            cmds: The commands to the commander.
-            wait: wait in seconds for the cmd response.
-
-        Returns:
-            (list) The second element of the array returned by _cmd.
-        """
-        if isinstance(cmds, str):
-            cmds = [cmds]
-        results = []
-        for cmd in cmds:
-            _, result = self._cmd(cmd, wait=wait)
-            results.append(result)
-        return results
-
-    def _cmd(self, cmd, wait=None, throw_error=True):
-        """Sends a single command and check responses.
-
-        Valid cmd will return something like '585857269 running cmd VolUp'.
-        Invalid cmd will log an error and return something like '585826369 No
-        command vol exists'. Some cmd will return multiple lines of output.
-        eg. 'menu'.
-
-        Args:
-            cmd: The command to the commander.
-            wait: wait in seconds for the cmd response.
-            throw_error: Throw exception on True
-
-        Returns:
-            (list) containing such as the following:
-            [<return value>, [<protobuf dictionary>, str]]
-            Hex strings (protobuf) are replaced by its decoded dictionaries
-            and stored in an arry along with other string returned fom the
-            device.
-
-        Raises:
-            DeviceError: On Error.(Optional)
-        """
-        self.connection_handle.write(cmd)
-
-        while self.connection_handle.is_logging:
-            time.sleep(.01)
-        if wait:
-            self.wait(wait)
-        # Using read_serial_port as readlines is a blocking call until idle.
-        res = self.read_serial_port()
-        result = []
-        self.cmd_log = res
-        command_resv = False
-        # TODO: Cleanup the usage of the two booleans below.
-        command_finish = False
-        command_rejected = False
-        # for line in iter_res:
-        for line in res:
-            if isinstance(line, dict):
-                if 'COMMANDER_RECV_COMMAND' in line.values():
-                    command_resv = True
-                elif 'COMMANDER_REJECT_COMMAND' in line.values():
-                    logging.info('Command rejected')
-                    command_rejected = True
-                    break
-                elif 'COMMANDER_FINISH_COMMAND' in line.values():
-                    command_finish = True
-                    break
-                elif (command_resv and not command_finish and
-                      not command_rejected):
-                    result.append(line)
-            # TODO(jesussalinas): Remove when only encoded lines are required
-            elif command_resv and not command_finish and not command_rejected:
-                if 'running cmd' not in line:
-                    result.append(line)
-        success = True
-        if command_rejected or not command_resv:
-            success = False
-            if throw_error:
-                logging.info(res)
-                raise DeviceError('Unknown command %s' % cmd)
-        return success, result
-
-    def get_pdl(self):
-        """Returns the PDL stack dictionary.
-
-        The PDL stack stores paired devices of Apollo. Each PDL entry include
-        mac_address, flags, link_key, priority fields.
-
-        Returns:
-            list of pdl dicts.
-        """
-        # Get the mask from CONNLIB41:
-        # CONNLIB41 typically looks something like this: 2403 fff1
-        # 2403 fff1 is actually two 16-bit words of a 32-bit integer
-        # like 0xfff12403 . This tells the chronological order of the entries
-        # in the paired device list one nibble each. LSB to MSB corresponds to
-        # CONNLIB42 through CONNLIB49. So, the above tells us that the device at
-        # 0x2638 is the 3rd most recent entry 0x2639 the latest entry etc. As
-        # a device re-pairs the masks are updated.
-        response = []
-        mask = 'ffffffff'
-        res = self.cmd('GetPSHex 0x2637')
-        if len(res[0]) == 0:
-            logging.warning('Error reading PDL mask @ 0x2637')
-            return response
-        else:
-            regexp = r'\d+\s+(?P<m1>....)\s(?P<m2>....)'
-            match = re.match(regexp, res[0][0])
-            if match:
-                connlib41 = match.group('m2') + match.group('m1')
-                mask = connlib41[::-1]
-                logging.debug('PDL mask: %s' % mask)
-
-        # Now get the MAC/link key
-        mask_idx = 0
-        for i in range(9784, 9883):
-            types = {}
-            res = self.cmd('GetPSHex ' + '%0.2x' % i)
-            if len(res[0]) == 0:
-                break
-            else:
-                regexp = ('\d+\s+(?P<Mac>....\s....\s....)\s'
-                          '(?P<Flags>....\s....)\s(?P<Linkkey>.*)')
-                match = re.search(regexp, res[0][0])
-                if match:
-                    mac_address = match.group('Mac').replace(' ', '').upper()
-                    formatted_mac = ''
-                    for i in range(len(mac_address)):
-                        formatted_mac += mac_address[i]
-                        if i % 2 != 0 and i < (len(mac_address) - 1):
-                            formatted_mac += ':'
-                    types['mac_address'] = formatted_mac
-                    types['flags'] = match.group('Flags').replace(' ', '')
-                    types['link_key'] = match.group('Linkkey').replace(' ', '')
-                    types['priority'] = int(mask[mask_idx], 16)
-                    mask_idx += 1
-                    response.append(types)
-
-        return response
-
-    def set_pairing_mode(self):
-        """Enter Bluetooth Pairing mode."""
-        logging.debug('Inside set_pairing_mode()...')
-        try:
-            return self.cmd('Pair')
-        except DeviceError:
-            logging.exception('Pair cmd failed')
-
-    # TODO (kselvakumaran): move this to an interface BT class that
-    # apollo_lib.BudsDevice should derive from
-    def turn_on_bluetooth(self):
-        return True
-
-    # TODO (kselvakumaran): move this to an interface BT class that
-    # apollo_lib.BudsDevice should derive from
-    def is_bt_enabled(self):
-        """Check if BT is enabled.
-
-        (TODO:weisu)Currently it is always true since there is no way to disable
-        BT in apollo
-
-        Returns:
-            True if BT is enabled.
-        """
-        logging.debug('Inside is_bt_enabled()...')
-        return True
-
-    def panic(self):
-        """Hitting a panic, device will be automatically reset after 5s."""
-        logging.debug('Inside panic()...')
-        try:
-            self.send('panic')
-        except serial.SerialException:
-            logging.exception('panic cmd failed')
-
-    def power(self, cmd):
-        """Controls the power state of the device.
-
-        Args:
-            cmd: If 'Off', powers the device off. Otherwise, powers the device
-                 on.
-        """
-        logging.debug('Inside power({})...'.format(cmd))
-        mode = '0' if cmd == 'Off' else '1'
-        cmd = 'Pow ' + mode
-        try:
-            return self.cmd(cmd)
-        except DeviceError:
-            logging.exception('{} cmd failed'.format(cmd))
-
-    def charge(self, state):
-        """Charging Control of the device.
-
-        Args:
-          state: '1/0' to enable/disable charging.
-        """
-        logging.debug('Inside charge({})...'.format(state))
-        cmd = 'chg ' + state
-        try:
-            self.cmd(cmd)
-        except DeviceError:
-            logging.exception('{} cmd failed'.format(cmd))
-
-    def get_battery_level(self):
-        """Get the battery charge level.
-
-        Returns:
-            charge percentage string.
-
-        Raises:
-            DeviceError: GetBatt response error.
-        """
-        response = self.cmd('GetBatt')
-        for line in response[0]:
-            if line.find('Batt:') > -1:
-                # Response if in this format '<messageID> Batt: <percentage>'
-                return line.split()[2]
-        raise DeviceError('Battery Level not found in GetBatt response')
-
-    def get_gas_gauge_current(self):
-        """Get the Gauge current value.
-
-        Returns:
-            Float value with the info
-
-        Raises:
-            DeviceError: I2CRead response error.
-        """
-        response = self.cmd('I2CRead 2 0x29')
-        for line in response[0]:
-            if line.find('value') > -1:
-                return float.fromhex(line.split()[6].replace(',', ''))
-        raise DeviceError('Current Level not found in I2CRead response')
-
-    def get_gas_gauge_voltage(self):
-        """Get the Gauge voltage value.
-
-        Returns:
-            Float value with the info
-
-        Raises:
-            DeviceError: I2CRead response error.
-        """
-        response = self.cmd('I2CRead 2 0x2A')
-        for line in response[0]:
-            if line.find('value') > -1:
-                return float.fromhex(line.split()[6].replace(',', ''))
-        raise DeviceError('Voltage Level not found in I2CRead response')
-
-    def reset(self, wait=5):
-        """Resetting the device."""
-        logging.debug('Inside reset()...')
-        self.power('Off')
-        self.wait(wait)
-        self.power('On')
-
-    def close(self):
-        if not self.device_closed:
-            self.connection_handle.close()
-            self.device_closed = True
-            if not self.set_log:
-                logging.flush_log()
-
-    def get_serial_log(self):
-        """Retrieve the logs from connection handle."""
-        return self.connection_handle.get_all_log()
-
-    def factory_reset(self):
-        """Erase paired device(s) (bond) data and reboot device."""
-        cmd = 'FactoryReset 1'
-        self.send(cmd)
-        self.wait(5)
-        self.reconnect()
-
-    def reboot(self, reconnect=10, retry_timer=30):
-        """Rebooting the device.
-
-        Args:
-            reconnect: reconnect attempts after reboot, None for no reconnect.
-            retry_timer: wait time in seconds before next connect retry.
-
-        Returns:
-            True if successfully reboot or reconnect.
-        """
-        logging.debug('Inside reboot()...')
-        self.panic()
-        if not reconnect:
-            return True
-        ini_time = time.time()
-        message = 'waiting for {} to shutdown'.format(self.serial_number)
-        logging.info(message)
-        while True:
-            alive = self.connection_handle.is_port_alive()
-            if not alive:
-                logging.info('rebooted')
-                break
-            if time.time() - ini_time > 60:
-                logging.info('Shutdown timeouted')
-                break
-            time.sleep(0.5)
-        return self.reconnect(reconnect, retry_timer)
-
-    def reconnect(self, iterations=30, retry_timer=20):
-        """Reconnect to the device.
-
-        Args:
-            iterations: Number of retry iterations.
-            retry_timer: wait time in seconds before next connect retry.
-
-        Returns:
-            True if reconnect to the device successfully.
-
-        Raises:
-            DeviceError: Failed to reconnect.
-        """
-        logging.debug('Inside reconnect()...')
-        for i in range(iterations):
-            try:
-                # port might be changed, refresh the port list.
-                self.get_device_ports(self.serial_number)
-                message = 'commander_port: {}, log_port: {}'.format(
-                    self.commander_port, self.log_port)
-                logging.info(message)
-                self.connection_handle.refresh_port_connection(
-                    self.commander_port)
-                # Sometimes there might be sfome delay when commander is
-                # functioning.
-                self.wait_for_commander()
-                return True
-            except Exception as e:  # pylint: disable=broad-except
-                message = 'Fail to connect {} times due to {}'.format(
-                    i + 1, e)
-                logging.warning(message)
-                # self.close()
-                time.sleep(retry_timer)
-        raise DeviceError('Cannot reconnect to %s with %d attempts.',
-                          self.commander_port, iterations)
-
-    @retry(stop=stop_after_attempt(4),
-           wait=wait_exponential())
-    def wait_for_commander(self):
-        """Wait for commander to function.
-
-        Returns:
-            True if commander worked.
-
-        Raises:
-            DeviceError: Failed to bring up commander.
-        """
-        # self.Flush()
-        result = self.cmd('menu')
-        if result:
-            return True
-        else:
-            raise DeviceError('Cannot start commander.')
-
-    def wait(self, timeout=1):
-        """Wait for the device."""
-        logging.debug('Inside wait()...')
-        time.sleep(timeout)
-
-    def led(self, cmd):
-        """LED control of the device."""
-        message = 'Inside led({})...'.format(cmd)
-        logging.debug(message)
-        cmd = 'EventUsrLeds' + cmd
-        try:
-            return self.cmd(_evt_hex(cmd))
-        except DeviceError:
-            logging.exception('LED cmd failed')
-
-    def volume(self, key, times=1):
-        """Volume Control. (Down/Up).
-
-        Args:
-            key: Down --Decrease a volume.
-                 Up --Increase a volume.
-            times: Simulate number of swipes.
-
-        Returns:
-            (int) Volume level.
-
-        Raises:
-            DeviceError
-        """
-        message = 'Inside volume({}, {})...'.format(key, times)
-        logging.debug(message)
-        updown = {
-            'Up': '1',
-            'Down': '0',
-        }
-        cmds = ['ButtonSwipe ' + updown[key]] * times
-        logging.info(cmds)
-        try:
-            self.cmd(cmds)
-            for line in self.cmd_log:
-                if isinstance(line, dict):
-                    if 'id' in line and line['id'] == 'VOLUME_CHANGE':
-                        if 'data' in line and line['data']:
-                            return int(line['data'])
-        except DeviceError:
-            logging.exception('ButtonSwipe cmd failed')
-
-    def menu(self):
-        """Return a list of supported commands."""
-        logging.debug('Inside menu()...')
-        try:
-            return self.cmd('menu')
-        except DeviceError:
-            logging.exception('menu cmd failed')
-
-    def set_ohd(self, mode='AUTO'):
-        """Manually set the OHD status and override auto-detection.
-
-        Args:
-            mode: ON --OHD manual mode with on-ear state.
-                  OFF --OHD manual mode with off-ear state.
-                  AUTO --OHD auto-detection mode.
-        Raises:
-            DeviceError: OHD Command failure.
-        """
-        logging.debug('Inside set_ohd()...')
-        try:
-            if mode != 'AUTO':
-                # Set up OHD manual mode
-                self.cmd('Test 14 0 2 1')
-                if mode == 'ON':
-                    # Detects on-ear
-                    self.cmd('Test 14 0 2 1 0x3')
-                else:
-                    # Detects off-ear
-                    self.cmd('Test 14 0 2 1 0x0')
-            else:
-                # Default mode (auto detect.)
-                self.cmd('Test 14 0 2 0')
-        except DeviceError:
-            logging.exception('OHD cmd failed')
-
-    def music_control_events(self, cmd, regexp=None, wait=.5):
-        """Sends the EvtHex to control media player.
-
-        Arguments:
-            cmd: the command to perform.
-            regexp: Optional pattern to validate the event logs.
-
-        Returns:
-            Boolean: True if the command triggers the correct events on the
-                     device, False otherwise.
-
-        # TODO(nviboonchan:) Add more supported commands.
-        Supported commands:
-            'PlayPause'
-            'VolumeUp'
-            'VolumeDown',
-        """
-        cmd_regexp = {
-            # Play/ Pause would need to pass the regexp argument since it's
-            # sending the same event but returns different responses depending
-            # on the device state.
-            'VolumeUp': apollo_log_regex.VOLUP_REGEX,
-            'VolumeDown': apollo_log_regex.VOLDOWN_REGEX,
-        }
-        if not regexp:
-            if cmd not in cmd_regexp:
-                logmsg = 'Expected pattern is not defined for event %s' % cmd
-                logging.exception(logmsg)
-                return False
-            regexp = cmd_regexp[cmd]
-        self.cmd('EvtHex %s' % apollo_sink_events.SINK_EVENTS['EventUsr' + cmd],
-                 wait=wait)
-        for line in self.cmd_log:
-            if isinstance(line, str):
-                if re.search(regexp, line):
-                    return True
-            elif isinstance(line, dict):
-                if line.get('id', None) == 'AVRCP_PLAY_STATUS_CHANGE':
-                    return True
-        return False
-
-    def avrcp(self, cmd):
-        """sends the Audio/Video Remote Control Profile (avrcp) control command.
-
-        Supported commands:
-            'PlayPause'
-            'Stop'
-            'SkipForward',
-            'SkipBackward',
-            'FastForwardPress',
-            'FastForwardRelease',
-            'RewindPress',
-            'RewindRelease',
-            'ShuffleOff',
-            'ShuffleAllTrack',
-            'ShuffleGroup',
-            'RepeatOff':,
-            'RepeatSingleTrack',
-            'RepeatAllTrack',
-            'RepeatGroup',
-            'Play',
-            'Pause',
-            'ToggleActive',
-            'NextGroupPress',
-            'PreviousGroupPress',
-            'NextGroupRelease',
-            'PreviousGroupRelease',
-
-        Args:
-            cmd: The avrcp command.
-
-        """
-        cmd = 'EventUsrAvrcp' + cmd
-        logging.debug(cmd)
-        try:
-            self.cmd(_evt_hex(cmd))
-        except DeviceError:
-            logging.exception('avrcp cmd failed')
-
-    def enable_log(self, levels=None):
-        """Enable specified logs."""
-        logging.debug('Inside enable_log()...')
-        if levels is None:
-            levels = ['ALL']
-        masks = hex(
-            sum([int(apollo_sink_events.LOG_FEATURES[x], 16) for x in levels]))
-        try:
-            self.cmd('LogOff %s' % apollo_sink_events.LOG_FEATURES['ALL'])
-            return self.cmd('LogOn %s' % masks)
-        except DeviceError:
-            logging.exception('Enable log failed')
-
-    def disable_log(self, levels=None):
-        """Disable specified logs."""
-        logging.debug('Inside disable_log()...')
-        if levels is None:
-            levels = ['ALL']
-        masks = hex(
-            sum([int(apollo_sink_events.LOG_FEATURES[x], 16) for x in levels]))
-        try:
-            self.cmd('LogOn %s' % apollo_sink_events.LOG_FEATURES['ALL'])
-            return self.cmd('LogOff %s' % masks)
-        except DeviceError:
-            logging.exception('Disable log failed')
-
-    def write_to_flash(self, file_name=None):
-        """Write file to external flash.
-
-        Note: Assume pv is installed. If not, install it by
-              'apt-get install pv'.
-
-        Args:
-            file_name: Full path file name.
-
-        Returns:
-            Boolean: True if write to partition is successful. False otherwise.
-        """
-        logging.debug('Inside write_to_flash()...')
-        if not os.path.isfile(file_name):
-            message = 'DFU file %s not found.'.format(file_name)
-            logging.exception(message)
-            return False
-        logging.info(
-            'Write file {} to external flash partition ...'.format(file_name))
-        image_size = os.path.getsize(file_name)
-        logging.info('image size is {}'.format(image_size))
-        results = self.cmd('Ota {}'.format(image_size), wait=3)
-        logging.debug('Result of Ota command' + str(results))
-        if any(OTA_VERIFICATION_FAILED in result for result in results[0]):
-            return False
-        # finished cmd Ota
-        if (any('OTA_ERASE_PARTITION' in result.values() for result in
-                results[0] if
-                isinstance(result, dict)) or
-                any('OTA erasd ptns' in result for result in results[0])):
-            try:
-                # -B: buffer size in bytes, -L rate-limit in B/s.
-                subcmd = ('pv --force -B 160 -L 10000 %s > %s' %
-                          (file_name, self.commander_port))
-                logging.info(subcmd)
-                p = subprocess.Popen(subcmd, stdout=subprocess.PIPE, shell=True)
-            except OSError:
-                logging.exception(
-                    'pv not installed, please install by: apt-get install pv')
-                return False
-            try:
-                res = self.read_serial_port(read_until=6)
-            except DeviceError:
-                logging.exception('Unable to read the device port')
-                return False
-            for line in res:
-                if isinstance(line, dict):
-                    logging.info(line)
-                else:
-                    match = re.search(OTA_RECEIVE_CSR_REGEX, line)
-                    if match:
-                        logging.info(
-                            'OTA Image received. Transfer is in progress...')
-                        # Polling during a transfer could miss the final message
-                        # when the device reboots, so we wait until the transfer
-                        # completes.
-                        p.wait()
-                        return True
-            # No image transfer in progress.
-            return False
-        else:
-            return False
-
-    def flash_from_file(self, file_name, reconnect=True):
-        """Upgrade Apollo from an image file.
-
-        Args:
-            file_name: DFU file name. eg. /google/data/ro/teams/wearables/
-                       apollo/ota/master/v76/apollo.dfu
-            reconnect: True to reconnect the device after flashing
-        Returns:
-            Bool: True if the upgrade is successful. False otherwise.
-        """
-        logging.debug('Inside flash_from_file()...')
-        if self.write_to_flash(file_name):
-            logging.info('OTA image transfer is completed')
-            if reconnect:
-                # Transfer is completed; waiting for the device to reboot.
-                logging.info('wait to make sure old connection disappears.')
-                self.wait_for_reset(timeout=150)
-                self.reconnect()
-                logging.info('BudsDevice reboots successfully after OTA.')
-            return True
-
-    def open_mic(self, post_delay=5):
-        """Open Microphone on the device using EvtHex command.
-
-        Args:
-            post_delay: time delay in seconds after the microphone is opened.
-
-        Returns:
-            Returns True or False based on whether the command was executed.
-        """
-        logging.debug('Inside open_mic()...')
-        success, _ = self._cmd('Voicecmd 1', post_delay)
-        return success
-
-    def close_mic(self, post_delay=5):
-        """Close Microphone on the device using EvtHex command.
-
-        Args:
-            post_delay: time delay in seconds after the microphone is closed.
-
-        Returns:
-            Returns true or false based on whether the command was executed.
-        """
-        logging.debug('Inside close_mic()...')
-        success, _ = self._cmd('Voicecmd 0', post_delay)
-        return success
-
-    def touch_key_press_event(self, wait=1):
-        """send key press event command.
-
-        Args:
-            wait: Inject delay after key press to simulate real touch event .
-        """
-        logging.debug('Inside KeyPress()...')
-        self._cmd('Touch 6')
-        self.wait(wait)
-
-    def touch_tap_event(self, wait_if_pause=10):
-        """send key release event after key press to simulate single tap.
-
-        Args:
-            wait_if_pause: Inject delay after avrcp pause was detected.
-
-        Returns:
-            Returns False if avrcp play orp ause not detected else True.
-        """
-        logging.debug('Inside Touch Tap event()...')
-        self._cmd('Touch 4')
-        for line in self.cmd_log:
-            if 'avrcp play' in line:
-                logging.info('avrcp play detected')
-                return True
-            if 'avrcp pause' in line:
-                logging.info('avrcp pause detected')
-                self.wait(wait_if_pause)
-                return True
-        return False
-
-    def touch_hold_up_event(self):
-        """Open Microphone on the device using touch hold up command.
-
-        Returns:
-            Returns True or False based on whether the command was executed.
-        """
-        logging.debug('Inside open_mic()...')
-        self._cmd('Touch 3')
-        for line in self.cmd_log:
-            if 'Button 1 LONG_BEGIN' in line:
-                logging.info('mic open success')
-                return True
-        return False
-
-    def touch_hold_down_event(self):
-        """Close Microphone on the device using touch hold down command.
-
-        Returns:
-            Returns true or false based on whether the command was executed.
-        """
-        logging.debug('Inside close_mic()...')
-        self._cmd('Touch 8')
-        for line in self.cmd_log:
-            if 'Button 1 LONG_END' in line:
-                logging.info('mic close success')
-                return True
-        return False
-
-    def tap(self):
-        """Performs a Tap gesture."""
-        logging.debug('Inside tap()')
-        self.cmd('ButtonTap 0')
-
-    def hold(self, duration):
-        """Tap and hold a button.
-
-        Args:
-            duration: (int) duration in milliseconds.
-        """
-        logging.debug('Inside hold()')
-        self.cmd('ButtonHold ' + str(duration))
-
-    def swipe(self, direction):
-        """Perform a swipe gesture.
-
-        Args:
-            direction: (int) swipe direction 1 forward, 0 backward.
-        """
-        logging.debug('Inside swipe()')
-        self.cmd('ButtonSwipe ' + direction)
-
-    def get_pskey(self, key):
-        """Fetch value from persistent store."""
-        try:
-            cmd = 'GetPSHex ' + apollo_sink_events.PSKEY[key]
-        except KeyError:
-            raise DeviceError('PS Key: %s not found' % key)
-        pskey = ''
-        try:
-            ret = self.cmd(cmd)
-            for result in ret[0]:
-                if not re.search(r'pskey', result.lower()) and LOG_REGEX.match(
-                        result):
-                    # values are broken into words separated by spaces.
-                    pskey += LOG_REGEX.match(result).group('msg').replace(' ',
-                                                                          '')
-                else:
-                    continue
-        except DeviceError:
-            logging.exception('GetPSHex cmd failed')
-        return pskey
-
-    def get_version(self):
-        """Return a device version information.
-
-        Note: Version information is obtained from the firmware loader. Old
-        information is lost when firmware is updated.
-        Returns:
-            A dictionary of device version info. eg.
-            {
-                'Fw Build': '73',
-                'OTA Status': 'No OTA performed before this boot',
-            }
-
-        """
-        logging.debug('Inside get_version()...')
-        success, result = self._cmd('GetVer', throw_error=False)
-        status = {}
-        if result:
-            for line in result:
-                if isinstance(line, dict):
-                    status['build'] = line['vm_build_number']
-                    status['psoc_build'] = line['psoc_version']
-                    status['debug'] = line['csr_fw_debug_build']
-                    status['Fw Build Label'] = line['build_label']
-                    if 'last_ota_status' in line.keys():
-                        # Optional value in the proto response
-                        status['OTA Status'] = line['last_ota_status']
-                    else:
-                        status['OTA Status'] = 'No info'
-        return success, status
-
-    def get_earcon_version(self):
-        """Return a device Earson version information.
-
-        Returns:
-            Boolean:  True if success, False otherwise.
-            String: Earon Version e.g. 7001 0201 6100 0000
-
-        """
-        # TODO(nviboonchan): Earcon version format would be changed in the
-        # future.
-        logging.debug('Inside get_earcon_version()...')
-        result = self.get_pskey('PSKEY_EARCON_VERSION')
-        if result:
-            return True, result
-        else:
-            return False, None
-
-    def get_bt_status(self):
-        """Return a device bluetooth connection information.
-
-        Returns:
-            A dictionary of bluetooth status. eg.
-            {
-                'Comp. App': 'FALSE',
-               'HFP (pri.)', 'FALSE',
-               'HFP (sec.)': 'FALSE',
-               'A2DP (pri.)': 'FALSE',
-               'A2DP (sec.)': 'FALSE',
-               'A2DP disconnects': '3',
-               'A2DP Role (pri.)': 'slave',
-               'A2DP RSSI (pri.)': '-Touch'
-            }
-        """
-        logging.debug('Inside get_bt_status()...')
-        return self._get_status('GetBTStatus')
-
-    def get_conn_devices(self):
-        """Gets the BT connected devices.
-
-        Returns:
-            A dictionary of BT connected devices. eg.
-            {
-                'HFP Pri': 'xxxx',
-                'HFP Sec': 'xxxx',
-                'A2DP Pri': 'xxxx',
-                'A2DP Sec': 'xxxx',
-                'RFCOMM devices': 'xxxx',
-                'CTRL': 'xxxx',
-                'AUDIO': 'None',
-                'DEBUG': 'None',
-                'TRANS': 'None'
-             }
-
-        Raises:
-            ResponseError: If unexpected response occurs.
-        """
-        response_regex = re.compile('[0-9]+ .+: ')
-        connected_status = {}
-        response = self.cmd('GetConnDevices')
-        if not response:
-            raise ResponseError(
-                'No response returned by GetConnDevices command')
-        for line in response[0]:
-            if response_regex.search(line):
-                profile, value = line[line.find(' '):].split(':', 1)
-                connected_status[profile] = value
-        if not connected_status:
-            raise ResponseError('No BT Profile Status in response.')
-        return connected_status
-
-    def _get_status(self, cmd):
-        """Return a device status information."""
-        status = {}
-        try:
-            results = self.cmd(cmd)
-        except DeviceError as ex:
-            # logging.exception('{} cmd failed'.format(cmd))
-            logging.warning('Failed to get device status info.')
-            raise ex
-        results = results[0]
-        for result in results:
-            match = re.match(STATUS_REGEX, result)
-            if match:
-                key = match.group('key')
-                value = match.group('value')
-                status.update({key: value})
-        return status
-
-    def is_streaming(self):
-        """Returns the music streaming status on Apollo.
-
-        Returns:
-            Boolean: True if device is streaming music. False otherwise.
-        """
-
-        status = self.cmd('GetDSPStatus')
-        if any('active feature mask: 0' in log for log in
-               status[0]):
-            return False
-        elif any('active feature mask: 2' in log for log in
-                 status[0]):
-            return True
-        else:
-            return False
-
-    def is_in_call(self):
-        """Returns the phone call status on Apollo.
-
-        Returns:
-            Boolean: True if device has incoming call. False otherwise.
-        """
-
-        status = self.cmd('GetDSPStatus')
-        if not any('Inc' or 'out' in log for log in status[0]):
-            return False
-        return True
-
-    def is_device_limbo(self):
-        """Check if device is in Limbo state.
-
-        Returns:
-            Boolean: True if device is in limbo state, False otherwise.
-        """
-        device_state = self.get_device_state()
-        logging.info('BudsDevice "{}" state {}'.format(self.serial_number,
-                                                       device_state))
-        return device_state == 'limbo'
-
-    def get_device_state(self):
-        """Get state of the device.
-
-        Returns:
-            String representing the device state.
-
-        Raises:
-            DeviceError: If command fails.
-        """
-        _, status = self._cmd('GetDSPStatus')
-        for stat in status:
-            if isinstance(stat, dict):
-                logging.info(stat)
-                return stat['sink_state'].lower()
-        raise DeviceError('BudsDevice state not found in GetDSPStatus.')
-
-    def set_stay_connected(self, value):
-        """Run command to set the value for SetAlwaysConnected.
-
-        Args:
-            value: (int) 1 to keep connection engages at all time,
-                         0 for restoring
-        Returns:
-            the set state of type int (0 or 1) or None if not applicable
-        """
-
-        if int(self.version) >= 1663:
-            self._cmd('SetAlwaysConnected {}'.format(value))
-            logging.info('Setting sleep on idle to {}'.format(value))
-            return value
-
-    def get_codec(self):
-        """Get device's current audio codec.
-
-        Returns:
-            String representing the audio codec.
-
-        Raises:
-            DeviceError: If command fails.
-        """
-        success, status = self._cmd('get_codec')
-        logging.info('---------------------------------------')
-        logging.info(status)
-        logging.info('---------------------------------------')
-        if success:
-            for line in status:
-                if isinstance(line, dict):
-                    logging.info('Codec found: %s'.format(line['codec']))
-                    return line['codec']
-        raise DeviceError('BudsDevice state not found in get_codec.')
-
-    def crash_dump_detection(self):
-        """Reads crash dump determines if a crash is detected.
-
-        Returns:
-            True if crash detection is supported and if a new crash is found.
-            False otherwise.
-        """
-        # Detects if crashdump output is new
-        new_crash_regex = r'new crash = ([01]+)'
-        # filter crashdump for just the trace
-        crash_stack_regex = r'BASIC(.*)\n[\d]+ APP_STACK(.*)\n'
-        # remove time stamp commander output
-        timestamp_remover_regex = '\n[\\d]+ '
-
-        logging.debug('Inside IsCrashDumpDetection()...')
-        cmd_return = self.cmd('CrashDump', wait=1)
-        crash_dump_str = '\n'.join(cmd_return[0])
-        logging.info(crash_dump_str)
-        try:
-            # check for crash
-            match = re.search(new_crash_regex, crash_dump_str)
-            if match is not None:
-                if match.groups()[0] == '1':  # new crash found
-                    logging.error('Crash detected!!')
-                    basic, app_stack = re.search(crash_stack_regex,
-                                                 crash_dump_str,
-                                                 re.DOTALL).groups()
-                    # remove time stamps from capture
-                    basic = re.sub(timestamp_remover_regex, '', basic)
-                    app_stack = re.sub(timestamp_remover_regex, '', app_stack)
-                    # write to log
-                    # pylint: disable=bad-whitespace
-                    logging.info(
-                        '\n&270d = %s\n&270e = %s\n' % (basic, app_stack))
-                    # pylint: enable=bad-whitespace
-                    return True
-                else:  # no new crash
-                    logging.info('No crash detected')
-                    return False
-        except AttributeError:
-            logging.exception(
-                'Apollo crash dump output is not in expected format')
-            raise DeviceError('Apollo crash dump not in expected format')
-
-    @property
-    def version(self):
-        """Application version.
-
-        Returns:
-            (String) Firmware version.
-        """
-        _, result = self.get_version()
-        return result['build']
-
-    @property
-    def bluetooth_address(self):
-        """Bluetooth MAC address.
-
-        Returns:
-            a string representing 48bit BT MAC address in Hex.
-
-        Raises:
-            DeviceError: Unable to find BT Address
-        """
-        results = self.get_pskey('PSKEY_BDADDR')
-        if not results:
-            raise DeviceError('Unable to find BT Address')
-        logging.info(results)
-        # Bluetooth lower address part, upper address part and non-significant
-        # address part.
-        bt_lap = results[2:8]
-        bt_uap = results[10:12]
-        bt_nap = results[12:16]
-        results = bt_nap + bt_uap + bt_lap
-
-        return ':'.join(map(''.join, zip(*[iter(results)] * 2))).upper()
-
-    @property
-    def device_name(self):
-        """Device Friendly Name.
-
-        Returns:
-            a string representing device friendly name.
-
-        Raises:
-            DeviceError: Unable to find a wearable device name.
-        """
-        result = self.get_pskey('PSKEY_DEVICE_NAME')
-        if not result:
-            raise DeviceError('Unable to find BudsDevice Name')
-        logging.info(_to_ascii(result))
-        return _to_ascii(result)
-
-    @property
-    def stay_connected(self):
-        return self.stay_connected_state
-
-    @stay_connected.setter
-    def stay_connected(self, value):
-        self.stay_connected_state = self.set_stay_connected(value)
-
-    def read_serial_port(self, read_until=None):
-        """Read serial port until specified read_until value in seconds."""
-        # use default read_until value if not specified
-        if read_until:
-            time.sleep(read_until)
-        res = self.connection_handle.read()
-        buf_read = []
-        for line in res:
-            if apollo_log_decoder.is_automation_protobuf(line):
-                decoded = apollo_log_decoder.decode(line)
-                buf_read.append(decoded)
-            else:
-                buf_read.append(line)
-        return buf_read
-
-    def wait_for_reset(self, timeout=30):
-        """waits for the device to reset by check serial enumeration.
-
-        Checks every .5 seconds for the port.
-
-        Args:
-            timeout: The max time to wait for the device to disappear.
-
-        Returns:
-            Bool: True if the device reset was detected. False if not.
-        """
-        start_time = time.time()
-        while True:
-            res = subprocess.Popen(['ls', self.commander_port],
-                                   stdout=subprocess.PIPE,
-                                   stderr=subprocess.PIPE)
-            res.communicate()
-            if res.returncode != 0:
-                logging.info('BudsDevice reset detected')
-                return True
-            elif (time.time() - start_time) > timeout:
-                logging.info('Timeout waiting for device to reset.....')
-                return False
-            else:
-                time.sleep(.5)
-
-    def set_in_case(self, reconnect=True):
-        """Simulates setting apollo in case and wait for device to come up.
-
-        Args:
-            reconnect: bool - if method should block until reconnect
-        """
-        logging.info('Setting device in case')
-        out = self.send('Pow 2')
-        for i in out:
-            if 'No OTA wakeup condition' in i:
-                logging.info('No wake up condition.')
-            elif 'STM Wakeup 10s' in i:
-                logging.info('Wake up condition detected.')
-        if reconnect:
-            self.wait_for_reset()
-            self.reconnect()
-
-
-class ParentDevice(BudsDevice):
-    """Wrapper object for Device that addresses b10 recovery and build flashing.
-
-    Recovery mechanism:
-    In case a serial connection could not be established to b10, the recovery
-    mechanism is activated  ONLY if'recover_device' is set to 'true' and
-    b29_serial is defined in config file. This helps recover a device that has a
-    bad build installed.
-    """
-
-    def __init__(self, serial_number, recover_device=False, b29_serial=None):
-        # if recover device parameter is supplied and there is an error in
-        # instantiating B10 try to recover device instantiating b10 has to fail
-        # at most $tries_before_recovery time before initiating a recovery
-        # try to run the recovery at most $recovery_times before raising Error
-        # after the first recovery attempt failure try to reset b29 each
-        # iteration
-        self.b29_device = None
-        if recover_device:
-            if b29_serial is None:
-                logging.error('B29 serial not defined')
-                raise Error(
-                    'Recovery failed because "b29_serial" definition not '
-                    'present in device manifest file')
-            else:
-                self.b29_device = B29Device(b29_serial)
-            tries_before_recovery = 5
-            recovery_tries = 5
-            for attempt in range(tries_before_recovery):
-                try:
-                    # build crash symptoms varies based on the nature of the
-                    # crash connectError is thrown if the device never shows up
-                    # in /dev/ sometimes device shows and can connect but
-                    # sending commands fails or crashes apollo in that case,
-                    # DeviceError is thrown
-                    super().__init__(serial_number, commander_port=None,
-                                     log_port=None, serial_logger=None)
-                    break
-                except (ConnectError, DeviceError) as ex:
-                    logging.warning(
-                        'Error initializing apollo object - # of attempt '
-                        'left : %d' % (tries_before_recovery - attempt - 1))
-                    if attempt + 1 >= tries_before_recovery:
-                        logging.error(
-                            'Retries exhausted - now attempting to restore '
-                            'golden image')
-                        for recovery_attempt in range(recovery_tries):
-                            if not self.b29_device.restore_golden_image():
-                                logging.error('Recovery failed - retrying...')
-                                self.b29_device.reset_charger()
-                                continue
-                            # try to instantiate now
-                            try:
-                                super().__init__(serial_number,
-                                                 commander_port=None,
-                                                 log_port=None,
-                                                 serial_logger=None)
-                                break
-                            except (ConnectError, DeviceError):
-                                if recovery_attempt == recovery_tries - 1:
-                                    raise Error(
-                                        'Recovery failed - ensure that there '
-                                        'is no mismatching serial numbers of '
-                                        'b29 and b10 is specified in config')
-                                else:
-                                    logging.warning(
-                                        'Recovery attempt failed - retrying...')
-                    time.sleep(2)
-        else:
-            super().__init__(serial_number, commander_port=None, log_port=None,
-                             serial_logger=None)
-        # set this to prevent sleep
-        self.set_stay_connected(1)
-
-    def get_info(self):
-        information_dictionary = {}
-        information_dictionary['type'] = self.dut_type
-        information_dictionary['serial'] = self.serial_number
-        information_dictionary['log port'] = self.log_port
-        information_dictionary['command port'] = self.commander_port
-        information_dictionary['bluetooth address'] = self.bluetooth_address
-        success, build_dict = self.get_version()
-        information_dictionary['build'] = build_dict
-        # Extract the build number as a separate key. Useful for BigQuery.
-        information_dictionary['firmware build number'] = build_dict.get(
-            'build', '9999')
-        information_dictionary['name'] = self.device_name
-        if self.b29_device:
-            information_dictionary['b29 serial'] = self.b29_device.serial
-            information_dictionary['b29 firmware'] = self.b29_device.fw_version
-            information_dictionary['b29 commander port'] = self.b29_device.port
-            information_dictionary[
-                'b29 app version'] = self.b29_device.app_version
-        return information_dictionary
-
-    def setup(self, **kwargs):
-        """
-
-        Args:
-            apollo_build: if specified, will be used in flashing the device to
-                          that build prior to running any of the tests. If not
-                          specified flashing is skipped.
-        """
-        if 'apollo_build' in kwargs and kwargs['apollo_build'] is not None:
-            build = kwargs['apollo_build']
-            X20_REGEX = re.compile(r'/google/data/')
-            if not os.path.exists(build) or os.stat(build).st_size == 0:
-                # if x20 path, retry on file-not-found error or if file size is
-                # zero b/c X20 path does not update immediately
-                if X20_REGEX.match(build):
-                    for i in range(20):
-                        # wait until file exists and size is > 0 w/ 6 second
-                        # interval on retry
-                        if os.path.exists(build) and os.stat(build).st_size > 0:
-                            break
-
-                        if i == 19:
-                            logging.error('Build path (%s) does not exist or '
-                                          'file size is 0 - aborted' % build)
-
-                            raise Error('Specified build path (%s) does not '
-                                        'exist or file size is 0' % build)
-                        else:
-                            logging.warning('Build path (%s) does not exist or '
-                                            'file size is 0 - retrying...' %
-                                            build)
-                            time.sleep(6)
-                else:
-                    raise Error('Specified build path (%s) does not exist or '
-                                'file size is 0' % build)
-                self.flash_from_file(file_name=build, reconnect=True)
-        else:
-            logging.info('Not flashing apollo.')
-
-    def teardown(self, **kwargs):
-        self.close()
-
-
-def _evt_hex(cmd):
-    return 'EvtHex ' + apollo_sink_events.SINK_EVENTS[cmd]
-
-
-def _to_ascii(orig):
-    # Returned value need to be byte swapped. Remove last octet if it is 0.
-    result = _byte_swap(orig)
-    result = result[:-2] if result[-2:] == '00' else result
-    return bytearray.fromhex(result).decode()
-
-
-def _byte_swap(orig):
-    """Simple function to swap bytes order.
-
-    Args:
-        orig: original string
-
-    Returns:
-        a string with bytes swapped.
-        eg. orig = '6557276920736952006f'.
-        After swap, return '57656927732052696f00'
-    """
-    return ''.join(
-        sum([(c, d, a, b) for a, b, c, d in zip(*[iter(orig)] * 4)], ()))
diff --git a/src/antlion/controllers/buds_lib/apollo_utils.py b/src/antlion/controllers/buds_lib/apollo_utils.py
deleted file mode 100644
index 98c9be8..0000000
--- a/src/antlion/controllers/buds_lib/apollo_utils.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from antlion import signals
-from antlion.controllers.buds_lib import apollo_lib
-
-AVRCP_WAIT_TIME = 3
-
-
-def get_serial_object(pri_ad, serial_device):
-    """This function will creates object for serial device connected.
-
-    Args:
-        pri_ad: Android device.
-        serial_device: serial device connected.
-
-    Returns:
-        object of serial device, otherwise Abort the class.
-    """
-    devices = apollo_lib.get_devices()
-    for device in devices:
-        if device['serial_number'] in serial_device:
-            return apollo_lib.BudsDevice(device['serial_number'])
-    pri_ad.log.error('Apollo device not found')
-    raise signals.TestAbortAll('Apollo device not found')
-
-
-def avrcp_actions(pri_ad, buds_device):
-    """Performs avrcp controls like volume up, volume down
-
-    Args:
-        pri_ad: Android device.
-        buds_device: serial device object to perform avrcp actions.
-
-    Returns:
-        True if successful, otherwise otherwise raises Exception.
-    """
-    pri_ad.log.debug("Setting voume to 0")
-    pri_ad.droid.setMediaVolume(0)
-    current_volume = pri_ad.droid.getMediaVolume()
-    pri_ad.log.info('Current volume to {}'.format(current_volume))
-    for _ in range(5):
-        buds_device.volume('Up')
-        time.sleep(AVRCP_WAIT_TIME)
-    pri_ad.log.info('Volume increased to {}'.format(
-        pri_ad.droid.getMediaVolume()))
-    if current_volume == pri_ad.droid.getMediaVolume():
-        pri_ad.log.error('Increase volume failed')
-        raise signals.TestFailure("Increase volume failed")
-    current_volume = pri_ad.droid.getMediaVolume()
-    for _ in range(5):
-        buds_device.volume('Down')
-        time.sleep(AVRCP_WAIT_TIME)
-    pri_ad.log.info('Volume decreased to {}'.format(
-        pri_ad.droid.getMediaVolume()))
-    if current_volume == pri_ad.droid.getMediaVolume():
-        pri_ad.log.error('Decrease volume failed')
-        raise signals.TestFailure("Decrease volume failed")
-    return True
diff --git a/src/antlion/controllers/buds_lib/b29_lib.py b/src/antlion/controllers/buds_lib/b29_lib.py
deleted file mode 100644
index 32839c1..0000000
--- a/src/antlion/controllers/buds_lib/b29_lib.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Class definition of B29 device for controlling the device.
-
-B29 is an engineering device with serial capabilities. It is almost like
-b20 except it has additional features that allow sending commands
-to b10 via one-wire and to pull logs from b10 via one-wire.
-
-Please see https://docs.google.com/document/d/17yJeJRNWxv5E9
-fBvw0sXkgwCBkshU_l4SxWkKgAxVmk/edit for details about available operations.
-"""
-
-import os
-import re
-import time
-from logging import Logger
-
-from antlion import utils
-from antlion.controllers.buds_lib import tako_trace_logger
-
-logging = tako_trace_logger.TakoTraceLogger(Logger(__file__))
-DEVICE_REGEX = (
-    r'_(?P<device_serial>[A-Z0-9]+)-(?P<interface>\w+)\s->\s'
-    r'(\.\./){2}(?P<port>\w+)'
-)
-# TODO: automate getting the latest version from x20
-DEBUG_BRIDGE = ('/google/data/ro/teams/wearables/apollo/ota/jenkins-presubmit/'
-                'ovyalov/master/apollo-sw/CL14060_v2-build13686/v13686/'
-                'automation/apollo_debug_bridge/linux2/apollo_debug_bridge')
-B29_CHIP = 'Cypress_Semiconductor_USBUART'
-
-
-# TODO:
-# as the need arises, additional functionalities of debug_bridge should be
-# integrated
-# TODO:
-# https://docs.google.com/document/d/17yJeJRNWxv5E9fBvw0sXkgwCBkshU_
-# l4SxWkKgAxVmk/edit
-
-class B29Error(Exception):
-    """Module Level Error."""
-
-
-def get_b29_devices():
-    """ Get all available B29 devices.
-
-    Returns:
-      (list) A list of available devices (ex: ['/dev/ttyACM4',...]) or empty
-      list if none found
-    """
-    devices = []
-    result = os.popen('ls -l /dev/serial/by-id/*%s*' % B29_CHIP).read()
-    for line in result.splitlines():
-        match = re.search(DEVICE_REGEX, line)
-        device_serial = match.group('device_serial')
-        log_port = None
-        commander_port = '/dev/' + match.group('port')
-        device = {
-            'commander_port': commander_port,
-            'log_port': log_port,
-            'serial_number': device_serial
-        }
-        devices.append(device)
-    return devices
-
-
-class B29Device(object):
-    """Class to control B29 device."""
-
-    def __init__(self, b29_serial):
-        """ Class to control B29 device
-        Args: String type of serial number (ex: 'D96045152F121B00'
-        """
-        self.serial = b29_serial
-        b29_port = [d['commander_port'] for d in get_b29_devices() if
-                    d['serial_number'] == b29_serial]
-        if not b29_port:
-            logging.error("unable to find b29 with serial number %s" %
-                          b29_serial)
-            raise B29Error(
-                "Recovery failed because b29_serial specified in device "
-                "manifest file is not found or invalid")
-        self.port = b29_port[0]
-        self.ping_match = {'psoc': r'Pings: tx=[\d]* rx=[1-9][0-9]',
-                           'csr': r'count=100, sent=[\d]*, received=[1-9][0-9]',
-                           'charger': r'Pings: tx=[\d]* rx=[1-9][0-9]'}
-        self.fw_version = self._get_version('fw')
-        self.app_version = self._get_version('app')
-
-    def _get_version(self, type='fw'):
-        """ Method to get version of B29
-        Returns:
-            String version if found (ex: '0006'), None otherwise
-        """
-        command = '--serial={}'.format(self.port)
-        debug_bridge_process = self._send_command(command=command)
-        if type == 'fw':
-            version_match = re.compile(r'CHARGER app version: version=([\d]*)')
-        elif type == 'app':
-            version_match = re.compile(r'APP VERSION: ([\d]*)')
-        version_str = self._parse_output_of_running_process(
-            debug_bridge_process, version_match)
-        debug_bridge_process.kill()
-        if version_str:
-            match = version_match.search(version_str)
-            version = match.groups()[0]
-            return version
-        return None
-
-    def _parse_output_of_running_process(self, subprocess, match, timeout=30):
-        """ Parses the logs from subprocess objects and checks to see if a
-        match is found within the allotted time
-        Args:
-            subprocess: object returned by _send_command (which is the same as
-            bject returned by subprocess.Popen()) match: regex match object
-            (what is returned by re.compile(r'<regex>') timeout: int - time to
-            keep retrying before bailing
-
-        """
-        start_time = time.time()
-        success_match = re.compile(match)
-        while start_time + timeout > time.time():
-            out = subprocess.stderr.readline()
-            if success_match.search(out):
-                return out
-            time.sleep(.5)
-        return False
-
-    def _send_command(self, command):
-        """ Send command to b29 using apollo debug bridge
-        Args:
-          command: The command for apollo debug to execute
-        Returns:
-          subprocess object
-        """
-        return utils.start_standing_subprocess(
-            '{} {} {}'.format(DEBUG_BRIDGE, '--rpc_port=-1', command),
-            shell=True)
-
-    def restore_golden_image(self):
-        """ Start a subprocess that calls the debug-bridge executable with
-        options that restores golden image of b10 attached to the b29. The
-        recovery restores the 'golden image' which is available in b10 partition
-         8. The process runs for 120 seconds which is adequate time for the
-         recovery to have completed.
-        """
-        # TODO:
-        # because we are accessing x20, we need to capture error resulting from
-        #  expired prodaccess and report it explicitly
-        # TODO:
-        # possibly file not found error?
-
-        # start the process, wait for two minutes and kill it
-        logging.info('Restoring golden image...')
-        command = '--serial=%s --debug_spi=dfu --sqif_partition=8' % self.port
-        debug_bridge_process = self._send_command(command=command)
-        success_match = re.compile('DFU on partition #8 successfully initiated')
-        if self._parse_output_of_running_process(debug_bridge_process,
-                                                 success_match):
-            logging.info('Golden image restored successfully')
-            debug_bridge_process.kill()
-            return True
-        logging.warning('Failed to restore golden image')
-        debug_bridge_process.kill()
-        return False
-
-    def ping_component(self, component, timeout=30):
-        """ Send ping to the specified component via B290
-        Args:
-            component = 'csr' or 'psoc' or 'charger'
-        Returns:
-            True if successful and False otherwise
-        """
-        if component not in ('csr', 'psoc', 'charger'):
-            raise B29Error('specified parameter for component is not valid')
-        logging.info('Pinging %s via B29...' % component)
-        command = '--serial={} --ping={}'.format(self.port, component)
-        debug_bridge_process = self._send_command(command=command)
-        if self._parse_output_of_running_process(debug_bridge_process,
-                                                 self.ping_match[component],
-                                                 timeout):
-            logging.info('Ping passes')
-            debug_bridge_process.kill()
-            return True
-        else:
-            logging.warning('Ping failed')
-            debug_bridge_process.kill()
-            return False
-
-    def reset_charger(self):
-        """ Send reset command to B29
-        Raises: TimeoutError (lib.utils.TimeoutError) if the device does not
-        come back within 120 seconds
-        """
-        # --charger_reset
-        if int(self.fw_version) >= 6:
-            logging.info('Resetting B29')
-            command = '--serial={} --charger_reset'.format(self.port)
-            reset_charger_process = self._send_command(command=command)
-            time.sleep(2)
-            reset_charger_process.kill()
-            logging.info('Waiting for B29 to become available..')
-            utils.wait_until(lambda: self.ping_component('charger'), 120)
-        else:
-            logging.warning('B20 firmware version %s does not support '
-                            'charger_reset argument' % self.fw_version)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/__init__.py b/src/antlion/controllers/buds_lib/dev_utils/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_decoder.py b/src/antlion/controllers/buds_lib/dev_utils/apollo_log_decoder.py
deleted file mode 100644
index fafb05a..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_decoder.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Decodes the protobufs described in go/apollo-qa-tracing-design."""
-
-import base64
-import binascii
-import struct
-
-from antlion.controllers.buds_lib.dev_utils.proto.gen import apollo_qa_pb2
-from antlion.controllers.buds_lib.dev_utils.proto.gen import audiowear_pb2
-
-
-def to_dictionary(proto):
-    proto_dic = {}
-    msg = [element.split(':') for element in str(proto).split('\n') if element]
-    for element in msg:
-        key = element[0].strip()
-        value = element[1].strip()
-        proto_dic[key] = value
-    return proto_dic
-
-
-def is_automation_protobuf(logline):
-    return logline.startswith('QA_MSG|')
-
-
-def decode(logline):
-    """Decode the logline.
-
-    Args:
-      logline: String line with the encoded message.
-
-    Returns:
-      String value with the decoded message.
-    """
-    decoded = None
-    decoders = {'HEX': binascii.unhexlify, 'B64': base64.decodebytes}
-    msgs = {
-        apollo_qa_pb2.TRACE:
-            apollo_qa_pb2.ApolloQATrace,
-        apollo_qa_pb2.GET_VER_RESPONSE:
-            apollo_qa_pb2.ApolloQAGetVerResponse,
-        apollo_qa_pb2.GET_CODEC_RESPONSE:
-            apollo_qa_pb2.ApolloQAGetCodecResponse,
-        apollo_qa_pb2.GET_DSP_STATUS_RESPONSE:
-            apollo_qa_pb2.ApolloQAGetDspStatusResponse,
-    }
-
-    if is_automation_protobuf(logline):
-        _, encoding, message = logline.split("|", 2)
-        message = message.rstrip()
-        if encoding in decoders.keys():
-            message = decoders[encoding](message)
-            header = message[0:4]
-            serialized = message[4:]
-            if len(header) == 4 and len(serialized) == len(message) - 4:
-                msg_group, msg_type, msg_len = struct.unpack('>BBH', header)
-                if (len(serialized) == msg_len and
-                        msg_group == audiowear_pb2.APOLLO_QA):
-                    proto = msgs[msg_type]()
-                    proto.ParseFromString(serialized)
-                    decoded = to_dictionary(proto)
-    return decoded
diff --git a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_regex.py b/src/antlion/controllers/buds_lib/dev_utils/apollo_log_regex.py
deleted file mode 100644
index b4dd58a..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/apollo_log_regex.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Apollo's event logs regexp for each button action."""
-
-EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)\r\n')
-VOL_CHANGE_REGEX = (
-  r'(?P<time_stamp>\d+)\sVolume = (?P<vol_level>\d+)(.*)\r\n')
-VOLUP_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)3202(.*)\r\n')
-VOLDOWN_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)320a(.*)\r\n')
-AVRCP_PLAY_REGEX = (r'(?P<time_stamp>\d+)\sAVRCP '
-                    r'play\r\n')
-AVRCP_PAUSE_REGEX = (r'(?P<time_stamp>\d+)\sAVRCP '
-                     r'paused\r\n')
-MIC_OPEN_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3206\](.*)\r\n')
-MIC_CLOSE_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3207\](.*)\r\n')
-PREV_TRACK_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3208\](.*)\r\n')
-PREV_CHANNEL_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3209\](.*)\r\n')
-NEXT_TRACK_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3200\](.*)\r\n')
-NEXT_CHANNEL_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3201\](.*)\r\n')
-FETCH_NOTIFICATION_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
-  r'\[3205\](.*)\r\n')
-VOICE_CMD_COMPLETE_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])\sDspOnVoiceCommandComplete\r\n')
-VOICE_CMD_START_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])\sDspStartVoiceCommand(.*)\r\n')
-MIC_OPEN_PROMT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)AudioPromptPlay 33(.*)\r\n')
-MIC_CLOSE_PROMT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)AudioPromptPlay 34(.*)\r\n')
-POWER_ON_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z]) --hello--(.*)PowerOn(.*)\r\n')
-POWER_OFF_EVENT_REGEX = (
-  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z]) EvtAW:320d(.*)\r\n')
diff --git a/src/antlion/controllers/buds_lib/dev_utils/apollo_sink_events.py b/src/antlion/controllers/buds_lib/dev_utils/apollo_sink_events.py
deleted file mode 100644
index fcba1fd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/apollo_sink_events.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Apollo PS Keys and User Sink Events."""
-
-# Persistent Store (PS) Keys from rio_all_merged.psr.
-
-PSKEY = {
-    'PSKEY_BDADDR': '0x0001',
-    'PSKEY_DEVICE_NAME': '0x0108',
-    'PSKEY_DEEP_SLEEP_STATE': '0x0229',
-    'PSKEY_USB_VERSION': '0x02bc',
-    'PSKEY_USB_DEVICE_CLASS_CODES': '0x02bd',
-    'PSKEY_USB_VENDOR_ID': '0x02be',
-    'PSKEY_USB_PRODUCT_ID': '0x02bf',
-    'PSKEY_USB_PRODUCT_STRING': '0x02c2',
-    'PSKEY_USB_SERIAL_NUMBER_STRING': '0x02c3',
-    'PSKEY_EARCON_VERSION': '0x28b'
-}
-
-# Rio logging features from rio_log.h.
-
-LOG_FEATURES = {
-    'ALL': '0xffff',
-    'VERBOSE': '0x0001',
-    'TEST': '0x0002',
-    'CSR': '0x0004',
-    'DEBUG': '0x0008',
-    'INFO': '0x0010',
-    'ERROR': '0x0020',
-    'TIME_STAMP': '0x0040',
-}
-
-# Supported events from sink_events.h."""
-
-SINK_EVENTS = {
-    'EventUsrMicOpen': '0x3206',
-    'EventUsrMicClose': '0x3207',
-    'EventUsrPowerOn': '0x4001',
-    'EventUsrPowerOff': '0x4002',
-    'EventUsrEnterPairing': '0x4003',
-    'EventUsrInitateVoiceDial': '0x4004',
-    'EventUsrInitateVoiceDial_AG2': '0x4005',
-    'EventUsrLastNumberRedial': '0x4006',
-    'EventUsrLastNumberRedial_AG2': '0x4007',
-    'EventUsrAnswer': '0x4008',
-    'EventUsrReject': '0x4009',
-    'EventUsrCancelEnd': '0x400A',
-    'EventUsrTransferToggle': '0x400B',
-    'EventUsrMuteToggle': '0x400C',
-    'EventUsrVolumeUp': '0x400D',
-    'EventUsrVolumeDown': '0x400E',
-    'EventUsrVolumeToggle': '0x400F',
-    'EventUsrThreeWayReleaseAllHeld': '0x4010',
-    'EventUsrThreeWayAcceptWaitingReleaseActive': '0x4011',
-    'EventUsrThreeWayAcceptWaitingHoldActive': '0x4012',
-    'EventUsrThreeWayAddHeldTo3Way': '0x4013',
-    'EventUsrThreeWayConnect2Disconnect': '0x4014',
-    'EventUsrLedsOnOffToggle': '0x4015',
-    'EventUsrLedsOn': '0x4016',
-    'EventUsrLedsOff': '0x4017',
-    'EventUsrEstablishSLC': '0x4018',
-    'EventUsrMuteOn': '0x4019',
-    'EventUsrMuteOff': '0x401A',
-    'EventUsrEnterTXContTestMode': '0x401B',
-    'EventUsrEnterDUTState': '0x401C',
-    'EventUsrBassBoostEnableDisableToggle': '0x401D',
-    'EventUsrPlaceIncomingCallOnHold': '0x401E',
-    'EventUsrAcceptHeldIncomingCall': '0x401F',
-    'EventUsrRejectHeldIncomingCall': '0x4020',
-    'EventUsrEnterDFUMode': '0x4021',
-    'EventUsrEnterDriverlessDFUMode': '0x4022',
-    'EventUsrEnterServiceMode': '0x4023',
-    'EventUsrAudioPromptsOn': '0x4024',
-    'EventUsrAudioPromptsOff': '0x4025',
-    'EventUsrDialStoredNumber': '0x4026',
-    'EventUsrUpdateStoredNumber': '0x4027',
-    'EventUsrRestoreDefaults': '0x4028',
-    'EventUsrConfirmationAccept': '0x4029',
-    'EventUsrConfirmationReject': '0x402A',
-    'EventUsrSelectAudioPromptLanguageMode': '0x402B',
-    'EventUsrSwitchAudioMode': '0x402F',
-    'EventUsrButtonLockingOn': '0x4030',
-    'EventUsrButtonLockingOff': '0x4031',
-    'EventUsrButtonLockingToggle': '0x4032',
-    'EventUsrRssiPair': '0x4034',
-    'EventUsrBassBoostOn': '0x4035',
-    'EventUsrBassBoostOff': '0x4036',
-    'EventUsr3DEnhancementOn': '0x4037',
-    'EventUsr3DEnhancementOff': '0x4038',
-    'EventUsrSelectAudioSourceNext': '0x4039',
-    'EventUsrSelectAudioSourceAnalog': '0x403A',
-    'EventUsrSelectAudioSourceUSB': '0x403B',
-    'EventUsrSelectAudioSourceAG1': '0x403C',
-    'EventUsrSelectAudioSourceAG2': '0x403D',
-    'EventUsrSelectFMAudioSource': '0x403E',
-    'EventUsrSelectAudioSourceNone': '0x403F',
-    'EventUsrPbapDialIch': '0x4040',
-    'EventUsrPbapDialMch': '0x4041',
-    'EventUsrIntelligentPowerManagementOn': '0x4042',
-    'EventUsrIntelligentPowerManagementOff': '0x4043',
-    'EventUsrIntelligentPowerManagementToggle': '0x4044',
-    'EventUsrAvrcpPlayPause': '0x4045',
-    'EventUsrAvrcpStop': '0x4046',
-    'EventUsrAvrcpSkipForward': '0x4047',
-    'EventUsrAvrcpSkipBackward': '0x4048',
-    'EventUsrAvrcpFastForwardPress': '0x4049',
-    'EventUsrAvrcpFastForwardRelease': '0x404A',
-    'EventUsrAvrcpRewindPress': '0x404B',
-    'EventUsrAvrcpRewindRelease': '0x404C',
-    'EventUsrAvrcpShuffleOff': '0x404D',
-    'EventUsrAvrcpShuffleAllTrack': '0x404E',
-    'EventUsrAvrcpShuffleGroup': '0x404F',
-    'EventUsrAvrcpRepeatOff': '0x4050',
-    'EventUsrAvrcpRepeatSingleTrack': '0x4051',
-    'EventUsrAvrcpRepeatAllTrack': '0x4052',
-    'EventUsrAvrcpRepeatGroup': '0x4053',
-    'EventUsrAvrcpPlay': '0x4054',
-    'EventUsrAvrcpPause': '0x4055',
-    'EventUsrAvrcpToggleActive': '0x4056',
-    'EventUsrAvrcpNextGroupPress': '0x4057',
-    'EventUsrAvrcpPreviousGroupPress': '0x4058',
-    'EventUsrPbapSetPhonebook': '0x4059',
-    'EventUsrPbapBrowseEntry': '0x405A',
-    'EventUsrPbapBrowseList': '0x405B',
-    'EventUsrPbapDownloadPhonebook': '0x405C',
-    'EventUsrPbapSelectPhonebookObject': '0x405D',
-    'EventUsrPbapBrowseComplete': '0x405E',
-    'EventUsrPbapGetPhonebookSize': '0x405F',
-    'EventUsrUsbPlayPause': '0x4060',
-    'EventUsrUsbStop': '0x4061',
-    'EventUsrUsbFwd': '0x4062',
-    'EventUsrUsbBack': '0x4063',
-    'EventUsrUsbMute': '0x4064',
-    'EventUsrUsbLowPowerMode': '0x4065',
-    'EventUsrTestModeAudio': '0x4066',
-    'EventUsrTestModeTone': '0x4067',
-    'EventUsrTestModeKey': '0x4068',
-    'EventUsrTestDefrag': '0x4069',
-    'EventUsrDebugKeysToggle': '0x406A',
-    'EventUsrSpeechRecognitionTuningStart': '0x406B',
-    'EventUsrWbsTestSetCodecs': '0x406C',
-    'EventUsrWbsTestOverrideResponse': '0x406D',
-    'EventUsrWbsTestSetCodecsSendBAC': '0x406E',
-    'EventUsrCreateAudioConnection': '0x406F',
-    'EventUsrSwapA2dpMediaChannel': '0x407F',
-    'EventUsrExternalMicConnected': '0x4080',
-    'EventUsrExternalMicDisconnected': '0x4081',
-    'EventUsrSSROn': '0x4082',
-    'EventUsrSSROff': '0x4083',
-    'EventUsrPeerSessionInquire': '0x4089',
-    'EventUsrPeerSessionConnDisc': '0x408A',
-    'EventUsrPeerSessionEnd': '0x408B',
-    'EventUsrBatteryLevelRequest': '0x408C',
-    'EventUsrVolumeOrientationNormal': '0x408D',
-    'EventUsrVolumeOrientationInvert': '0x408E',
-    'EventUsrResetPairedDeviceList': '0x408F',
-    'EventUsrEnterDutMode': '0x4090',
-    'EventUsr3DEnhancementEnableDisableToggle': '0x4091',
-    'EventUsrRCVolumeUp': '0x4092',
-    'EventUsrRCVolumeDown': '0x4093',
-    'EventUsrEnterBootMode2': '0x4094',
-    'EventUsrChargerConnected': '0x4095',
-    'EventUsrChargerDisconnected': '0x4096',
-    'EventUsrSubwooferDisconnect': '0x4097',
-    'EventUsrAnalogAudioConnected': '0x4098',
-    'EventUsrAnalogAudioDisconnected': '0x4099',
-    'EventUsrGaiaDFURequest': '0x409A',
-    'EventUsrStartIRLearningMode': '0x409B',
-    'EventUsrStopIRLearningMode': '0x409C',
-    'EventUsrClearIRCodes': '0x409D',
-    'EventUsrUserEqOn': '0x409E',
-    'EventUsrUserEqOff': '0x409F',
-    'EventUsrUserEqOnOffToggle': '0x40A0',
-    'EventUsrSpdifAudioConnected': '0x40AD',
-    'EventUsrSpdifAudioDisconnected': '0x40AE',
-    'EventUsrSelectAudioSourceSpdif': '0x40AF',
-    'EventUsrChangeAudioRouting': '0x40B0',
-    'EventUsrMasterDeviceTrimVolumeUp': '0x40B1',
-    'EventUsrMasterDeviceTrimVolumeDown': '0x40B2',
-    'EventUsrSlaveDeviceTrimVolumeUp': '0x40B3',
-    'EventUsrSlaveDeviceTrimVolumeDown': '0x40B4',
-    'EventUsrEstablishPeerConnection': '0x40B5',
-    'EventUsrTwsQualificationEnablePeerOpen': '0x40B7',
-    'EventUsrBleStartBonding': '0x40D0',
-    'EventUsrBleSwitchPeripheral': '0x40D1',
-    'EventUsrBleSwitchCentral': '0x40D2',
-    'EventUsrImmAlertStop': '0x40D3',
-    'EventUsrLlsAlertStop': '0x40D4',
-    'EventUsrFindMyRemoteImmAlertMild': '0x40D5',
-    'EventUsrFindMyRemoteImmAlertHigh': '0x40D6',
-    'EventUsrFindMyPhoneImmAlertMild': '0x40D7',
-    'EventUsrFindMyPhoneImmAlertHigh': '0x40D8',
-    'EventUsrFindMyPhoneRemoteImmAlertHigh': '0x40D9',
-    'EventUsrPartyModeOn': '0x40DA',
-    'EventUsrPartyModeOff': '0x40DB',
-    'EventUsrBleDeleteDevice': '0x40EC',
-    'EventUsrAvrcpNextGroupRelease': '0x40ED',
-    'EventUsrAvrcpPreviousGroupRelease': '0x40EE',
-    'EventUsrTwsQualificationVolUp': '0x40EF',
-    'EventUsrTwsQualificationVolDown': '0x40F0',
-    'EventUsrStartA2DPStream': '0x40F1',
-    'EventUsrPbapBrowseListByName': '0x40F2',
-    'EventUsrTwsQualificationSetAbsVolume': '0x40F3',
-    'EventUsrTwsQualificationPlayTrack': '0x40F4',
-    'EventUsrBleHidExtraConfig': '0x40F5',
-    'EventUsrTwsQualificationAVRCPConfigureDataSize': '0x40F6',
-}
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/Makefile b/src/antlion/controllers/buds_lib/dev_utils/proto/Makefile
deleted file mode 100644
index 8509cd6..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/Makefile
+++ /dev/null
@@ -1,4 +0,0 @@
-all: nanopb_pb2.py plugin_pb2.py
-
-%_pb2.py: %.proto
-	protoc -I. --python_out=. $<
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/apollo_qa.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/apollo_qa.proto
deleted file mode 100644
index 0db32e9..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/apollo_qa.proto
+++ /dev/null
@@ -1,175 +0,0 @@
-/* Forward & backward compatibility practices must be followed.  This means:
-   a) Never re-use an enum if the semantics are different.
-   b) Never alter the semantic meaning.  If needed, simply deprecate
-      old message/field/enum & start using new one.  If deprecated message
-      is no longer used in code make sure to communicate this to QA.
-   c) Prefer 3-stage migrations if possible: add new message/enum/field & start
-      sending it, make sure QA has switched to new message/enum/field, remove old
-      message/enum/field.
-   Fields should start out required unless they are optional in the code.  They should
-   become optional when deprecated (make sure to update QA automation first to expect the
-   field to be optional) or removed.  Make sure to never re-use field ids unless
-   the field is being resurrected.
- */
-syntax = "proto2";
-//package audiowear;
-package apollo.lib.apollo_dev_util_lib.proto;
-import "nanopb.proto";
-//import "third_party/nanopb/nanopb.proto";
-
-
-option java_package = "com.google.android.bisto.nano";
-option java_outer_classname = "ApolloQA";
-
-enum ApolloQAMessageType {
-  TRACE = 1;
-  GET_VER_RESPONSE = 2;
-  GET_CODEC_RESPONSE = 3;
-  GET_DSP_STATUS_RESPONSE = 4;
-  FACTORY_PLAY_SOUND = 5;
-  FACTORY_INFO_REQUEST = 6;
-  FACTORY_INFO_RESPONSE = 7;
-}
-enum TraceId {
-  // data[0] = partition# erased.
-  OTA_ERASE_PARTITION = 1;
-  // data[0] = partition# we will write to.
-  // data[1] = expected length we will write to partition
-  OTA_START_PARTITION_WRITE = 2;
-  // data[0] = partition# written
-  // data[1] = actual written length
-  OTA_FINISHED_PARTITION_WRITE = 3;
-  // our custom signature validation has begun
-  OTA_SIGNATURE_START = 4;
-  // our custom signature validation rejected the image
-  OTA_SIGNATURE_FAILURE = 5;
-  // triggering CSR to reboot & apply DFU
-  OTA_TRIGGERING_LOADER = 6;
-  // the CSR loader rejected the image
-  OTA_LOADER_VERIFY_FAILED = 7;
-  // progress update.
-  // data[0] = num bytes received
-  // data[1] = num bytes expected
-  OTA_PROGRESS = 8;
-  OTA_ABORTED = 9;
-  // up to 10: reserved for OTA
-  // data[0] = AvrcpPlayStatus as integer
-  AVRCP_PLAY_STATUS_CHANGE = 10;
-  /* data[0] = new volume level (int16 stored in uint32)
-     data[1] = new volume level in dB (int16 stored in uint32)
-     data[2] = previous volume level (int16 stored in uint32)
-     easy conversion in python:
-       new_volume = ctpyes.c_short(ctypes.c_uint(data[0]).value).value
-       new_volume_db = ctpyes.c_short(ctypes.c_uint(data[1]).value).value
-  */
-  VOLUME_CHANGE = 11;
-  /* data[0] = entry number of command */
-  COMMANDER_RECV_COMMAND = 12;
-  COMMANDER_FINISH_COMMAND = 13;
-  /* currently no information about the rejected command */
-  COMMANDER_REJECT_COMMAND = 14;
-}
-/* Note: FWD_SEEK & REV_SEEK are bit-wise or'ed into the status.
-   Use SEEK_MASK first to read the seek or mask it out to get the other
-   states).  Do not & with individual states for comparison as aside from
-   seek the other states are not a bitwise-mask.
-*/
-enum AvrcpPlayStatus {
-  STOPPED = 0;
-  PLAYING = 1;
-  PAUSED = 2;
-  FWD_SEEK = 8;
-  REV_SEEK = 16;
-  ERROR = 5;
-  /* & this with the status to compare against FWD_SEEK/REV_SEEK.
-     & with the complement of this value to get the other states */
-  SEEK_MASK = 24;
-}
-/* These messages are internal trace-points for QA to do whitebox validation.
-   However, developers should feel free to remove trace-points if they
-   no longer make sense (but communicate to QA to prep this ahead-of-time). */
-message ApolloQATrace {
-  // all messages should have a timestamp field and it will get auto-populated
-  // (no need to set it at the call-site).
-  required uint32 timestamp = 1;
-  // this is the event that occured internally
-  required TraceId id = 2;
-  // this is some optional data that depends on the traceid.
-  // if not documented then no fields will be written.
-  repeated uint32 data = 3 [packed = true, (nanopb).max_count = 5];
-}
-enum PreviousBootStatus {
-  OTA_SUCCESS = 1; /* previous boot OK */
-  OTA_ERROR = 2; /* previous OTA failed */
-}
-//Next ID: 10
-message ApolloQAGetVerResponse {
-  required uint32 timestamp = 1;
-  required uint32 csr_fw_version = 2; // not implemented for now
-  required bool csr_fw_debug_build = 3; // not implemented for now
-  required uint32 vm_build_number = 4;
-  required bool vm_debug_build = 5;
-  required uint32 psoc_version = 6;
-  // the build label sent to AGSA. This should just be an amalgum of the broken-down
-  // info above. Aside from X.Y.Z prefix, do not parse this for anything as it is
-  // free-form text.
-  required string build_label = 7 [(nanopb).max_size = 32];
-  // if not set then info wasn't available.
-  optional PreviousBootStatus last_ota_status = 8;
-  required uint32 charger_version = 9;
-  optional uint32 expected_psoc_version = 10;
-}
-enum ApolloQAA2dpCodec {
-  AAC = 1;
-  SBC = 2;
-}
-message ApolloQAGetCodecResponse {
-  required uint32 timestamp = 1;
-  // if not set then unknown codec (error).
-  optional ApolloQAA2dpCodec codec = 2;
-}
-enum SinkState {
-  LIMBO = 0;
-  CONNECTABLE = 1;
-  DISCOVERABLE = 2;
-  CONNECTED = 3;
-  OUTGOING_CALLS_ESTABLISH = 4;
-  INCOMING_CALLS_ESTABLISH = 5;
-  ACTIVE_CALL_SCO = 6;
-  TEST_MODE = 7;
-  THREE_WAY_CALL_WAITING = 8;
-  THREE_WAY_CALL_ON_HOLD = 9;
-  THREE_WAY_MULTICALL = 10;
-  INCOMING_CALL_ON_HOLD = 11;
-  ACTIVE_CALL_NO_SCO = 12;
-  A2DP_STREAMING = 13;
-  DEVICE_LOW_BATTERY = 14;
-}
-message ApolloQAGetDspStatusResponse {
-  required uint32 timestamp = 1;
-  required bool is_dsp_loaded = 2;
-  required SinkState sink_state = 3;
-  /**
-   * This is a bit-wise mask.
-   * see AudioWearFeatureBits audiowear_plugin_state.h
-   * for the definition.
-   */
-  required uint32 features_active = 4;
-}
-message ApolloQAFactoryPlaySound {
-  enum PromptType {
-    PROMPT_TYPE_BT_CONNECTED = 1;
-    PROMPT_TYPE_IN_EAR = 2;
-  }
-  required PromptType prompt = 1;
-}
-message ApolloQAFactoryInfoRequest {
-}
-message ApolloQAFactoryInfoResponse {
-  required uint32 timestamp = 1;
-  optional int32 crystal_trim = 2 [(nanopb).int_size = IS_16];
-  optional bool crash_dump_exists = 3;
-  optional bool is_developer_mode_enabled = 4;
-  optional bool is_always_connected = 5;
-  optional uint32 hwid = 6;
-}
\ No newline at end of file
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/audiowear.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/audiowear.proto
deleted file mode 100644
index 8f825bd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/audiowear.proto
+++ /dev/null
@@ -1,25 +0,0 @@
-syntax = "proto2";
-
-//package audiowear;
-package apollo.lib.apollo_dev_util_lib.proto;
-
-option java_package = "com.google.android.bisto.nano";
-option java_outer_classname = "Protocol";
-
-enum MessageGroup {
-    UNKNOWN_MESSAGE_GROUP = 0;
-    DEVICE_INPUT = 1;
-    OTA = 2;
-    DEVICE_CAPABILITY = 3;
-    DEVICE_STATUS = 4;
-    LOGGING = 5;
-    SENSORS = 6;
-    COMPANION_STATUS = 7;
-    DEVICE_COMMAND = 8;
-    BISTO_SETTINGS = 9;
-    WELLNESS = 10;
-    TEST = 11;
-    BLE_SERVICE = 12;
-    APOLLO_QA = 126;
-    TRANSLATE = 127;
-}
\ No newline at end of file
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py
deleted file mode 100644
index fefcfe4..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: apollo_qa.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-import nanopb_pb2 as nanopb__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0f\x61pollo_qa.proto\x12$apollo.lib.apollo_dev_util_lib.proto\x1a\x0cnanopb.proto\"t\n\rApolloQATrace\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x39\n\x02id\x18\x02 \x02(\x0e\x32-.apollo.lib.apollo_dev_util_lib.proto.TraceId\x12\x15\n\x04\x64\x61ta\x18\x03 \x03(\rB\x07\x10\x01\x92?\x02\x10\x05\"\xcd\x02\n\x16\x41polloQAGetVerResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x16\n\x0e\x63sr_fw_version\x18\x02 \x02(\r\x12\x1a\n\x12\x63sr_fw_debug_build\x18\x03 \x02(\x08\x12\x17\n\x0fvm_build_number\x18\x04 \x02(\r\x12\x16\n\x0evm_debug_build\x18\x05 \x02(\x08\x12\x14\n\x0cpsoc_version\x18\x06 \x02(\r\x12\x1a\n\x0b\x62uild_label\x18\x07 \x02(\tB\x05\x92?\x02\x08 \x12Q\n\x0flast_ota_status\x18\x08 \x01(\x0e\x32\x38.apollo.lib.apollo_dev_util_lib.proto.PreviousBootStatus\x12\x17\n\x0f\x63harger_version\x18\t \x02(\r\x12\x1d\n\x15\x65xpected_psoc_version\x18\n \x01(\r\"u\n\x18\x41polloQAGetCodecResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x46\n\x05\x63odec\x18\x02 \x01(\x0e\x32\x37.apollo.lib.apollo_dev_util_lib.proto.ApolloQAA2dpCodec\"\xa6\x01\n\x1c\x41polloQAGetDspStatusResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x15\n\ris_dsp_loaded\x18\x02 \x02(\x08\x12\x43\n\nsink_state\x18\x03 \x02(\x0e\x32/.apollo.lib.apollo_dev_util_lib.proto.SinkState\x12\x17\n\x0f\x66\x65\x61tures_active\x18\x04 \x02(\r\"\xb9\x01\n\x18\x41polloQAFactoryPlaySound\x12Y\n\x06prompt\x18\x01 \x02(\x0e\x32I.apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryPlaySound.PromptType\"B\n\nPromptType\x12\x1c\n\x18PROMPT_TYPE_BT_CONNECTED\x10\x01\x12\x16\n\x12PROMPT_TYPE_IN_EAR\x10\x02\"\x1c\n\x1a\x41polloQAFactoryInfoRequest\"\xb6\x01\n\x1b\x41polloQAFactoryInfoResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x1b\n\x0c\x63rystal_trim\x18\x02 \x01(\x05\x42\x05\x92?\x02\x38\x10\x12\x19\n\x11\x63rash_dump_exists\x18\x03 \x01(\x08\x12!\n\x19is_developer_mode_enabled\x18\x04 \x01(\x08\x12\x1b\n\x13is_always_connected\x18\x05 
\x01(\x08\x12\x0c\n\x04hwid\x18\x06 \x01(\r*\xb8\x01\n\x13\x41polloQAMessageType\x12\t\n\x05TRACE\x10\x01\x12\x14\n\x10GET_VER_RESPONSE\x10\x02\x12\x16\n\x12GET_CODEC_RESPONSE\x10\x03\x12\x1b\n\x17GET_DSP_STATUS_RESPONSE\x10\x04\x12\x16\n\x12\x46\x41\x43TORY_PLAY_SOUND\x10\x05\x12\x18\n\x14\x46\x41\x43TORY_INFO_REQUEST\x10\x06\x12\x19\n\x15\x46\x41\x43TORY_INFO_RESPONSE\x10\x07*\xfc\x02\n\x07TraceId\x12\x17\n\x13OTA_ERASE_PARTITION\x10\x01\x12\x1d\n\x19OTA_START_PARTITION_WRITE\x10\x02\x12 \n\x1cOTA_FINISHED_PARTITION_WRITE\x10\x03\x12\x17\n\x13OTA_SIGNATURE_START\x10\x04\x12\x19\n\x15OTA_SIGNATURE_FAILURE\x10\x05\x12\x19\n\x15OTA_TRIGGERING_LOADER\x10\x06\x12\x1c\n\x18OTA_LOADER_VERIFY_FAILED\x10\x07\x12\x10\n\x0cOTA_PROGRESS\x10\x08\x12\x0f\n\x0bOTA_ABORTED\x10\t\x12\x1c\n\x18\x41VRCP_PLAY_STATUS_CHANGE\x10\n\x12\x11\n\rVOLUME_CHANGE\x10\x0b\x12\x1a\n\x16\x43OMMANDER_RECV_COMMAND\x10\x0c\x12\x1c\n\x18\x43OMMANDER_FINISH_COMMAND\x10\r\x12\x1c\n\x18\x43OMMANDER_REJECT_COMMAND\x10\x0e*m\n\x0f\x41vrcpPlayStatus\x12\x0b\n\x07STOPPED\x10\x00\x12\x0b\n\x07PLAYING\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x46WD_SEEK\x10\x08\x12\x0c\n\x08REV_SEEK\x10\x10\x12\t\n\x05\x45RROR\x10\x05\x12\r\n\tSEEK_MASK\x10\x18*4\n\x12PreviousBootStatus\x12\x0f\n\x0bOTA_SUCCESS\x10\x01\x12\r\n\tOTA_ERROR\x10\x02*%\n\x11\x41polloQAA2dpCodec\x12\x07\n\x03\x41\x41\x43\x10\x01\x12\x07\n\x03SBC\x10\x02*\xd8\x02\n\tSinkState\x12\t\n\x05LIMBO\x10\x00\x12\x0f\n\x0b\x43ONNECTABLE\x10\x01\x12\x10\n\x0c\x44ISCOVERABLE\x10\x02\x12\r\n\tCONNECTED\x10\x03\x12\x1c\n\x18OUTGOING_CALLS_ESTABLISH\x10\x04\x12\x1c\n\x18INCOMING_CALLS_ESTABLISH\x10\x05\x12\x13\n\x0f\x41\x43TIVE_CALL_SCO\x10\x06\x12\r\n\tTEST_MODE\x10\x07\x12\x1a\n\x16THREE_WAY_CALL_WAITING\x10\x08\x12\x1a\n\x16THREE_WAY_CALL_ON_HOLD\x10\t\x12\x17\n\x13THREE_WAY_MULTICALL\x10\n\x12\x19\n\x15INCOMING_CALL_ON_HOLD\x10\x0b\x12\x16\n\x12\x41\x43TIVE_CALL_NO_SCO\x10\x0c\x12\x12\n\x0e\x41\x32\x44P_STREAMING\x10\r\x12\x16\n\x12\x44\x45VICE_LOW_BATT
ERY\x10\x0e\x42)\n\x1d\x63om.google.android.bisto.nanoB\x08\x41polloQA')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'apollo_qa_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\035com.google.android.bisto.nanoB\010ApolloQA'
-  _APOLLOQATRACE.fields_by_name['data']._options = None
-  _APOLLOQATRACE.fields_by_name['data']._serialized_options = b'\020\001\222?\002\020\005'
-  _APOLLOQAGETVERRESPONSE.fields_by_name['build_label']._options = None
-  _APOLLOQAGETVERRESPONSE.fields_by_name['build_label']._serialized_options = b'\222?\002\010 '
-  _APOLLOQAFACTORYINFORESPONSE.fields_by_name['crystal_trim']._options = None
-  _APOLLOQAFACTORYINFORESPONSE.fields_by_name['crystal_trim']._serialized_options = b'\222?\0028\020'
-  _APOLLOQAMESSAGETYPE._serialized_start=1217
-  _APOLLOQAMESSAGETYPE._serialized_end=1401
-  _TRACEID._serialized_start=1404
-  _TRACEID._serialized_end=1784
-  _AVRCPPLAYSTATUS._serialized_start=1786
-  _AVRCPPLAYSTATUS._serialized_end=1895
-  _PREVIOUSBOOTSTATUS._serialized_start=1897
-  _PREVIOUSBOOTSTATUS._serialized_end=1949
-  _APOLLOQAA2DPCODEC._serialized_start=1951
-  _APOLLOQAA2DPCODEC._serialized_end=1988
-  _SINKSTATE._serialized_start=1991
-  _SINKSTATE._serialized_end=2335
-  _APOLLOQATRACE._serialized_start=71
-  _APOLLOQATRACE._serialized_end=187
-  _APOLLOQAGETVERRESPONSE._serialized_start=190
-  _APOLLOQAGETVERRESPONSE._serialized_end=523
-  _APOLLOQAGETCODECRESPONSE._serialized_start=525
-  _APOLLOQAGETCODECRESPONSE._serialized_end=642
-  _APOLLOQAGETDSPSTATUSRESPONSE._serialized_start=645
-  _APOLLOQAGETDSPSTATUSRESPONSE._serialized_end=811
-  _APOLLOQAFACTORYPLAYSOUND._serialized_start=814
-  _APOLLOQAFACTORYPLAYSOUND._serialized_end=999
-  _APOLLOQAFACTORYPLAYSOUND_PROMPTTYPE._serialized_start=933
-  _APOLLOQAFACTORYPLAYSOUND_PROMPTTYPE._serialized_end=999
-  _APOLLOQAFACTORYINFOREQUEST._serialized_start=1001
-  _APOLLOQAFACTORYINFOREQUEST._serialized_end=1029
-  _APOLLOQAFACTORYINFORESPONSE._serialized_start=1032
-  _APOLLOQAFACTORYINFORESPONSE._serialized_end=1214
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py
deleted file mode 100644
index 123a079..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: audiowear.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0f\x61udiowear.proto\x12$apollo.lib.apollo_dev_util_lib.proto*\x8d\x02\n\x0cMessageGroup\x12\x19\n\x15UNKNOWN_MESSAGE_GROUP\x10\x00\x12\x10\n\x0c\x44\x45VICE_INPUT\x10\x01\x12\x07\n\x03OTA\x10\x02\x12\x15\n\x11\x44\x45VICE_CAPABILITY\x10\x03\x12\x11\n\rDEVICE_STATUS\x10\x04\x12\x0b\n\x07LOGGING\x10\x05\x12\x0b\n\x07SENSORS\x10\x06\x12\x14\n\x10\x43OMPANION_STATUS\x10\x07\x12\x12\n\x0e\x44\x45VICE_COMMAND\x10\x08\x12\x12\n\x0e\x42ISTO_SETTINGS\x10\t\x12\x0c\n\x08WELLNESS\x10\n\x12\x08\n\x04TEST\x10\x0b\x12\x0f\n\x0b\x42LE_SERVICE\x10\x0c\x12\r\n\tAPOLLO_QA\x10~\x12\r\n\tTRANSLATE\x10\x7f\x42)\n\x1d\x63om.google.android.bisto.nanoB\x08Protocol')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'audiowear_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\035com.google.android.bisto.nanoB\010Protocol'
-  _MESSAGEGROUP._serialized_start=58
-  _MESSAGEGROUP._serialized_end=327
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py
deleted file mode 100644
index dd9775c..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py
+++ /dev/null
@@ -1,1482 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/descriptor.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-if _descriptor._USE_C_DESCRIPTORS == False:
-  DESCRIPTOR = _descriptor.FileDescriptor(
-    name='google/protobuf/descriptor.proto',
-    package='google.protobuf',
-    syntax='proto2',
-    serialized_options=None,
-    create_key=_descriptor._internal_create_key,
-    serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xe4\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\xa9\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 
\x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x8c\x01\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 
\x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xcc\x04\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe6\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x02\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x8d\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"z\n\rMethodOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xb1\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 
\x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01'
-  )
-else:
-  DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xe4\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\xa9\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 
\x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x8c\x01\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 
\x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xcc\x04\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe6\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x02\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x8d\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"z\n\rMethodOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xb1\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 
\x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01')
-
-if _descriptor._USE_C_DESCRIPTORS == False:
-  _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor(
-    name='Type',
-    full_name='google.protobuf.FieldDescriptorProto.Type',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_DOUBLE', index=0, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_FLOAT', index=1, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_INT64', index=2, number=3,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_UINT64', index=3, number=4,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_INT32', index=4, number=5,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_FIXED64', index=5, number=6,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_FIXED32', index=6, number=7,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_BOOL', index=7, number=8,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_STRING', index=8, number=9,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_GROUP', index=9, number=10,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_MESSAGE', index=10, number=11,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_BYTES', index=11, number=12,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_UINT32', index=12, number=13,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_ENUM', index=13, number=14,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SFIXED32', index=14, number=15,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SFIXED64', index=15, number=16,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SINT32', index=16, number=17,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='TYPE_SINT64', index=17, number=18,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE)
-
-  _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor(
-    name='Label',
-    full_name='google.protobuf.FieldDescriptorProto.Label',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='LABEL_OPTIONAL', index=0, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='LABEL_REQUIRED', index=1, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='LABEL_REPEATED', index=2, number=3,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL)
-
-  _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor(
-    name='OptimizeMode',
-    full_name='google.protobuf.FileOptions.OptimizeMode',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='SPEED', index=0, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='CODE_SIZE', index=1, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='LITE_RUNTIME', index=2, number=3,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE)
-
-  _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor(
-    name='CType',
-    full_name='google.protobuf.FieldOptions.CType',
-    filename=None,
-    file=DESCRIPTOR,
-    create_key=_descriptor._internal_create_key,
-    values=[
-      _descriptor.EnumValueDescriptor(
-        name='STRING', index=0, number=0,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='CORD', index=1, number=1,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-      _descriptor.EnumValueDescriptor(
-        name='STRING_PIECE', index=2, number=2,
-        serialized_options=None,
-        type=None,
-        create_key=_descriptor._internal_create_key),
-    ],
-    containing_type=None,
-    serialized_options=None,
-  )
-  _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE)
-
-
-  _FILEDESCRIPTORSET = _descriptor.Descriptor(
-    name='FileDescriptorSet',
-    full_name='google.protobuf.FileDescriptorSet',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0,
-        number=1, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _FILEDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='FileDescriptorProto',
-    full_name='google.protobuf.FileDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1,
-        number=2, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2,
-        number=3, type=9, cpp_type=9, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3,
-        number=10, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4,
-        number=11, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5,
-        number=4, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6,
-        number=5, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7,
-        number=6, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8,
-        number=7, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9,
-        number=8, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10,
-        number=9, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11,
-        number=12, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor(
-    name='ExtensionRange',
-    full_name='google.protobuf.DescriptorProto.ExtensionRange',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0,
-        number=1, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1,
-        number=2, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _DESCRIPTORPROTO = _descriptor.Descriptor(
-    name='DescriptorProto',
-    full_name='google.protobuf.DescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.DescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='field', full_name='google.protobuf.DescriptorProto.field', index=1,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2,
-        number=6, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3,
-        number=3, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4,
-        number=4, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5,
-        number=5, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6,
-        number=8, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.DescriptorProto.options', index=7,
-        number=7, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, ],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _FIELDDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='FieldDescriptorProto',
-    full_name='google.protobuf.FieldDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1,
-        number=3, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='label', full_name='google.protobuf.FieldDescriptorProto.label', index=2,
-        number=4, type=14, cpp_type=8, label=1,
-        has_default_value=False, default_value=1,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3,
-        number=5, type=14, cpp_type=8, label=1,
-        has_default_value=False, default_value=1,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4,
-        number=6, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5,
-        number=2, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6,
-        number=7, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7,
-        number=9, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=8,
-        number=8, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-      _FIELDDESCRIPTORPROTO_TYPE,
-      _FIELDDESCRIPTORPROTO_LABEL,
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='OneofDescriptorProto',
-    full_name='google.protobuf.OneofDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='EnumDescriptorProto',
-    full_name='google.protobuf.EnumDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='value', full_name='google.protobuf.EnumDescriptorProto.value', index=1,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2,
-        number=3, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='EnumValueDescriptorProto',
-    full_name='google.protobuf.EnumValueDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1,
-        number=2, type=5, cpp_type=1, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2,
-        number=3, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='ServiceDescriptorProto',
-    full_name='google.protobuf.ServiceDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2,
-        number=3, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _METHODDESCRIPTORPROTO = _descriptor.Descriptor(
-    name='MethodDescriptorProto',
-    full_name='google.protobuf.MethodDescriptorProto',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1,
-        number=2, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2,
-        number=3, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3,
-        number=4, type=11, cpp_type=10, label=1,
-        has_default_value=False, default_value=None,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='client_streaming', full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4,
-        number=5, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5,
-        number=6, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _FILEOPTIONS = _descriptor.Descriptor(
-    name='FileOptions',
-    full_name='google.protobuf.FileOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0,
-        number=1, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1,
-        number=8, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2,
-        number=10, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3,
-        number=20, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4,
-        number=27, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5,
-        number=9, type=14, cpp_type=8, label=1,
-        has_default_value=True, default_value=1,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6,
-        number=11, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7,
-        number=16, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8,
-        number=17, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9,
-        number=18, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=10,
-        number=23, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=11,
-        number=31, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=12,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-      _FILEOPTIONS_OPTIMIZEMODE,
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _MESSAGEOPTIONS = _descriptor.Descriptor(
-    name='MessageOptions',
-    full_name='google.protobuf.MessageOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0,
-        number=1, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1,
-        number=2, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2,
-        number=3, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3,
-        number=7, type=8, cpp_type=7, label=1,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _FIELDOPTIONS = _descriptor.Descriptor(
-    name='FieldOptions',
-    full_name='google.protobuf.FieldOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0,
-        number=1, type=14, cpp_type=8, label=1,
-        has_default_value=True, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='packed', full_name='google.protobuf.FieldOptions.packed', index=1,
-        number=2, type=8, cpp_type=7, label=1,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=2,
-        number=5, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=3,
-        number=3, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='weak', full_name='google.protobuf.FieldOptions.weak', index=4,
-        number=10, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=5,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-      _FIELDOPTIONS_CTYPE,
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMOPTIONS = _descriptor.Descriptor(
-    name='EnumOptions',
-    full_name='google.protobuf.EnumOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0,
-        number=2, type=8, cpp_type=7, label=1,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1,
-        number=3, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _ENUMVALUEOPTIONS = _descriptor.Descriptor(
-    name='EnumValueOptions',
-    full_name='google.protobuf.EnumValueOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0,
-        number=1, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _SERVICEOPTIONS = _descriptor.Descriptor(
-    name='ServiceOptions',
-    full_name='google.protobuf.ServiceOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0,
-        number=33, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _METHODOPTIONS = _descriptor.Descriptor(
-    name='MethodOptions',
-    full_name='google.protobuf.MethodOptions',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0,
-        number=33, type=8, cpp_type=7, label=1,
-        has_default_value=True, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=1,
-        number=999, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=True,
-    syntax='proto2',
-    extension_ranges=[(1000, 536870912), ],
-    oneofs=[
-    ],
-  )
-
-
-  _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor(
-    name='NamePart',
-    full_name='google.protobuf.UninterpretedOption.NamePart',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0,
-        number=1, type=9, cpp_type=9, label=2,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
-        number=2, type=8, cpp_type=7, label=2,
-        has_default_value=False, default_value=False,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _UNINTERPRETEDOPTION = _descriptor.Descriptor(
-    name='UninterpretedOption',
-    full_name='google.protobuf.UninterpretedOption',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='name', full_name='google.protobuf.UninterpretedOption.name', index=0,
-        number=2, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1,
-        number=3, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2,
-        number=4, type=4, cpp_type=4, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3,
-        number=5, type=3, cpp_type=2, label=1,
-        has_default_value=False, default_value=0,
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4,
-        number=6, type=1, cpp_type=5, label=1,
-        has_default_value=False, default_value=float(0),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5,
-        number=7, type=12, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"",
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6,
-        number=8, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-
-  _SOURCECODEINFO_LOCATION = _descriptor.Descriptor(
-    name='Location',
-    full_name='google.protobuf.SourceCodeInfo.Location',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0,
-        number=1, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1,
-        number=2, type=5, cpp_type=1, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2,
-        number=3, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-      _descriptor.FieldDescriptor(
-        name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3,
-        number=4, type=9, cpp_type=9, label=1,
-        has_default_value=False, default_value=b"".decode('utf-8'),
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _SOURCECODEINFO = _descriptor.Descriptor(
-    name='SourceCodeInfo',
-    full_name='google.protobuf.SourceCodeInfo',
-    filename=None,
-    file=DESCRIPTOR,
-    containing_type=None,
-    create_key=_descriptor._internal_create_key,
-    fields=[
-      _descriptor.FieldDescriptor(
-        name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0,
-        number=1, type=11, cpp_type=10, label=3,
-        has_default_value=False, default_value=[],
-        message_type=None, enum_type=None, containing_type=None,
-        is_extension=False, extension_scope=None,
-        serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    ],
-    extensions=[
-    ],
-    nested_types=[_SOURCECODEINFO_LOCATION, ],
-    enum_types=[
-    ],
-    serialized_options=None,
-    is_extendable=False,
-    syntax='proto2',
-    extension_ranges=[],
-    oneofs=[
-    ],
-  )
-
-  _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
-  _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
-  _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO
-  _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
-  _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO
-  _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
-  _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
-  _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
-  _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
-  _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO
-  _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO
-  _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
-  _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
-  _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
-  _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
-  _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
-  _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
-  _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
-  _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS
-  _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
-  _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS
-  _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-  _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION
-  _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
-  _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO
-  _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION
-  DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET
-  DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO
-  DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS
-  DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS
-  DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS
-  DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS
-  DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS
-  DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS
-  DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS
-  DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION
-  DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO
-  _sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-else:
-  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  _FILEDESCRIPTORSET._serialized_start=53
-  _FILEDESCRIPTORSET._serialized_end=124
-  _FILEDESCRIPTORPROTO._serialized_start=127
-  _FILEDESCRIPTORPROTO._serialized_end=602
-  _DESCRIPTORPROTO._serialized_start=605
-  _DESCRIPTORPROTO._serialized_end=1089
-  _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1045
-  _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1089
-  _FIELDDESCRIPTORPROTO._serialized_start=1092
-  _FIELDDESCRIPTORPROTO._serialized_end=1773
-  _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1394
-  _FIELDDESCRIPTORPROTO_TYPE._serialized_end=1704
-  _FIELDDESCRIPTORPROTO_LABEL._serialized_start=1706
-  _FIELDDESCRIPTORPROTO_LABEL._serialized_end=1773
-  _ONEOFDESCRIPTORPROTO._serialized_start=1775
-  _ONEOFDESCRIPTORPROTO._serialized_end=1811
-  _ENUMDESCRIPTORPROTO._serialized_start=1814
-  _ENUMDESCRIPTORPROTO._serialized_end=1954
-  _ENUMVALUEDESCRIPTORPROTO._serialized_start=1956
-  _ENUMVALUEDESCRIPTORPROTO._serialized_end=2064
-  _SERVICEDESCRIPTORPROTO._serialized_start=2067
-  _SERVICEDESCRIPTORPROTO._serialized_end=2211
-  _METHODDESCRIPTORPROTO._serialized_start=2214
-  _METHODDESCRIPTORPROTO._serialized_end=2407
-  _FILEOPTIONS._serialized_start=2410
-  _FILEOPTIONS._serialized_end=2998
-  _FILEOPTIONS_OPTIMIZEMODE._serialized_start=2929
-  _FILEOPTIONS_OPTIMIZEMODE._serialized_end=2987
-  _MESSAGEOPTIONS._serialized_start=3001
-  _MESSAGEOPTIONS._serialized_end=3231
-  _FIELDOPTIONS._serialized_start=3234
-  _FIELDOPTIONS._serialized_end=3522
-  _FIELDOPTIONS_CTYPE._serialized_start=3464
-  _FIELDOPTIONS_CTYPE._serialized_end=3511
-  _ENUMOPTIONS._serialized_start=3525
-  _ENUMOPTIONS._serialized_end=3666
-  _ENUMVALUEOPTIONS._serialized_start=3668
-  _ENUMVALUEOPTIONS._serialized_end=3793
-  _SERVICEOPTIONS._serialized_start=3795
-  _SERVICEOPTIONS._serialized_end=3918
-  _METHODOPTIONS._serialized_start=3920
-  _METHODOPTIONS._serialized_end=4042
-  _UNINTERPRETEDOPTION._serialized_start=4045
-  _UNINTERPRETEDOPTION._serialized_end=4331
-  _UNINTERPRETEDOPTION_NAMEPART._serialized_start=4280
-  _UNINTERPRETEDOPTION_NAMEPART._serialized_end=4331
-  _SOURCECODEINFO._serialized_start=4334
-  _SOURCECODEINFO._serialized_end=4511
-  _SOURCECODEINFO_LOCATION._serialized_start=4412
-  _SOURCECODEINFO_LOCATION._serialized_end=4511
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py
deleted file mode 100644
index c23077a..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: nanopb.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cnanopb.proto\x1a google/protobuf/descriptor.proto\"\x80\x02\n\rNanoPBOptions\x12\x10\n\x08max_size\x18\x01 \x01(\x05\x12\x11\n\tmax_count\x18\x02 \x01(\x05\x12&\n\x08int_size\x18\x07 \x01(\x0e\x32\x08.IntSize:\nIS_DEFAULT\x12$\n\x04type\x18\x03 \x01(\x0e\x32\n.FieldType:\nFT_DEFAULT\x12\x18\n\nlong_names\x18\x04 \x01(\x08:\x04true\x12\x1c\n\rpacked_struct\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0cskip_message\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x18\n\tno_unions\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\r\n\x05msgid\x18\t \x01(\r*Z\n\tFieldType\x12\x0e\n\nFT_DEFAULT\x10\x00\x12\x0f\n\x0b\x46T_CALLBACK\x10\x01\x12\x0e\n\nFT_POINTER\x10\x04\x12\r\n\tFT_STATIC\x10\x02\x12\r\n\tFT_IGNORE\x10\x03*D\n\x07IntSize\x12\x0e\n\nIS_DEFAULT\x10\x00\x12\x08\n\x04IS_8\x10\x08\x12\t\n\x05IS_16\x10\x10\x12\t\n\x05IS_32\x10 \x12\t\n\x05IS_64\x10@:E\n\x0enanopb_fileopt\x12\x1c.google.protobuf.FileOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:G\n\rnanopb_msgopt\x12\x1f.google.protobuf.MessageOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:E\n\x0enanopb_enumopt\x12\x1c.google.protobuf.EnumOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:>\n\x06nanopb\x12\x1d.google.protobuf.FieldOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptionsB\x1a\n\x18\x66i.kapsi.koti.jpa.nanopb')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'nanopb_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-  google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(nanopb_fileopt)
-  google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(nanopb_msgopt)
-  google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(nanopb_enumopt)
-  google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(nanopb)
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\030fi.kapsi.koti.jpa.nanopb'
-  _FIELDTYPE._serialized_start=309
-  _FIELDTYPE._serialized_end=399
-  _INTSIZE._serialized_start=401
-  _INTSIZE._serialized_end=469
-  _NANOPBOPTIONS._serialized_start=51
-  _NANOPBOPTIONS._serialized_end=307
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py b/src/antlion/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py
deleted file mode 100644
index 79fffcd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: plugin.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cplugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"}\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xaa\x01\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a>\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\tB,\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtos')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'plugin_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtos'
-  _CODEGENERATORREQUEST._serialized_start=76
-  _CODEGENERATORREQUEST._serialized_end=201
-  _CODEGENERATORRESPONSE._serialized_start=204
-  _CODEGENERATORRESPONSE._serialized_end=374
-  _CODEGENERATORRESPONSE_FILE._serialized_start=312
-  _CODEGENERATORRESPONSE_FILE._serialized_end=374
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto
deleted file mode 100644
index e17c0cc..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto
+++ /dev/null
@@ -1,714 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc.  All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-//     * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: kenton@google.com (Kenton Varda)
-//  Based on original Protocol Buffers design by
-//  Sanjay Ghemawat, Jeff Dean, and others.
-//
-// The messages in this file describe the definitions found in .proto files.
-// A valid .proto file can be translated directly to a FileDescriptorProto
-// without any other information (e.g. without reading its imports).
-
-
-syntax = "proto2";
-
-package google.protobuf;
-option java_package = "com.google.protobuf";
-option java_outer_classname = "DescriptorProtos";
-
-// descriptor.proto must be optimized for speed because reflection-based
-// algorithms don't work during bootstrapping.
-option optimize_for = SPEED;
-
-// The protocol compiler can output a FileDescriptorSet containing the .proto
-// files it parses.
-message FileDescriptorSet {
-  repeated FileDescriptorProto file = 1;
-}
-
-// Describes a complete .proto file.
-message FileDescriptorProto {
-  optional string name = 1;       // file name, relative to root of source tree
-  optional string package = 2;    // e.g. "foo", "foo.bar", etc.
-
-  // Names of files imported by this file.
-  repeated string dependency = 3;
-  // Indexes of the public imported files in the dependency list above.
-  repeated int32 public_dependency = 10;
-  // Indexes of the weak imported files in the dependency list.
-  // For Google-internal migration only. Do not use.
-  repeated int32 weak_dependency = 11;
-
-  // All top-level definitions in this file.
-  repeated DescriptorProto message_type = 4;
-  repeated EnumDescriptorProto enum_type = 5;
-  repeated ServiceDescriptorProto service = 6;
-  repeated FieldDescriptorProto extension = 7;
-
-  optional FileOptions options = 8;
-
-  // This field contains optional information about the original source code.
-  // You may safely remove this entire field without harming runtime
-  // functionality of the descriptors -- the information is needed only by
-  // development tools.
-  optional SourceCodeInfo source_code_info = 9;
-
-  // The syntax of the proto file.
-  // The supported values are "proto2" and "proto3".
-  optional string syntax = 12;
-}
-
-// Describes a message type.
-message DescriptorProto {
-  optional string name = 1;
-
-  repeated FieldDescriptorProto field = 2;
-  repeated FieldDescriptorProto extension = 6;
-
-  repeated DescriptorProto nested_type = 3;
-  repeated EnumDescriptorProto enum_type = 4;
-
-  message ExtensionRange {
-    optional int32 start = 1;
-    optional int32 end = 2;
-  }
-  repeated ExtensionRange extension_range = 5;
-
-  repeated OneofDescriptorProto oneof_decl = 8;
-
-  optional MessageOptions options = 7;
-}
-
-// Describes a field within a message.
-message FieldDescriptorProto {
-  enum Type {
-    // 0 is reserved for errors.
-    // Order is weird for historical reasons.
-    TYPE_DOUBLE         = 1;
-    TYPE_FLOAT          = 2;
-    // Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-    // negative values are likely.
-    TYPE_INT64          = 3;
-    TYPE_UINT64         = 4;
-    // Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-    // negative values are likely.
-    TYPE_INT32          = 5;
-    TYPE_FIXED64        = 6;
-    TYPE_FIXED32        = 7;
-    TYPE_BOOL           = 8;
-    TYPE_STRING         = 9;
-    TYPE_GROUP          = 10;  // Tag-delimited aggregate.
-    TYPE_MESSAGE        = 11;  // Length-delimited aggregate.
-
-    // New in version 2.
-    TYPE_BYTES          = 12;
-    TYPE_UINT32         = 13;
-    TYPE_ENUM           = 14;
-    TYPE_SFIXED32       = 15;
-    TYPE_SFIXED64       = 16;
-    TYPE_SINT32         = 17;  // Uses ZigZag encoding.
-    TYPE_SINT64         = 18;  // Uses ZigZag encoding.
-  };
-
-  enum Label {
-    // 0 is reserved for errors
-    LABEL_OPTIONAL      = 1;
-    LABEL_REQUIRED      = 2;
-    LABEL_REPEATED      = 3;
-    // TODO(sanjay): Should we add LABEL_MAP?
-  };
-
-  optional string name = 1;
-  optional int32 number = 3;
-  optional Label label = 4;
-
-  // If type_name is set, this need not be set.  If both this and type_name
-  // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
-  optional Type type = 5;
-
-  // For message and enum types, this is the name of the type.  If the name
-  // starts with a '.', it is fully-qualified.  Otherwise, C++-like scoping
-  // rules are used to find the type (i.e. first the nested types within this
-  // message are searched, then within the parent, on up to the root
-  // namespace).
-  optional string type_name = 6;
-
-  // For extensions, this is the name of the type being extended.  It is
-  // resolved in the same manner as type_name.
-  optional string extendee = 2;
-
-  // For numeric types, contains the original text representation of the value.
-  // For booleans, "true" or "false".
-  // For strings, contains the default text contents (not escaped in any way).
-  // For bytes, contains the C escaped value.  All bytes >= 128 are escaped.
-  // TODO(kenton):  Base-64 encode?
-  optional string default_value = 7;
-
-  // If set, gives the index of a oneof in the containing type's oneof_decl
-  // list.  This field is a member of that oneof.  Extensions of a oneof should
-  // not set this since the oneof to which they belong will be inferred based
-  // on the extension range containing the extension's field number.
-  optional int32 oneof_index = 9;
-
-  optional FieldOptions options = 8;
-}
-
-// Describes a oneof.
-message OneofDescriptorProto {
-  optional string name = 1;
-}
-
-// Describes an enum type.
-message EnumDescriptorProto {
-  optional string name = 1;
-
-  repeated EnumValueDescriptorProto value = 2;
-
-  optional EnumOptions options = 3;
-}
-
-// Describes a value within an enum.
-message EnumValueDescriptorProto {
-  optional string name = 1;
-  optional int32 number = 2;
-
-  optional EnumValueOptions options = 3;
-}
-
-// Describes a service.
-message ServiceDescriptorProto {
-  optional string name = 1;
-  repeated MethodDescriptorProto method = 2;
-
-  optional ServiceOptions options = 3;
-}
-
-// Describes a method of a service.
-message MethodDescriptorProto {
-  optional string name = 1;
-
-  // Input and output type names.  These are resolved in the same way as
-  // FieldDescriptorProto.type_name, but must refer to a message type.
-  optional string input_type = 2;
-  optional string output_type = 3;
-
-  optional MethodOptions options = 4;
-
-  // Identifies if client streams multiple client messages
-  optional bool client_streaming = 5 [default=false];
-  // Identifies if server streams multiple server messages
-  optional bool server_streaming = 6 [default=false];
-}
-
-
-// ===================================================================
-// Options
-
-// Each of the definitions above may have "options" attached.  These are
-// just annotations which may cause code to be generated slightly differently
-// or may contain hints for code that manipulates protocol messages.
-//
-// Clients may define custom options as extensions of the *Options messages.
-// These extensions may not yet be known at parsing time, so the parser cannot
-// store the values in them.  Instead it stores them in a field in the *Options
-// message called uninterpreted_option. This field must have the same name
-// across all *Options messages. We then use this field to populate the
-// extensions when we build a descriptor, at which point all protos have been
-// parsed and so all extensions are known.
-//
-// Extension numbers for custom options may be chosen as follows:
-// * For options which will only be used within a single application or
-//   organization, or for experimental options, use field numbers 50000
-//   through 99999.  It is up to you to ensure that you do not use the
-//   same number for multiple options.
-// * For options which will be published and used publicly by multiple
-//   independent entities, e-mail protobuf-global-extension-registry@google.com
-//   to reserve extension numbers. Simply provide your project name (e.g.
-//   Object-C plugin) and your porject website (if available) -- there's no need
-//   to explain how you intend to use them. Usually you only need one extension
-//   number. You can declare multiple options with only one extension number by
-//   putting them in a sub-message. See the Custom Options section of the docs
-//   for examples:
-//   https://developers.google.com/protocol-buffers/docs/proto#options
-//   If this turns out to be popular, a web service will be set up
-//   to automatically assign option numbers.
-
-
-message FileOptions {
-
-  // Sets the Java package where classes generated from this .proto will be
-  // placed.  By default, the proto package is used, but this is often
-  // inappropriate because proto packages do not normally start with backwards
-  // domain names.
-  optional string java_package = 1;
-
-
-  // If set, all the classes from the .proto file are wrapped in a single
-  // outer class with the given name.  This applies to both Proto1
-  // (equivalent to the old "--one_java_file" option) and Proto2 (where
-  // a .proto always translates to a single class, but you may want to
-  // explicitly choose the class name).
-  optional string java_outer_classname = 8;
-
-  // If set true, then the Java code generator will generate a separate .java
-  // file for each top-level message, enum, and service defined in the .proto
-  // file.  Thus, these types will *not* be nested inside the outer class
-  // named by java_outer_classname.  However, the outer class will still be
-  // generated to contain the file's getDescriptor() method as well as any
-  // top-level extensions defined in the file.
-  optional bool java_multiple_files = 10 [default=false];
-
-  // If set true, then the Java code generator will generate equals() and
-  // hashCode() methods for all messages defined in the .proto file.
-  // - In the full runtime, this is purely a speed optimization, as the
-  // AbstractMessage base class includes reflection-based implementations of
-  // these methods.
-  //- In the lite runtime, setting this option changes the semantics of
-  // equals() and hashCode() to more closely match those of the full runtime;
-  // the generated methods compute their results based on field values rather
-  // than object identity. (Implementations should not assume that hashcodes
-  // will be consistent across runtimes or versions of the protocol compiler.)
-  optional bool java_generate_equals_and_hash = 20 [default=false];
-
-  // If set true, then the Java2 code generator will generate code that
-  // throws an exception whenever an attempt is made to assign a non-UTF-8
-  // byte sequence to a string field.
-  // Message reflection will do the same.
-  // However, an extension field still accepts non-UTF-8 byte sequences.
-  // This option has no effect on when used with the lite runtime.
-  optional bool java_string_check_utf8 = 27 [default=false];
-
-
-  // Generated classes can be optimized for speed or code size.
-  enum OptimizeMode {
-    SPEED = 1;        // Generate complete code for parsing, serialization,
-                      // etc.
-    CODE_SIZE = 2;    // Use ReflectionOps to implement these methods.
-    LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
-  }
-  optional OptimizeMode optimize_for = 9 [default=SPEED];
-
-  // Sets the Go package where structs generated from this .proto will be
-  // placed. If omitted, the Go package will be derived from the following:
-  //   - The basename of the package import path, if provided.
-  //   - Otherwise, the package statement in the .proto file, if present.
-  //   - Otherwise, the basename of the .proto file, without extension.
-  optional string go_package = 11;
-
-
-
-  // Should generic services be generated in each language?  "Generic" services
-  // are not specific to any particular RPC system.  They are generated by the
-  // main code generators in each language (without additional plugins).
-  // Generic services were the only kind of service generation supported by
-  // early versions of google.protobuf.
-  //
-  // Generic services are now considered deprecated in favor of using plugins
-  // that generate code specific to your particular RPC system.  Therefore,
-  // these default to false.  Old code which depends on generic services should
-  // explicitly set them to true.
-  optional bool cc_generic_services = 16 [default=false];
-  optional bool java_generic_services = 17 [default=false];
-  optional bool py_generic_services = 18 [default=false];
-
-  // Is this file deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for everything in the file, or it will be completely ignored; in the very
-  // least, this is a formalization for deprecating files.
-  optional bool deprecated = 23 [default=false];
-
-
-  // Enables the use of arenas for the proto messages in this file. This applies
-  // only to generated classes for C++.
-  optional bool cc_enable_arenas = 31 [default=false];
-
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message MessageOptions {
-  // Set true to use the old proto1 MessageSet wire format for extensions.
-  // This is provided for backwards-compatibility with the MessageSet wire
-  // format.  You should not use this for any other reason:  It's less
-  // efficient, has fewer features, and is more complicated.
-  //
-  // The message must be defined exactly as follows:
-  //   message Foo {
-  //     option message_set_wire_format = true;
-  //     extensions 4 to max;
-  //   }
-  // Note that the message cannot have any defined fields; MessageSets only
-  // have extensions.
-  //
-  // All extensions of your type must be singular messages; e.g. they cannot
-  // be int32s, enums, or repeated messages.
-  //
-  // Because this is an option, the above two restrictions are not enforced by
-  // the protocol compiler.
-  optional bool message_set_wire_format = 1 [default=false];
-
-  // Disables the generation of the standard "descriptor()" accessor, which can
-  // conflict with a field of the same name.  This is meant to make migration
-  // from proto1 easier; new code should avoid fields named "descriptor".
-  optional bool no_standard_descriptor_accessor = 2 [default=false];
-
-  // Is this message deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the message, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating messages.
-  optional bool deprecated = 3 [default=false];
-
-  // Whether the message is an automatically generated map entry type for the
-  // maps field.
-  //
-  // For maps fields:
-  //     map<KeyType, ValueType> map_field = 1;
-  // The parsed descriptor looks like:
-  //     message MapFieldEntry {
-  //         option map_entry = true;
-  //         optional KeyType key = 1;
-  //         optional ValueType value = 2;
-  //     }
-  //     repeated MapFieldEntry map_field = 1;
-  //
-  // Implementations may choose not to generate the map_entry=true message, but
-  // use a native map in the target language to hold the keys and values.
-  // The reflection APIs in such implementions still need to work as
-  // if the field is a repeated message field.
-  //
-  // NOTE: Do not set the option in .proto files. Always use the maps syntax
-  // instead. The option should only be implicitly set by the proto compiler
-  // parser.
-  optional bool map_entry = 7;
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message FieldOptions {
-  // The ctype option instructs the C++ code generator to use a different
-  // representation of the field than it normally would.  See the specific
-  // options below.  This option is not yet implemented in the open source
-  // release -- sorry, we'll try to include it in a future version!
-  optional CType ctype = 1 [default = STRING];
-  enum CType {
-    // Default mode.
-    STRING = 0;
-
-    CORD = 1;
-
-    STRING_PIECE = 2;
-  }
-  // The packed option can be enabled for repeated primitive fields to enable
-  // a more efficient representation on the wire. Rather than repeatedly
-  // writing the tag and type for each element, the entire array is encoded as
-  // a single length-delimited blob.
-  optional bool packed = 2;
-
-
-
-  // Should this field be parsed lazily?  Lazy applies only to message-type
-  // fields.  It means that when the outer message is initially parsed, the
-  // inner message's contents will not be parsed but instead stored in encoded
-  // form.  The inner message will actually be parsed when it is first accessed.
-  //
-  // This is only a hint.  Implementations are free to choose whether to use
-  // eager or lazy parsing regardless of the value of this option.  However,
-  // setting this option true suggests that the protocol author believes that
-  // using lazy parsing on this field is worth the additional bookkeeping
-  // overhead typically needed to implement it.
-  //
-  // This option does not affect the public interface of any generated code;
-  // all method signatures remain the same.  Furthermore, thread-safety of the
-  // interface is not affected by this option; const methods remain safe to
-  // call from multiple threads concurrently, while non-const methods continue
-  // to require exclusive access.
-  //
-  //
-  // Note that implementations may choose not to check required fields within
-  // a lazy sub-message.  That is, calling IsInitialized() on the outher message
-  // may return true even if the inner message has missing required fields.
-  // This is necessary because otherwise the inner message would have to be
-  // parsed in order to perform the check, defeating the purpose of lazy
-  // parsing.  An implementation which chooses not to check required fields
-  // must be consistent about it.  That is, for any particular sub-message, the
-  // implementation must either *always* check its required fields, or *never*
-  // check its required fields, regardless of whether or not the message has
-  // been parsed.
-  optional bool lazy = 5 [default=false];
-
-  // Is this field deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for accessors, or it will be completely ignored; in the very least, this
-  // is a formalization for deprecating fields.
-  optional bool deprecated = 3 [default=false];
-
-  // For Google-internal migration only. Do not use.
-  optional bool weak = 10 [default=false];
-
-
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message EnumOptions {
-
-  // Set this option to true to allow mapping different tag names to the same
-  // value.
-  optional bool allow_alias = 2;
-
-  // Is this enum deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the enum, or it will be completely ignored; in the very least, this
-  // is a formalization for deprecating enums.
-  optional bool deprecated = 3 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message EnumValueOptions {
-  // Is this enum value deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the enum value, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating enum values.
-  optional bool deprecated = 1 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message ServiceOptions {
-
-  // Note:  Field numbers 1 through 32 are reserved for Google's internal RPC
-  //   framework.  We apologize for hoarding these numbers to ourselves, but
-  //   we were already using them long before we decided to release Protocol
-  //   Buffers.
-
-  // Is this service deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the service, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating services.
-  optional bool deprecated = 33 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-message MethodOptions {
-
-  // Note:  Field numbers 1 through 32 are reserved for Google's internal RPC
-  //   framework.  We apologize for hoarding these numbers to ourselves, but
-  //   we were already using them long before we decided to release Protocol
-  //   Buffers.
-
-  // Is this method deprecated?
-  // Depending on the target platform, this can emit Deprecated annotations
-  // for the method, or it will be completely ignored; in the very least,
-  // this is a formalization for deprecating methods.
-  optional bool deprecated = 33 [default=false];
-
-  // The parser stores options it doesn't recognize here. See above.
-  repeated UninterpretedOption uninterpreted_option = 999;
-
-  // Clients can define custom options in extensions of this message. See above.
-  extensions 1000 to max;
-}
-
-
-// A message representing a option the parser does not recognize. This only
-// appears in options protos created by the compiler::Parser class.
-// DescriptorPool resolves these when building Descriptor objects. Therefore,
-// options protos in descriptor objects (e.g. returned by Descriptor::options(),
-// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
-// in them.
-message UninterpretedOption {
-  // The name of the uninterpreted option.  Each string represents a segment in
-  // a dot-separated name.  is_extension is true iff a segment represents an
-  // extension (denoted with parentheses in options specs in .proto files).
-  // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents
-  // "foo.(bar.baz).qux".
-  message NamePart {
-    required string name_part = 1;
-    required bool is_extension = 2;
-  }
-  repeated NamePart name = 2;
-
-  // The value of the uninterpreted option, in whatever type the tokenizer
-  // identified it as during parsing. Exactly one of these should be set.
-  optional string identifier_value = 3;
-  optional uint64 positive_int_value = 4;
-  optional int64 negative_int_value = 5;
-  optional double double_value = 6;
-  optional bytes string_value = 7;
-  optional string aggregate_value = 8;
-}
-
-// ===================================================================
-// Optional source code info
-
-// Encapsulates information about the original source file from which a
-// FileDescriptorProto was generated.
-message SourceCodeInfo {
-  // A Location identifies a piece of source code in a .proto file which
-  // corresponds to a particular definition.  This information is intended
-  // to be useful to IDEs, code indexers, documentation generators, and similar
-  // tools.
-  //
-  // For example, say we have a file like:
-  //   message Foo {
-  //     optional string foo = 1;
-  //   }
-  // Let's look at just the field definition:
-  //   optional string foo = 1;
-  //   ^       ^^     ^^  ^  ^^^
-  //   a       bc     de  f  ghi
-  // We have the following locations:
-  //   span   path               represents
-  //   [a,i)  [ 4, 0, 2, 0 ]     The whole field definition.
-  //   [a,b)  [ 4, 0, 2, 0, 4 ]  The label (optional).
-  //   [c,d)  [ 4, 0, 2, 0, 5 ]  The type (string).
-  //   [e,f)  [ 4, 0, 2, 0, 1 ]  The name (foo).
-  //   [g,h)  [ 4, 0, 2, 0, 3 ]  The number (1).
-  //
-  // Notes:
-  // - A location may refer to a repeated field itself (i.e. not to any
-  //   particular index within it).  This is used whenever a set of elements are
-  //   logically enclosed in a single code segment.  For example, an entire
-  //   extend block (possibly containing multiple extension definitions) will
-  //   have an outer location whose path refers to the "extensions" repeated
-  //   field without an index.
-  // - Multiple locations may have the same path.  This happens when a single
-  //   logical declaration is spread out across multiple places.  The most
-  //   obvious example is the "extend" block again -- there may be multiple
-  //   extend blocks in the same scope, each of which will have the same path.
-  // - A location's span is not always a subset of its parent's span.  For
-  //   example, the "extendee" of an extension declaration appears at the
-  //   beginning of the "extend" block and is shared by all extensions within
-  //   the block.
-  // - Just because a location's span is a subset of some other location's span
-  //   does not mean that it is a descendent.  For example, a "group" defines
-  //   both a type and a field in a single declaration.  Thus, the locations
-  //   corresponding to the type and field and their components will overlap.
-  // - Code which tries to interpret locations should probably be designed to
-  //   ignore those that it doesn't understand, as more types of locations could
-  //   be recorded in the future.
-  repeated Location location = 1;
-  message Location {
-    // Identifies which part of the FileDescriptorProto was defined at this
-    // location.
-    //
-    // Each element is a field number or an index.  They form a path from
-    // the root FileDescriptorProto to the place where the definition.  For
-    // example, this path:
-    //   [ 4, 3, 2, 7, 1 ]
-    // refers to:
-    //   file.message_type(3)  // 4, 3
-    //       .field(7)         // 2, 7
-    //       .name()           // 1
-    // This is because FileDescriptorProto.message_type has field number 4:
-    //   repeated DescriptorProto message_type = 4;
-    // and DescriptorProto.field has field number 2:
-    //   repeated FieldDescriptorProto field = 2;
-    // and FieldDescriptorProto.name has field number 1:
-    //   optional string name = 1;
-    //
-    // Thus, the above path gives the location of a field name.  If we removed
-    // the last element:
-    //   [ 4, 3, 2, 7 ]
-    // this path refers to the whole field declaration (from the beginning
-    // of the label to the terminating semicolon).
-    repeated int32 path = 1 [packed=true];
-
-    // Always has exactly three or four elements: start line, start column,
-    // end line (optional, otherwise assumed same as start line), end column.
-    // These are packed into a single field for efficiency.  Note that line
-    // and column numbers are zero-based -- typically you will want to add
-    // 1 to each before displaying to a user.
-    repeated int32 span = 2 [packed=true];
-
-    // If this SourceCodeInfo represents a complete declaration, these are any
-    // comments appearing before and after the declaration which appear to be
-    // attached to the declaration.
-    //
-    // A series of line comments appearing on consecutive lines, with no other
-    // tokens appearing on those lines, will be treated as a single comment.
-    //
-    // Only the comment content is provided; comment markers (e.g. //) are
-    // stripped out.  For block comments, leading whitespace and an asterisk
-    // will be stripped from the beginning of each line other than the first.
-    // Newlines are included in the output.
-    //
-    // Examples:
-    //
-    //   optional int32 foo = 1;  // Comment attached to foo.
-    //   // Comment attached to bar.
-    //   optional int32 bar = 2;
-    //
-    //   optional string baz = 3;
-    //   // Comment attached to baz.
-    //   // Another line attached to baz.
-    //
-    //   // Comment attached to qux.
-    //   //
-    //   // Another line attached to qux.
-    //   optional double qux = 4;
-    //
-    //   optional string corge = 5;
-    //   /* Block comment attached
-    //    * to corge.  Leading asterisks
-    //    * will be removed. */
-    //   /* Block comment attached to
-    //    * grault. */
-    //   optional int32 grault = 6;
-    optional string leading_comments = 3;
-    optional string trailing_comments = 4;
-  }
-}
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/nanopb.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/nanopb.proto
deleted file mode 100644
index 5053dfd..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/nanopb.proto
+++ /dev/null
@@ -1,89 +0,0 @@
-// Custom options for defining:
-// - Maximum size of string/bytes
-// - Maximum number of elements in array
-//
-// These are used by nanopb to generate statically allocable structures
-// for memory-limited environments.
-
-syntax = "proto2";
-import "google/protobuf/descriptor.proto";
-
-option java_package = "fi.kapsi.koti.jpa.nanopb";
-
-enum FieldType {
-    FT_DEFAULT = 0; // Automatically decide field type, generate static field if possible.
-    FT_CALLBACK = 1; // Always generate a callback field.
-    FT_POINTER = 4; // Always generate a dynamically allocated field.
-    FT_STATIC = 2; // Generate a static field or raise an exception if not possible.
-    FT_IGNORE = 3; // Ignore the field completely.
-}
-
-enum IntSize {
-    IS_DEFAULT = 0; // Default, 32/64bit based on type in .proto
-    IS_8 = 8;
-    IS_16 = 16;
-    IS_32 = 32;
-    IS_64 = 64;
-}
-
-// This is the inner options message, which basically defines options for
-// a field. When it is used in message or file scope, it applies to all
-// fields.
-message NanoPBOptions {
-  // Allocated size for 'bytes' and 'string' fields.
-  optional int32 max_size = 1;
-
-  // Allocated number of entries in arrays ('repeated' fields)
-  optional int32 max_count = 2;
-
-  // Size of integer fields. Can save some memory if you don't need
-  // full 32 bits for the value.
-  optional IntSize int_size = 7 [default = IS_DEFAULT];
-
-  // Force type of field (callback or static allocation)
-  optional FieldType type = 3 [default = FT_DEFAULT];
-
-  // Use long names for enums, i.e. EnumName_EnumValue.
-  optional bool long_names = 4 [default = true];
-
-  // Add 'packed' attribute to generated structs.
-  // Note: this cannot be used on CPUs that break on unaligned
-  // accesses to variables.
-  optional bool packed_struct = 5 [default = false];
-
-  // Skip this message
-  optional bool skip_message = 6 [default = false];
-
-  // Generate oneof fields as normal optional fields instead of union.
-  optional bool no_unions = 8 [default = false];
-
-  // integer type tag for a message
-  optional uint32 msgid = 9;
-}
-
-// Extensions to protoc 'Descriptor' type in order to define options
-// inside a .proto file.
-//
-// Protocol Buffers extension number registry
-// --------------------------------
-// Project:  Nanopb
-// Contact:  Petteri Aimonen <jpa@kapsi.fi>
-// Web site: http://kapsi.fi/~jpa/nanopb
-// Extensions: 1010 (all types)
-// --------------------------------
-
-extend google.protobuf.FileOptions {
-    optional NanoPBOptions nanopb_fileopt = 1010;
-}
-
-extend google.protobuf.MessageOptions {
-    optional NanoPBOptions nanopb_msgopt = 1010;
-}
-
-extend google.protobuf.EnumOptions {
-    optional NanoPBOptions nanopb_enumopt = 1010;
-}
-
-extend google.protobuf.FieldOptions {
-    optional NanoPBOptions nanopb = 1010;
-}
diff --git a/src/antlion/controllers/buds_lib/dev_utils/proto/plugin.proto b/src/antlion/controllers/buds_lib/dev_utils/proto/plugin.proto
deleted file mode 100644
index e627289..0000000
--- a/src/antlion/controllers/buds_lib/dev_utils/proto/plugin.proto
+++ /dev/null
@@ -1,148 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc.  All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-//     * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: kenton@google.com (Kenton Varda)
-//
-// WARNING:  The plugin interface is currently EXPERIMENTAL and is subject to
-//   change.
-//
-// protoc (aka the Protocol Compiler) can be extended via plugins.  A plugin is
-// just a program that reads a CodeGeneratorRequest from stdin and writes a
-// CodeGeneratorResponse to stdout.
-//
-// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead
-// of dealing with the raw protocol defined here.
-//
-// A plugin executable needs only to be placed somewhere in the path.  The
-// plugin should be named "protoc-gen-$NAME", and will then be used when the
-// flag "--${NAME}_out" is passed to protoc.
-
-syntax = "proto2";
-package google.protobuf.compiler;
-option java_package = "com.google.protobuf.compiler";
-option java_outer_classname = "PluginProtos";
-
-import "google/protobuf/descriptor.proto";
-
-// An encoded CodeGeneratorRequest is written to the plugin's stdin.
-message CodeGeneratorRequest {
-  // The .proto files that were explicitly listed on the command-line.  The
-  // code generator should generate code only for these files.  Each file's
-  // descriptor will be included in proto_file, below.
-  repeated string file_to_generate = 1;
-
-  // The generator parameter passed on the command-line.
-  optional string parameter = 2;
-
-  // FileDescriptorProtos for all files in files_to_generate and everything
-  // they import.  The files will appear in topological order, so each file
-  // appears before any file that imports it.
-  //
-  // protoc guarantees that all proto_files will be written after
-  // the fields above, even though this is not technically guaranteed by the
-  // protobuf wire format.  This theoretically could allow a plugin to stream
-  // in the FileDescriptorProtos and handle them one by one rather than read
-  // the entire set into memory at once.  However, as of this writing, this
-  // is not similarly optimized on protoc's end -- it will store all fields in
-  // memory at once before sending them to the plugin.
-  repeated FileDescriptorProto proto_file = 15;
-}
-
-// The plugin writes an encoded CodeGeneratorResponse to stdout.
-message CodeGeneratorResponse {
-  // Error message.  If non-empty, code generation failed.  The plugin process
-  // should exit with status code zero even if it reports an error in this way.
-  //
-  // This should be used to indicate errors in .proto files which prevent the
-  // code generator from generating correct code.  Errors which indicate a
-  // problem in protoc itself -- such as the input CodeGeneratorRequest being
-  // unparseable -- should be reported by writing a message to stderr and
-  // exiting with a non-zero status code.
-  optional string error = 1;
-
-  // Represents a single generated file.
-  message File {
-    // The file name, relative to the output directory.  The name must not
-    // contain "." or ".." components and must be relative, not be absolute (so,
-    // the file cannot lie outside the output directory).  "/" must be used as
-    // the path separator, not "\".
-    //
-    // If the name is omitted, the content will be appended to the previous
-    // file.  This allows the generator to break large files into small chunks,
-    // and allows the generated text to be streamed back to protoc so that large
-    // files need not reside completely in memory at one time.  Note that as of
-    // this writing protoc does not optimize for this -- it will read the entire
-    // CodeGeneratorResponse before writing files to disk.
-    optional string name = 1;
-
-    // If non-empty, indicates that the named file should already exist, and the
-    // content here is to be inserted into that file at a defined insertion
-    // point.  This feature allows a code generator to extend the output
-    // produced by another code generator.  The original generator may provide
-    // insertion points by placing special annotations in the file that look
-    // like:
-    //   @@protoc_insertion_point(NAME)
-    // The annotation can have arbitrary text before and after it on the line,
-    // which allows it to be placed in a comment.  NAME should be replaced with
-    // an identifier naming the point -- this is what other generators will use
-    // as the insertion_point.  Code inserted at this point will be placed
-    // immediately above the line containing the insertion point (thus multiple
-    // insertions to the same point will come out in the order they were added).
-    // The double-@ is intended to make it unlikely that the generated code
-    // could contain things that look like insertion points by accident.
-    //
-    // For example, the C++ code generator places the following line in the
-    // .pb.h files that it generates:
-    //   // @@protoc_insertion_point(namespace_scope)
-    // This line appears within the scope of the file's package namespace, but
-    // outside of any particular class.  Another plugin can then specify the
-    // insertion_point "namespace_scope" to generate additional classes or
-    // other declarations that should be placed in this scope.
-    //
-    // Note that if the line containing the insertion point begins with
-    // whitespace, the same whitespace will be added to every line of the
-    // inserted text.  This is useful for languages like Python, where
-    // indentation matters.  In these languages, the insertion point comment
-    // should be indented the same amount as any inserted code will need to be
-    // in order to work correctly in that context.
-    //
-    // The code generator that generates the initial file and the one which
-    // inserts into it must both run as part of a single invocation of protoc.
-    // Code generators are executed in the order in which they appear on the
-    // command line.
-    //
-    // If |insertion_point| is present, |name| must also be present.
-    optional string insertion_point = 2;
-
-    // The file contents.
-    optional string content = 15;
-  }
-  repeated File file = 15;
-}
diff --git a/src/antlion/controllers/buds_lib/latency.py b/src/antlion/controllers/buds_lib/latency.py
deleted file mode 100644
index 1cd9c8d..0000000
--- a/src/antlion/controllers/buds_lib/latency.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Processes profiling data to output latency numbers."""
-#
-# Type "python latency.py -h" for help
-#
-# Currently the log data is assumed to be in the following format:
-# PROF:<event-id> <timestamp>
-# The <event-id> and <timestamp> can be in the form of any valid
-# (positive)integer literal in Python
-# Examples:
-#   PROF:0x0006 0x00000155e0d043f1
-#   PROF:6 1468355593201
-
-import argparse
-from collections import defaultdict
-import csv
-import logging
-import math
-import os
-import string
-import xml.etree.ElementTree as ET
-
-valid_fname_chars = '-_.()%s%s' % (string.ascii_letters, string.digits)
-PERCENTILE_STEP = 1
-PROFILER_DATA_PREFIX = 'PROF:'
-
-
-class EventPair(object):
-
-    def __init__(self, pair_id, latency, name):
-        self.pair_id = pair_id
-        self.latency = latency
-        self.name = name
-
-
-class LatencyEntry(object):
-
-    def __init__(self, start_timestamp, latency):
-        self.start_timestamp = start_timestamp
-        self.latency = latency
-
-
-def parse_xml(xml_file):
-    """
-    Parse the configuration xml file.
-
-    Returns:
-      event_pairs_by_pair_id: dict mapping event id to event pair object
-      event_pairs_by_start_id: dict mapping starting event to list of event pairs
-                               with that starting event.
-      event_pairs_by_end_id: dict mapping ending event to list of event pairs
-                             with that ending event.
-    """
-    root = ET.parse(xml_file).getroot()
-    event_pairs = root.findall('event-pair')
-    event_pairs_by_pair_id = {}
-    event_pairs_by_start_id = defaultdict(list)
-    event_pairs_by_end_id = defaultdict(list)
-
-    for event_pair in event_pairs:
-        start_evt = root.find(
-            "./event[@id='{0:}']".format(event_pair.attrib['start-event']))
-        end_evt = root.find(
-            "./event[@id='{0:}']".format(event_pair.attrib['end-event']))
-        start = int(start_evt.attrib['id'], 0)
-        end = int(end_evt.attrib['id'], 0)
-        paird_id = start << 32 | end
-        if paird_id in event_pairs_by_pair_id:
-            logging.error('Latency event repeated: start id = %d, end id = %d',
-                          start,
-                          end)
-            continue
-        # Create the output file name base by concatenating:
-        # "input file name base" + start event name + "_to_" + end event name
-        evt_pair_name = start_evt.attrib['name'] + '_to_' + end_evt.attrib[
-            'name']
-        evt_pair_name = [
-            c if c in valid_fname_chars else '_' for c in evt_pair_name
-        ]
-        evt_pair_name = ''.join(evt_pair_name)
-        evt_list = EventPair(paird_id, 0, evt_pair_name)
-        event_pairs_by_pair_id[paird_id] = evt_list
-        event_pairs_by_start_id[start].append(evt_list)
-        event_pairs_by_end_id[end].append(evt_list)
-    return (event_pairs_by_pair_id, event_pairs_by_start_id,
-            event_pairs_by_end_id)
-
-
-def percentile_to_index(num_entries, percentile):
-    """
-    Returns the index in an array corresponding to a percentile.
-
-    Arguments:
-      num_entries: the number of entries in the array.
-      percentile: which percentile to calculate the index for.
-    Returns:
-      ind: the index in the array corresponding to the percentile.
-    """
-    ind = int(math.floor(float(num_entries) * percentile / 100))
-    if ind > 0:
-        ind -= 1
-    return ind
-
-
-def compute_latencies(input_file, event_pairs_by_start_id,
-                      event_pairs_by_end_id):
-    """Parse the input data file and compute latencies."""
-    line_num = 0
-    lat_tables_by_pair_id = defaultdict(list)
-    while True:
-        line_num += 1
-        line = input_file.readline()
-        if not line:
-            break
-        data = line.partition(PROFILER_DATA_PREFIX)[2]
-        if not data:
-            continue
-        try:
-            event_id, timestamp = [int(x, 0) for x in data.split()]
-        except ValueError:
-            logging.error('Badly formed event entry at line #%s: %s', line_num,
-                          line)
-            continue
-        # We use event_pair.latency to temporarily store the timestamp
-        # of the start event
-        for event_pair in event_pairs_by_start_id[event_id]:
-            event_pair.latency = timestamp
-        for event_pair in event_pairs_by_end_id[event_id]:
-            # compute the latency only if we have seen the corresponding
-            # start event already
-            if event_pair.latency:
-                lat_tables_by_pair_id[event_pair.pair_id].append(
-                    LatencyEntry(event_pair.latency,
-                                 timestamp - event_pair.latency))
-                event_pair.latency = 0
-    return lat_tables_by_pair_id
-
-
-def write_data(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id):
-    for event_id, lat_table in lat_tables_by_pair_id.items():
-        event_pair = event_pairs_by_pair_id[event_id]
-        with open(fname_base + '_' + event_pair.name + '_data.csv',
-                  'wb') as out_file:
-            csv_writer = csv.writer(out_file)
-            for dat in lat_table:
-                csv_writer.writerow([dat.start_timestamp, dat.latency])
-
-
-def write_summary(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id):
-    summaries = get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id)
-    for event_id, lat_table in lat_tables_by_pair_id.items():
-        event_pair = event_pairs_by_pair_id[event_id]
-        summary = summaries[event_pair.name]
-        latencies = summary['latencies']
-        num_latencies = summary['num_latencies']
-        with open(fname_base + '_' + event_pair.name + '_summary.txt',
-                  'wb') as out_file:
-            csv_writer = csv.writer(out_file)
-            csv_writer.writerow(['Percentile', 'Latency'])
-            # Write percentile table
-            for percentile in range(1, 101):
-                ind = percentile_to_index(num_latencies, percentile)
-                csv_writer.writerow([percentile, latencies[ind]])
-
-            # Write summary
-            print('\n\nTotal number of samples = {}'.format(num_latencies),
-                  file=out_file)
-            print('Min = {}'.format(summary['min_lat']), file=out_file)
-            print('Max = {}'.format(summary['max_lat']), file=out_file)
-            print('Average = {}'.format(summary['average_lat']), file=out_file)
-            print('Median = {}'.format(summary['median_lat']), file=out_file)
-            print('90 %ile = {}'.format(summary['90pctile']), file=out_file)
-            print('95 %ile = {}'.format(summary['95pctile']), file=out_file)
-
-
-def process_latencies(config_xml, input_file):
-    """
-    End to end function to compute latencies and summaries from input file.
-    Writes latency results to files in current directory.
-
-    Arguments:
-       config_xml: xml file specifying which event pairs to compute latency
-                   btwn.
-       input_file: text file containing the timestamped events, like a log file.
-    """
-    # Parse the event configuration file
-    (event_pairs_by_pair_id, event_pairs_by_start_id,
-     event_pairs_by_end_id) = parse_xml(config_xml)
-    # Compute latencies
-    lat_tables_by_pair_id = compute_latencies(input_file,
-                                              event_pairs_by_start_id,
-                                              event_pairs_by_end_id)
-    fname_base = os.path.splitext(os.path.basename(input_file.name))[0]
-    # Write the latency data and summary to respective files
-    write_data(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id)
-    write_summary(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id)
-
-
-def get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id):
-    """
-    Process significant summaries from a table of latencies.
-
-    Arguments:
-      event_pairs_by_pair_id: dict mapping event id to event pair object
-      lat_tables_by_pair_id: dict mapping event id to latency table
-    Returns:
-      summaries: dict mapping event pair name to significant summary metrics.
-    """
-    summaries = {}
-    for event_id, lat_table in lat_tables_by_pair_id.items():
-        event_summary = {}
-        event_pair = event_pairs_by_pair_id[event_id]
-        latencies = [entry.latency for entry in lat_table]
-        latencies.sort()
-        event_summary['latencies'] = latencies
-        event_summary['num_latencies'] = len(latencies)
-        event_summary['min_lat'] = latencies[0]
-        event_summary['max_lat'] = latencies[-1]
-        event_summary['average_lat'] = sum(latencies) / len(latencies)
-        event_summary['median'] = latencies[len(latencies) // 2]
-        event_summary['90pctile'] = latencies[percentile_to_index(
-            len(latencies), 90)]
-        event_summary['95pctile'] = latencies[percentile_to_index(
-            len(latencies), 95)]
-        summaries[event_pair.name] = event_summary
-    return summaries
-
-
-def get_summaries_from_log(input_file_name, config_xml=None):
-    """
-    End to end function to compute latencies and summaries from input file.
-    Returns a summary dictionary.
-
-    Arguments:
-      input_file_name: text file containing the timestamped events, like a
-                       log file.
-      config_xml: xml file specifying which event pairs to compute latency btwn.
-    Returns:
-      summaries: dict mapping event pair name to significant summary metrics.
-    """
-    config_xml = config_xml or os.path.join(os.path.dirname(__file__),
-                                            'latency.xml')
-    (event_pairs_by_pair_id, event_pairs_by_start_id,
-     event_pairs_by_end_id) = parse_xml(config_xml)
-    # Compute latencies
-    input_file = open(input_file_name, 'r')
-    lat_tables_by_pair_id = compute_latencies(input_file,
-                                              event_pairs_by_start_id,
-                                              event_pairs_by_end_id)
-    return get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id)
-
-
-if __name__ == '__main__':
-    # Parse command-line arguments
-    parser = argparse.ArgumentParser(
-        description='Processes profiling data to output latency numbers')
-    parser.add_argument(
-        '--events-config',
-        type=argparse.FileType('r'),
-        default=os.path.join(os.path.dirname(__file__), 'latency.xml'),
-        help='The configuration XML file for events.'
-             ' If not specified uses latency.xml from current folder')
-    parser.add_argument(
-        'input', type=argparse.FileType('r'), help='The input log')
-    args = parser.parse_args()
-    process_latencies(args.events_config, args.input)
diff --git a/src/antlion/controllers/buds_lib/latency.xml b/src/antlion/controllers/buds_lib/latency.xml
deleted file mode 100644
index 320979b..0000000
--- a/src/antlion/controllers/buds_lib/latency.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0"?>
-<root>
-    <event id="1" name="Button Down" />
-    <event id="3" name="Play/Pause Button Event" />
-    <event id="4" name="A2DP Start Ind" />
-    <event id="6" name="A2DP Start Streaming" />
-    <event id="9" name="AVRCP Play Notification" />
-    <event id="10" name="AVRCP Pause Notification" />
-    <event id="12" name="Voice Cmd Btn Held" />
-    <event id="13" name="Voice Cmd Btn Released" />
-
-    <!-- Event pairs that we are interested in measuring the latency of -->
-    <event-pair start-event="1" end-event="3" />
-    <event-pair start-event="1" end-event="12" />
-    <event-pair start-event="3" end-event="9" />
-    <event-pair start-event="9" end-event="6" />
-    <event-pair start-event="1" end-event="6" />
-    <event-pair start-event="3" end-event="10" />
-    <event-pair start-event="1" end-event="10" />
-    <event-pair start-event="12" end-event="13" />
-    <event-pair start-event="13" end-event="6" />
-</root>
diff --git a/src/antlion/controllers/buds_lib/logserial.py b/src/antlion/controllers/buds_lib/logserial.py
deleted file mode 100644
index 7b71f8d..0000000
--- a/src/antlion/controllers/buds_lib/logserial.py
+++ /dev/null
@@ -1,409 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import re
-import select
-import subprocess
-import sys
-import time
-import uuid
-from logging import Logger
-from threading import Thread
-
-import serial
-from serial.tools import list_ports
-
-from antlion.controllers.buds_lib import tako_trace_logger
-
-logging = tako_trace_logger.TakoTraceLogger(Logger(__file__))
-
-RETRIES = 0
-
-
-class LogSerialException(Exception):
-    """LogSerial Exception."""
-
-
-class PortCheck(object):
-    def get_serial_ports(self):
-        """Gets the computer available serial ports.
-
-        Returns:
-            Dictionary object with all the serial port names.
-        """
-        result = {}
-        ports = list_ports.comports()
-        for port_name, description, address in ports:
-            result[port_name] = (description, address)
-        return result
-
-    # TODO: Clean up this function. The boolean logic can be simplified.
-    def search_port_by_property(self, search_params):
-        """Search ports by a dictionary of the search parameters.
-
-        Args:
-            search_params: Dictionary object with the parameters
-                           to search. i.e:
-                           {'ID_SERIAL_SHORT':'213213',
-                           'ID_USB_INTERFACE_NUM': '01'}
-        Returns:
-            Array with the ports found
-        """
-        ports_result = []
-        for port in self.get_serial_ports():
-            properties = self.get_port_properties(port=port)
-            if properties:
-                properties_exists = True
-                for port_property in search_params:
-                    properties_exists *= (port_property in properties)
-                properties_exists = True if properties_exists == 1 else False
-                if properties_exists:
-                    found = True
-                    for port_property in search_params.keys():
-                        search_value = search_params[port_property]
-                        if properties[port_property] == search_value:
-                            found *= True
-                        else:
-                            found = False
-                            break
-                    found = True if found == 1 else False
-                    if found:
-                        ports_result.append(port)
-        return ports_result
-
-    def get_port_properties(self, port):
-        """Get all the properties from a given port.
-
-        Args:
-            port: String object with the port name. i.e. '/dev/ttyACM1'
-
-        Returns:
-            dictionary object with all the properties.
-        """
-        ports = self.get_serial_ports()
-        if port in ports:
-            result = {}
-            port_address = ports[port][1]
-            property_list = None
-            if sys.platform.startswith('linux') or sys.platform.startswith(
-                    'cygwin'):
-                try:
-                    command = 'udevadm info -q property -n {}'.format(port)
-                    property_list = subprocess.check_output(command, shell=True)
-                    property_list = property_list.decode(errors='replace')
-                except subprocess.CalledProcessError as error:
-                    logging.error(error)
-                if property_list:
-                    properties = filter(None, property_list.split('\n'))
-                    for prop in properties:
-                        p = prop.split('=')
-                        result[p[0]] = p[1]
-            elif sys.platform.startswith('win'):
-                regex = ('(?P<type>[A-Z]*)\sVID\:PID\=(?P<vid>\w*)'
-                         '\:(?P<pid>\w*)\s+(?P<adprop>.*$)')
-                m = re.search(regex, port_address)
-                if m:
-                    result['type'] = m.group('type')
-                    result['vid'] = m.group('vid')
-                    result['pid'] = m.group('pid')
-                    adprop = m.group('adprop').strip()
-                    if adprop:
-                        prop_array = adprop.split(' ')
-                        for prop in prop_array:
-                            p = prop.split('=')
-                            result[p[0]] = p[1]
-                    if 'LOCATION' in result:
-                        interface = int(result['LOCATION'].split('.')[1])
-                        if interface < 10:
-                            result['ID_USB_INTERFACE_NUM'] = '0{}'.format(
-                                interface)
-                        else:
-                            result['ID_USB_INTERFACE_NUM'] = '{}'.format(
-                                interface)
-                    win_vid_pid = '*VID_{}*PID_{}*'.format(result['vid'],
-                                                           result['pid'])
-                    command = (
-                            'powershell gwmi "Win32_USBControllerDevice |' +
-                            ' %{[wmi]($_.Dependent)} |' +
-                            ' Where-Object -Property PNPDeviceID -Like "' +
-                            win_vid_pid + '" |' +
-                            ' Where-Object -Property Service -Eq "usbccgp" |' +
-                            ' Select-Object -Property PNPDeviceID"')
-                    res = subprocess.check_output(command, shell=True)
-                    r = res.decode('ascii')
-                    m = re.search('USB\\\\.*', r)
-                    if m:
-                        result['ID_SERIAL_SHORT'] = (
-                            m.group().strip().split('\\')[2])
-            return result
-
-    def port_exists(self, port):
-        """Check if a serial port exists in the computer by the port name.
-
-        Args:
-            port: String object with the port name. i.e. '/dev/ttyACM1'
-
-        Returns:
-            True if it was found, False if not.
-        """
-        exists = port in self.get_serial_ports()
-        return exists
-
-
-class LogSerial(object):
-    def __init__(self,
-                 port,
-                 baudrate,
-                 bytesize=8,
-                 parity='N',
-                 stopbits=1,
-                 timeout=0.15,
-                 retries=0,
-                 flush_output=True,
-                 terminator='\n',
-                 output_path=None,
-                 serial_logger=None):
-        global RETRIES
-        self.set_log = False
-        self.output_path = None
-        self.set_output_path(output_path)
-        if serial_logger:
-            self.set_logger(serial_logger)
-        self.monitor_port = PortCheck()
-        if self.monitor_port.port_exists(port=port):
-            self.connection_handle = serial.Serial()
-            RETRIES = retries
-            self.reading = True
-            self.log = []
-            self.log_thread = Thread()
-            self.command_ini_index = None
-            self.is_logging = False
-            self.flush_output = flush_output
-            self.terminator = terminator
-            if port:
-                self.connection_handle.port = port
-            if baudrate:
-                self.connection_handle.baudrate = baudrate
-            if bytesize:
-                self.connection_handle.bytesize = bytesize
-            if parity:
-                self.connection_handle.parity = parity
-            if stopbits:
-                self.connection_handle.stopbits = stopbits
-            if timeout:
-                self.connection_handle.timeout = timeout
-            try:
-                self.open()
-            except Exception as e:
-                self.close()
-                logging.error(e)
-        else:
-            raise LogSerialException(
-                'The port {} does not exist'.format(port))
-
-    def set_logger(self, serial_logger):
-        global logging
-        logging = serial_logger
-        self.set_output_path(getattr(logging, 'output_path', '/tmp'))
-        self.set_log = True
-
-    def set_output_path(self, output_path):
-        """Set the output path for the flushed log.
-
-        Args:
-            output_path: String object with the path
-        """
-        if output_path:
-            if os.path.exists(output_path):
-                self.output_path = output_path
-            else:
-                raise LogSerialException('The output path does not exist.')
-
-    def refresh_port_connection(self, port):
-        """Will update the port connection without closing the read thread.
-
-        Args:
-            port: String object with the new port name. i.e. '/dev/ttyACM1'
-
-        Raises:
-            LogSerialException if the port is not alive.
-        """
-        if self.monitor_port.port_exists(port=port):
-            self.connection_handle.port = port
-            self.open()
-        else:
-            raise LogSerialException(
-                'The port {} does not exist'.format(port))
-
-    def is_port_alive(self):
-        """Verify if the current port is alive in the computer.
-
-        Returns:
-            True if its alive, False if its missing.
-        """
-        alive = self.monitor_port.port_exists(port=self.connection_handle.port)
-        return alive
-
-    def open(self):
-        """Will open the connection with the current port settings."""
-        while self.connection_handle.isOpen():
-            self.connection_handle.close()
-            time.sleep(0.5)
-        self.connection_handle.open()
-        if self.flush_output:
-            self.flush()
-        self.start_reading()
-        logging.info('Connection Open')
-
-    def close(self):
-        """Will close the connection and the read thread."""
-        self.stop_reading()
-        if self.connection_handle:
-            self.connection_handle.close()
-        if not self.set_log:
-            logging.flush_log()
-        self.flush_log()
-        logging.info('Connection Closed')
-
-    def flush(self):
-        """Will flush any input from the serial connection."""
-        self.write('\n')
-        self.connection_handle.flushInput()
-        self.connection_handle.flush()
-        flushed = 0
-        while True:
-            ready_r, _, ready_x = (select.select([self.connection_handle], [],
-                                                 [self.connection_handle], 0))
-            if ready_x:
-                logging.exception('exception from serial port')
-                return
-            elif ready_r:
-                flushed += 1
-                # This may cause underlying buffering.
-                self.connection_handle.read(1)
-                # Flush the underlying buffer too.
-                self.connection_handle.flush()
-            else:
-                break
-            if flushed > 0:
-                logging.debug('dropped >{} bytes'.format(flushed))
-
-    def write(self, command, wait_time=0.2):
-        """Will write into the serial connection.
-
-        Args:
-            command: String object with the text to write.
-            wait_time: Float object with the seconds to wait after the
-                       command was issued.
-        """
-        if command:
-            if self.terminator:
-                command += self.terminator
-            self.command_ini_index = len(self.log)
-            self.connection_handle.write(command.encode())
-            if wait_time:
-                time.sleep(wait_time)
-            logging.info('cmd [{}] sent.'.format(command.strip()))
-
-    def flush_log(self):
-        """Will output the log into a CSV file."""
-        if len(self.log) > 0:
-            path = ''
-            if not self.output_path:
-                self.output_path = os.getcwd()
-            elif not os.path.exists(self.output_path):
-                self.output_path = os.getcwd()
-            path = os.path.join(self.output_path,
-                                str(uuid.uuid4()) + '_serial.log')
-            with open(path, 'a') as log_file:
-                for info in self.log:
-                    log_file.write('{}, {}\n'.format(info[0], info[1]))
-
-    def read(self):
-        """Will read from the log the output from the serial connection
-        after a write command was issued. It will take the initial time
-        of the command as a reference.
-
-        Returns:
-            Array object with the log lines.
-        """
-        buf_read = []
-        command_end_index = len(self.log)
-        info = self.query_serial_log(self.command_ini_index, command_end_index)
-        for line in info:
-            buf_read.append(line[1])
-        self.command_ini_index = command_end_index
-        return buf_read
-
-    def get_all_log(self):
-        """Gets the log object that collects the logs.
-
-        Returns:
-            DataFrame object with all the logs.
-        """
-        return self.log
-
-    def query_serial_log(self, from_index, to_index):
-        """Will query the session log from a given time in EPOC format.
-
-        Args:
-            from_timestamp: Double value with the EPOC timestamp to start
-                            the search.
-            to_timestamp: Double value with the EPOC timestamp to finish the
-                          rearch.
-
-        Returns:
-            DataFrame with the result query.
-        """
-        if from_index < to_index:
-            info = self.log[from_index:to_index]
-            return info
-
-    def _start_reading_thread(self):
-        if self.connection_handle.isOpen():
-            self.reading = True
-            while self.reading:
-                try:
-                    data = self.connection_handle.readline().decode('utf-8')
-                    if data:
-                        self.is_logging = True
-                        data.replace('/n', '')
-                        data.replace('/r', '')
-                        data = data.strip()
-                        self.log.append([time.time(), data])
-                    else:
-                        self.is_logging = False
-                except Exception:
-                    time.sleep(1)
-            logging.info('Read thread closed')
-
-    def start_reading(self):
-        """Method to start the log collection."""
-        if not self.log_thread.isAlive():
-            self.log_thread = Thread(target=self._start_reading_thread, args=())
-            self.log_thread.daemon = True
-            try:
-                self.log_thread.start()
-            except(KeyboardInterrupt, SystemExit):
-                self.close()
-        else:
-            logging.warning('Not running log thread, is already alive')
-
-    def stop_reading(self):
-        """Method to stop the log collection."""
-        self.reading = False
-        self.log_thread.join(timeout=600)
diff --git a/src/antlion/controllers/buds_lib/tako_trace_logger.py b/src/antlion/controllers/buds_lib/tako_trace_logger.py
deleted file mode 100644
index 187cfdc..0000000
--- a/src/antlion/controllers/buds_lib/tako_trace_logger.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-
-from antlion import tracelogger
-
-
-class TakoTraceLogger(tracelogger.TraceLogger):
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.d = self.debug
-        self.e = self.error
-        self.i = self.info
-        self.t = self.step
-        self.w = self.warning
-
-    def _logger_level(self, level_name):
-        level = logging.getLevelName(level_name)
-        return lambda *args, **kwargs: self._logger.log(level, *args, **kwargs)
-
-    def step(self, msg, *args, **kwargs):
-        """Delegate a step call to the underlying logger."""
-        self._log_with(self._logger_level('STEP'), 1, msg, *args, **kwargs)
-
-    def device(self, msg, *args, **kwargs):
-        """Delegate a device call to the underlying logger."""
-        self._log_with(self._logger_level('DEVICE'), 1, msg, *args, **kwargs)
-
-    def suite(self, msg, *args, **kwargs):
-        """Delegate a device call to the underlying logger."""
-        self._log_with(self._logger_level('SUITE'), 1, msg, *args, **kwargs)
-
-    def case(self, msg, *args, **kwargs):
-        """Delegate a case call to the underlying logger."""
-        self._log_with(self._logger_level('CASE'), 1, msg, *args, **kwargs)
-
-    def flush_log(self):
-        """This function exists for compatibility with Tako's logserial module.
-
-        Note that flushing the log is handled automatically by python's logging
-        module.
-        """
diff --git a/src/antlion/controllers/buds_lib/test_actions/__init__.py b/src/antlion/controllers/buds_lib/test_actions/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/buds_lib/test_actions/agsa_acts.py b/src/antlion/controllers/buds_lib/test_actions/agsa_acts.py
deleted file mode 100644
index 3d03cbd..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/agsa_acts.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import re
-
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.buds_lib.test_actions.base_test_actions import BaseTestAction
-from antlion.controllers.buds_lib.test_actions.base_test_actions import timed_action
-
-PHONE_DFU_PATH = ('/storage/emulated/0/Android/data/com.google.android'
-                  '.googlequicksearchbox/files/download_cache/apollo.dfu')
-
-AGSA_BROADCAST = (
-    'am  broadcast -a \'action_ota\' --es dfu_url %s --es build_label 9.9.9 '
-    '--ez is_force %s com.google.android.googlequicksearchbox/com.google'
-    '.android.apps.gsa.broadcastreceiver.CommonBroadcastReceiver')
-
-
-class AgsaOTAError(Exception):
-    """OTA Error"""
-
-
-class AgsaTestActions(BaseTestAction):
-    """AGSA test action library."""
-
-    def __init__(self, android_dev, logger=None):
-        """
-        Simple init code to keep the android object for future reference.
-        Args:
-           android_dev: devcontrollers.android_device.AndroidDevice
-        """
-        super(AgsaTestActions, self).__init__(logger)
-        self.dut = android_dev
-
-    @timed_action
-    def _initiate_agsa_ota(self, file_path, destination=None, force=True):
-        """Pushes the dfu file to phone and issues broadcast to start AGSA OTA
-
-        Args:
-            file_path: (string) path of dfu file
-            destination: (string) destination path on the phone uses
-                         $PHONE_DFU_PATH if not specified
-            force: (bool) option to force the issued broadcast?
-        """
-        if not destination:
-            destination = PHONE_DFU_PATH
-        if self.dut.push_file_to_phone(file_path, destination):
-            if force:
-                force = 'true'
-            else:
-                force = 'false'
-
-            command = AGSA_BROADCAST % (destination, force)
-            output = self.dut.adb.shell(command.split())
-            if 'result=0' in output:
-                self.logger.info('Agsa broadcast successful!')
-                return True
-            else:
-                self.logger.error('Agsa broadcast failed')
-                return False
-
-    @timed_action
-    def _wait_for_ota_to_finish(self, timeout=660):
-        """Logcat is continuously read to keep track of the OTA transfer
-
-        Args:
-           timeout: (int) time to wait before timing out.
-
-        Returns:
-            True on success
-
-        Raises: AgsaOTAError if the timeout is reached.
-        """
-        # regex that confirms completion
-        transfer_completion_match = \
-            re.compile('OTA progress: 100 %|OTA img rcvd')
-        # time now + $timeout
-        expiry_time = datetime.datetime.now() + \
-                      datetime.timedelta(seconds=timeout)
-        self.logger.info('Waiting for OTA transfer to complete....')
-        while True:
-            # time now - 1 minute: to be used in getting logs from a minute back
-            now_plus_minute = datetime.datetime.now() - \
-                              datetime.timedelta(seconds=5)
-            try:
-                # grep logcat for 'DeviceLog'
-                filtered_log = self.dut.logcat_filter_message(
-                    now_plus_minute.strftime('%m-%d %H:%M:%S.000'),
-                    'Devicelog:')
-                if filtered_log and \
-                        transfer_completion_match.search(filtered_log):
-                    self.logger.info('Transfer completed!')
-                    break
-            except AdbError:
-                # gets thrown if no matching string is found
-                pass
-            if datetime.datetime.now() > expiry_time:
-                self.logger.error('Timed out waiting for OTA to complete.')
-                raise AgsaOTAError('Timed out waiting for OTA to complete.')
-        return True
-
-    @timed_action
-    def initiate_agsa_and_wait_until_transfer(self, file_path, destination=None,
-                                              force=True, timeout=660):
-        """Calls _initiate_agsa_ota and _wait_for_ota_to_finish
-
-        Returns:
-            True on success and False otherwise
-        """
-        self._initiate_agsa_ota(file_path, destination, force)
-        return self._wait_for_ota_to_finish(timeout)
-
-    @timed_action
-    def install_agsa(self, version, force=False):
-        """
-        Installs the specified version of AGSA if different from the one
-        currently installed, unless force is set to True.
-
-        Args:
-            version: (string) ex: '7.14.21.release'
-            force: (bool) installs only if currently installed version is
-                   different than the one to be installed. True installs
-                   by-passing version check
-        Return:
-            True on Success and False otherwise
-        """
-        # get currently installed version, and install agsa only if different
-        # from what is requested
-        current_version = self.dut.get_agsa_version()
-        if (not (version.replace('alpha', '').replace('release', '')
-                 in current_version)) or force:
-            self.logger.info('Current AGSA version is %s' % current_version)
-            self.logger.info('Installing AGSA version %s...' % version)
-            if self.and_actions.install_agsa(version):
-                self.logger.info('Install success!')
-                return True
-            else:
-                self.logger.error('Failed to install version %s' % version)
-                return False
diff --git a/src/antlion/controllers/buds_lib/test_actions/apollo_acts.py b/src/antlion/controllers/buds_lib/test_actions/apollo_acts.py
deleted file mode 100644
index ac2fa6d..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/apollo_acts.py
+++ /dev/null
@@ -1,616 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-A comprehensive interface for performing test actions on an Apollo device.
-"""
-
-import time
-
-from antlion.controllers.android_lib.tel.tel_utils import initiate_call
-from antlion.controllers.android_lib.tel.tel_utils import wait_for_droid_in_call
-from antlion.controllers.buds_lib.apollo_lib import DeviceError
-from antlion.controllers.buds_lib.test_actions.agsa_acts import AgsaOTAError
-from antlion.controllers.buds_lib.test_actions.base_test_actions import BaseTestAction
-from antlion.controllers.buds_lib.test_actions.base_test_actions import timed_action
-from antlion.controllers.buds_lib.test_actions.bt_utils import BTUtils
-from antlion.libs.utils.timer import TimeRecorder
-from antlion.utils import wait_until
-
-PACKAGE_NAME_AGSA = 'com.google.android.googlequicksearchbox'
-PACKAGE_NAME_GMS = 'com.google.android.gms'
-PACKAGE_NAME_NEARBY = 'com.google.android.gms.policy_nearby'
-PACKAGE_NAME_SETTINGS = 'com.android.settings'
-BISTO_MP_DETECT_HEADER = 'Pixel Buds'
-BISTO_MP_DEVICE_TEXT = 'Pixel Buds'
-BISTO_MP_DETECT_TEXT = BISTO_MP_DETECT_HEADER + BISTO_MP_DEVICE_TEXT
-BISTO_MP_CANCEL_TEXT = 'CANCEL'
-BISTO_MP_CONNECT_TEXT = 'TAP TO CONNECT'
-BISTO_MP_CONNECT_FAIL_TEXT = 'Can\'t connect to'
-BISTO_MP_CONNECT_RETRY_TEXT = 'TRY AGAIN'
-BISTO_MP_CONNECTED_TEXT = 'Now set up your Google Assistant'
-BISTO_MP_CONNECTED_EXIT_TEXT = 'NO THANKS'
-BISTO_MP_EXIT_PROMPT_TEXT = 'Exit setup?'
-BISTO_MP_EXIT_CONFIRM_TEXT = 'EXIT'
-PROFILES_CONNECTED = {
-    'HFP(pri.)': 'TRUE',
-    'A2DP(pri)': 'TRUE',
-}
-PROFILES_DISCONNECTED = {
-    'HFP(pri.)': 'FALSE',
-    'A2DP(pri)': 'FALSE',
-}
-COMP_PROFILE_CONNECTED = {'Comp': 'TRUE'}
-COMP_PROFILE_DISCONNECTED = {'Comp': 'FALSE'}
-AVRCPSTATUS = 'AvrcpPlayPause'
-DEFAULT_TIMEOUT = 60  # wait 60 seconds max for bond/connect.
-DEFAULT_CMD_INTERVAL = 0.5  # default interval between serial commands
-DEFAULT_CMD_RETRY = 5  # default retry times when a command failed.
-DEFAULT_BT_PROFILES = [
-    'HFP Pri', 'HFP Sec', 'A2DP Pri', 'A2DP Sec', 'CTRL', 'AUDIO', 'DEBUG',
-    'TRANS'
-]
-DEFAULT_BT_STATUS = ['A2DP(pri)', 'HFP(pri.)', 'Comp']
-
-
-class TestActsError(Exception):
-    """Exception from Apollo Acts Error."""
-
-
-class ApolloTestActions(BaseTestAction):
-    """Test action class for all Apollo test actions."""
-
-    def __init__(self, apollo_dev, logger=None):
-        """
-        Args:
-             apollo_dev: apollo.lib.apollo_lib.Device the Apollo device
-        """
-        super(ApolloTestActions, self).__init__(logger)
-        self.dut = apollo_dev
-        # need a embedded timer for connection time measurements.
-        self.measurement_timer = TimeRecorder()
-
-    def bluetooth_get_status(self):
-        status = self.dut.get_bt_status()
-        self.logger.info(status)
-
-    def wait_for_bluetooth_disconnection(self, timeout=60):
-        """ Set pairing mode and disconnect.
-
-        This action will wait until the apollo profiles are false.
-
-        Args:
-             timeout: integer, timeout value in seconds.
-        """
-        result = True
-        apollo_status = self.dut.get_bt_status()
-        self.logger.info('Waiting for the disconnection.')
-        time.sleep(1)
-        ini_time = time.time()
-        while len(apollo_status) != len(
-            [s for s in apollo_status.values() if s == 'FALSE']):
-            apollo_status = self.dut.get_bt_status()
-            if (time.time() - ini_time) > timeout:
-                self.logger.warning('Timeout waiting for the disconnection.')
-                return False
-            time.sleep(1)
-        return result
-
-    def pair(self, phone, companion_app=True):
-        """Pairs phone with apollo and validates bluetooth profiles.
-
-        Args:
-            phone: android phone
-            apollo: apollo device
-            companion_app (optional): True if the phone has a companion app
-                                      installed. False otherwise.
-
-        Raises:
-            TestActsError: Bluetooth pairing failed/ Dut BT status check failed.
-        """
-        bt_util = BTUtils()
-        target_addr = self.dut.bluetooth_address
-        if bt_util.android_device_in_connected_state(phone, target_addr):
-            self.logger.info('Already paired and connected, skipping pairing.')
-        else:
-            if bt_util.android_device_in_paired_state(phone, target_addr):
-                self.logger.info(
-                    'Device is paired but not connected, unpair first.')
-                if not bt_util.bt_unpair(phone, self.dut):
-                    raise TestActsError('Unable to unpair the device')
-            bt_util.bt_pair_and_connect(phone, self.dut)
-            self.logger.info('DEVICE PAIRED')
-            if companion_app:
-                profiles = PROFILES_CONNECTED.copy()
-                profiles.update(COMP_PROFILE_CONNECTED)
-            else:
-                profiles = PROFILES_CONNECTED
-            self.logger.info(profiles)
-            if not bt_util.check_device_bt(device=self.dut, profiles=profiles):
-                raise TestActsError('Dut BT status check failed.')
-            else:
-                return True
-
-    def unpair(self, phone, companion_app=True, factory_reset_dut=True):
-        """Unpairs phone from apollo and validates bluetooth profiles.
-
-        Args:
-            phone: android phone
-            apollo: apollo device
-            companion_app (optional): True if the phone has a companion app
-                                      installed. False otherwise.
-
-        Raises:
-            TestActsError: Bluetooth unpairing/Dut BT status check failed.
-        """
-        bt_util = BTUtils()
-        target_addr = self.dut.bluetooth_address
-        if not bt_util.android_device_in_paired_state(phone, target_addr):
-            self.logger.info('Device is already unpaired, skipping unpairing.')
-        else:
-            result = bt_util.bt_unpair(
-                phone, self.dut, factory_reset_dut=factory_reset_dut)
-            if not result:
-                raise TestActsError('Bluetooth unpairing failed.')
-            if companion_app:
-                profiles = PROFILES_DISCONNECTED.copy()
-                profiles.update(COMP_PROFILE_DISCONNECTED)
-            else:
-                profiles = PROFILES_DISCONNECTED
-            if not bt_util.check_device_bt(device=self.dut, profiles=profiles):
-                raise TestActsError('Dut BT status check failed.')
-            else:
-                return True
-
-    def is_paired(self, phone):
-        """Check if the given apollo is paired with the android device.
-
-        Args:
-            phone: android phone
-            apollo: apollo device
-
-        Returns:
-            Bool: True if apollo is paired with the phone.
-        """
-        bt_util = BTUtils()
-        target_addr = self.dut.bluetooth_address
-        return bt_util.android_device_in_paired_state(phone, target_addr)
-
-    def send_music_play_event_and_validate(self):
-        """Send the play event on Apollo and validate the response and DSP
-        Status.
-
-        Raises:
-            TestActsError: Error while playing the music.
-        """
-        play_detection_timeout = 1
-        if self.dut.is_streaming():
-            self.logger.info('Music already streaming. Skipping play event..')
-            return
-        self.logger.info('Playing video...')
-        is_played = self.dut.music_control_events(
-            AVRCPSTATUS, self.dut.apollo_log_regex.AVRCP_PLAY_REGEX)
-        if not is_played:
-            self.logger.error('AVRCP Played status not found')
-            raise TestActsError('AVRCP Played status not found.')
-        wait_until(
-            lambda: self.dut.is_streaming(),
-            play_detection_timeout,
-            sleep_s=0.25)
-        if not self.dut.is_streaming():
-            self.logger.error('Device is NOT in a deviceA2DPStreaming state')
-            raise TestActsError(
-                'Device is NOT in a deviceA2DPStreaming state.')
-
-    def send_music_pause_event_and_validate(self):
-        """Send the pause event on Apollo and validate the responses and DSP
-        Status.
-
-        Raises:
-            TestActsError: Error while pausing the music.
-        """
-        paused_detection_timeout = 10
-        if not self.dut.is_streaming():
-            self.logger.info('Music not streaming. Skipping pause event..')
-            return
-        self.logger.info("Pausing video...")
-        is_paused = self.dut.music_control_events(
-            AVRCPSTATUS, self.dut.apollo_log_regex.AVRCP_PAUSE_REGEX)
-        if not is_paused:
-            self.logger.error('AVRCP Paused statue not found')
-            raise TestActsError('AVRCP Paused status not found.')
-        wait_until(
-            lambda: not self.dut.is_streaming(),
-            paused_detection_timeout,
-            sleep_s=0.25)
-        if self.dut.is_streaming():
-            self.logger.error('Device is still in deviceA2DPStreaming state')
-            raise TestActsError(
-                'Device is still in deviceA2DPStreaming state.')
-
-    def vol_down_and_validate(self):
-        """Send volume down twice and validate by comparing two levels
-
-        Raises:
-            TestActsError: Error
-        """
-        self.logger.info('Decreasing volume')
-        before_vol = self.dut.volume('Down', 1)
-        time.sleep(2)
-        after_vol = self.dut.volume('Down', 1)
-        if not after_vol or not before_vol or after_vol >= before_vol:
-            self.logger.error(
-                'Unable to decrease the volume. Before: %s. After: %s' %
-                (before_vol, after_vol))
-            raise TestActsError('error decreasing volume')
-
-    def vol_up_and_validate(self):
-        """Send volume up twice and validate by comparing two levels
-
-        Raises:
-            TestActsError: Error
-        """
-        self.logger.info('Increasing volume')
-        before_vol = self.dut.volume('Up', 1)
-        time.sleep(2)
-        after_vol = self.dut.volume('Up', 1)
-        if not after_vol or not before_vol or after_vol <= before_vol:
-            self.logger.error(
-                'Unable to increase the volume. Before: %s. After: %s' %
-                (before_vol, after_vol))
-            raise TestActsError('error increasing volume')
-
-    def call_and_validate_ringing(self,
-                                  calling_phone,
-                                  number_to_call,
-                                  call_retries=10):
-        for i in range(call_retries):
-            initiate_call(self.logger, calling_phone, number_to_call)
-            is_calling = wait_for_droid_in_call(
-                self.logger, calling_phone, max_time=10)
-            if is_calling:
-                self.logger.info('Call initiated!')
-                break
-            else:
-                self.logger.warning('Call is not initiating.')
-                if i == call_retries:
-                    self.logger.error('Call initiation retries exhausted')
-                    raise TestActsError(
-                        '%s retries failed to initiate the call' %
-                        (call_retries))
-            self.logger.warning('Retrying call...')
-        # wait for offhook state and return
-        wait_until(
-            (lambda: calling_phone.droid.telecomGetCallState() == 'OFFHOOK'),
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-        self.logger.info('Phone call initiated on %s' % calling_phone.serial)
-
-    def answer_phone_and_validate_call_received(self, receiving_phone):
-        # wait until the phone rings (assumes that a call is initiated prior to
-        # running the command)
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'RINGING',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-        self.logger.info('Ring detected on %s - now answering the call...' %
-                         (receiving_phone.serial))
-        # answer the phone call
-        self.dut.tap()
-        # wait until OFFHOOK state
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'OFFHOOK',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-
-    def hangup_phone_and_validate_call_hung(self, receiving_phone):
-        # wait for phone to be in OFFHOOK state (assumed that a call is answered
-        # and engaged)
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'OFFHOOK',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-        # end the call (post and pre 1663 have different way of ending call)
-        self.logger.info(
-            'Hanging up the call on %s...' % receiving_phone.serial)
-        if self.dut.version < 1663:
-            self.dut.tap()
-        else:
-            self.dut.hold(duration=100)
-        # wait for idle state
-        wait_until(
-            lambda: receiving_phone.droid.telecomGetCallState() == 'IDLE',
-            timeout_s=40,
-            condition=True,
-            sleep_s=.5)
-
-    @timed_action
-    def factory_reset(self):
-        ret = False
-        try:
-            self.dut.factory_reset()
-            ret = True
-        except DeviceError as ex:
-            self.logger.warning('Failed to reset Apollo: %s' % ex)
-        return ret
-
-    @timed_action
-    def wait_for_magic_pairing_notification(self, android_act, timeout=60):
-        dut_detected = False
-        start_time = time.time()
-        self.logger.info('Waiting for MP prompt: %s' % BISTO_MP_DEVICE_TEXT)
-        while not dut_detected:
-            android_act.dut.ui_util.uia.wait.update()
-            self.sleep(1)
-            if android_act.dut.ui_util.uia(
-                    textContains=BISTO_MP_DETECT_HEADER, enabled=True).exists:
-                if android_act.dut.ui_util.uia(
-                        textContains=BISTO_MP_DEVICE_TEXT,
-                        enabled=True).exists:
-                    self.logger.info('DUT Apollo MP prompt detected!')
-                    dut_detected = True
-                else:
-                    self.logger.info(
-                        'NONE DUT Apollo MP prompt detected! Cancel and RETRY!'
-                    )
-                    android_act.dut.ui_util.click_by_text(BISTO_MP_CANCEL_TEXT)
-            if time.time() - start_time > timeout:
-                break
-        if not dut_detected:
-            self.logger.info(
-                'Failed to get %s MP prompt' % BISTO_MP_DEVICE_TEXT)
-        return dut_detected
-
-    @timed_action
-    def start_magic_pairing(self, android_act, timeout=30, retries=3):
-        paired = False
-        android_act.dut.ui_util.click_by_text(
-            BISTO_MP_CONNECT_TEXT, timeout=timeout)
-        connect_start_time = time.time()
-        count = 0
-        timeout = 30
-
-        while not paired and count < retries:
-            android_act.dut.ui_util.uia.wait.update()
-            self.sleep(1)
-            if time.time() - connect_start_time > timeout:
-                self.logger.info('Time out! %s seconds' % time)
-                android_act.app_force_close_agsa()
-                self.logger.info('Timeout(s): %s' % timeout)
-                break
-            if android_act.dut.ui_util.uia(
-                    textContains=BISTO_MP_CONNECT_FAIL_TEXT,
-                    enabled=True).exists:
-                count += 1
-                self.logger.info('MP FAILED! Retry %s.' % count)
-                android_act.dut.ui_util.click_by_text(
-                    BISTO_MP_CONNECT_RETRY_TEXT)
-                connect_start_time = time.time()
-            elif android_act.dut.ui_util.uia(
-                    textContains=BISTO_MP_CONNECTED_TEXT, enabled=True).exists:
-                self.logger.info('MP SUCCESSFUL! Exiting AGSA...')
-                paired = True
-                android_act.dut.ui_util.click_by_text(
-                    BISTO_MP_CONNECTED_EXIT_TEXT)
-                android_act.dut.ui_util.wait_for_text(
-                    BISTO_MP_EXIT_PROMPT_TEXT)
-                android_act.dut.ui_util.click_by_text(
-                    BISTO_MP_EXIT_CONFIRM_TEXT)
-        return paired
-
-    @timed_action
-    def turn_bluetooth_on(self):
-        self.dut.cmd('pow 1')
-        return True
-
-    @timed_action
-    def turn_bluetooth_off(self):
-        self.dut.cmd('pow 0')
-        return True
-
-    @timed_action
-    def wait_for_bluetooth_a2dp_hfp(self,
-                                    timeout=DEFAULT_TIMEOUT,
-                                    interval=DEFAULT_CMD_INTERVAL):
-        """Wait for BT connection by checking if A2DP and HFP connected.
-
-        This is used for BT pair+connect test.
-
-        Args:
-            timeout: float, timeout value in second.
-            interval: float, float, interval between polling BT profiles.
-            timer: TimeRecorder, time recorder to save the connection time.
-        """
-        # Need to check these two profiles
-        pass_profiles = ['A2DP Pri', 'HFP Pri']
-        # TODO(b/122730302): Change to just raise an error
-        ret = False
-        try:
-            ret = self._wait_for_bluetooth_profile_connection(
-                pass_profiles, timeout, interval, self.measurement_timer)
-        except DeviceError as ex:
-            self.logger.warning('Failed to wait for BT connection: %s' % ex)
-        return ret
-
-    def _wait_for_bluetooth_profile_connection(self, profiles_to_check,
-                                               timeout, interval, timer):
-        """A generic method to wait for specified BT profile connection.
-
-        Args:
-            profiles_to_check: list, profile names (A2DP, HFP, etc.) to be
-                               checked.
-            timeout: float, timeout value in second.
-            interval: float, interval between polling BT profiles.
-            timer: TimeRecorder, time recorder to save the connection time.
-
-        Returns:
-            bool, True if checked profiles are connected, False otherwise.
-        """
-        timer.start_timer(profiles_to_check, force=True)
-        start_time = time.time()
-        while time.time() - start_time < timeout:
-            profiles = self._bluetooth_check_profile_connection()
-            for profile in profiles:
-                if profiles[profile]:
-                    timer.stop_timer(profile)
-            # now check if the specified profile connected.
-            all_connected = True
-            for profile in profiles_to_check:
-                if not profiles[profile]:
-                    all_connected = False
-                    break
-            if all_connected:
-                return True
-            time.sleep(interval)
-        # make sure the profile timer are stopped.
-        timer.stop_timer(profiles_to_check)
-        return False
-
-    def _bluetooth_check_profile_connection(self):
-        """Return profile connection in a boolean dict.
-
-        key=<profile name>, val = T/F
-        """
-        profiles = dict()
-        output = self.dut.get_conn_devices()
-        # need to strip all whitespaces.
-        conn_devs = {}
-
-        for key in output:
-            conn_devs[key.strip()] = output[key].strip()
-        for key in conn_devs:
-            self.logger.info('%s:%s' % (key, conn_devs[key]))
-            if 'XXXXXXXX' in conn_devs[key]:
-                profiles[key] = conn_devs[key]
-            else:
-                profiles[key] = False
-        return profiles
-
-    @timed_action
-    def wait_for_bluetooth_status_connection_all(
-            self, timeout=DEFAULT_TIMEOUT, interval=DEFAULT_CMD_INTERVAL):
-        """Wait for BT connection by checking if A2DP, HFP and COMP connected.
-
-        This is used for BT reconnect test.
-
-        Args:
-            timeout: float, timeout value in second.
-            interval: float, float, interval between polling BT profiles.
-        """
-        ret = False
-        self.measurement_timer.start_timer(DEFAULT_BT_STATUS, force=True)
-        # All profile not connected by default.
-        connected_status = {key: False for key in DEFAULT_BT_STATUS}
-        start_time = time.time()
-        while time.time() < start_time + timeout:
-            try:
-                time.sleep(interval)
-                status = self.dut.get_bt_status()
-                for key in DEFAULT_BT_STATUS:
-                    if (not connected_status[key] and key in status
-                            and 'TRUE' == status[key]):
-                        self.measurement_timer.stop_timer(key)
-                        connected_status[key] = True
-                        self.logger.info(
-                            'BT status %s connected at %fs.' %
-                            (key, self.measurement_timer.elapsed(key)))
-                if False not in connected_status.values():
-                    ret = True
-                    break
-            except DeviceError as ex:
-                self.logger.warning(
-                    'Device exception when waiting for reconnection: %s' % ex)
-        self.measurement_timer.stop_timer(DEFAULT_BT_STATUS)
-        return ret
-
-    def initiate_ota_via_agsa_verify_transfer_completion_in_logcat(
-            self,
-            agsa_action,
-            dfu_path,
-            destination=None,
-            force=True,
-            apply_image=True,
-            reconnect=True):
-        """
-        Starts an OTA by issuing an intent to AGSA after copying the dfu file to
-        the appropriate location on the phone
-
-        Args:
-            agsa_action: projects.agsa.lib.test_actions.agsa_acts
-                         .AgsaTestActions
-            dfu_path: string - absolute path of dfu file
-            destination: string - absolute path of file on phone if not
-                         specified will use
-                         /storage/emulated/0/Android/data/com.google.android
-                         .googlequicksearchbox/files/download_cache/apollo.dfu
-            force: value set in the intent sent to AGSA
-            True if success False otherwise
-        """
-        try:
-            agsa_action.initiate_agsa_and_wait_until_transfer(
-                dfu_path, destination=destination, force=force)
-            if apply_image:
-                # set in case
-                self.dut.set_in_case(reconnect=reconnect)
-        except AgsaOTAError as ex:
-            self.logger.error('Failed to OTA via AGSA %s' % ex)
-            return False
-        except DeviceError as ex:
-            self.logger.error('Failed to bring up device %s' % ex)
-            return False
-        return True
-
-    @timed_action
-    def wait_for_bluetooth_a2dp_hfp_rfcomm_connect(
-            self, address, timeout=DEFAULT_TIMEOUT,
-            interval=DEFAULT_CMD_INTERVAL):
-        """Wait for BT reconnection by checking if A2DP, HFP and COMP connected
-        to the specified address.
-
-        This is used for BT connection switch test.
-
-        Args:
-            address: str, MAC of the address to connect.
-            timeout: float, timeout value in second.
-            interval: float, float, interval between polling BT profiles.
-
-        Returns:
-            True if the specified address is connected. False otherwise.
-        """
-        last_4_hex = address.replace(':', '')[-4:].lower()
-        profiles_to_check = ['HFP Pri', 'A2DP Pri', 'CTRL', 'AUDIO']
-        self.measurement_timer.start_timer(profiles_to_check, force=True)
-        end_time = time.time() + timeout
-        all_connected = True
-        while time.time() < end_time:
-            all_connected = True
-            profiles = self._bluetooth_check_profile_connection()
-            for profile in profiles_to_check:
-                if (profile in profiles and profiles[profile]
-                        and last_4_hex in profiles[profile].lower()):
-                    self.measurement_timer.stop_timer(profile)
-                else:
-                    all_connected = False
-            if all_connected:
-                break
-            time.sleep(interval)
-        # make sure the profile timer are stopped.
-        self.measurement_timer.stop_timer(profiles_to_check)
-
-        return all_connected
diff --git a/src/antlion/controllers/buds_lib/test_actions/audio_utils.py b/src/antlion/controllers/buds_lib/test_actions/audio_utils.py
deleted file mode 100644
index fdc4bfa..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/audio_utils.py
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""A generic library for audio related test actions"""
-
-import datetime
-import time
-
-from antlion import utils
-from antlion.controllers.buds_lib import tako_trace_logger
-
-
-class AudioUtilsError(Exception):
-    """Generic AudioUtils Error."""
-
-
-class AudioUtils(object):
-    """A utility that manages generic audio interactions and actions on one or
-    more devices under test.
-
-    To be maintained such that it is compatible with any devices that pair with
-    phone.
-    """
-
-    def __init__(self):
-        self.logger = tako_trace_logger.TakoTraceLogger()
-
-    def play_audio_into_device(self, audio_file_path, audio_player, dut):
-        """Open mic on DUT, play audio into DUT, close mic on DUT.
-
-        Args:
-            audio_file_path: the path to the audio file to play, relative to the
-                           audio_player
-            audio_player: the device from which to play the audio file
-            dut: the device with the microphone
-
-        Returns:
-            bool: result of opening and closing DUT mic
-        """
-
-        if not dut.open_mic():
-            self.logger.error('DUT open_mic did not return True')
-            return False
-        audio_player.play(audio_file_path)
-        if not dut.close_mic():
-            self.logger.error('DUT close_mic did not return True.')
-            return False
-        return True
-
-    def get_agsa_interpretation_of_audio_file(self, audio_file_path,
-                                              target_interpretation,
-                                              audio_player, dut,
-                                              android_device):
-        """Gets AGSA interpretation from playing audio into DUT.
-
-        **IMPORTANT**: AGSA on android device must be connected to DUT and able
-        to receive info from DUT mic.
-
-        Args:
-          audio_file_path: the path to the audio file to play, relative to the
-                           audio_player
-          target_interpretation: what agsa interpretation should be
-          audio_player: the device from which to play the audio file
-          dut: the device with the microphone
-          android_device: android device to which dut is connected
-
-        Returns:
-          interpretation: agsa interpretation of audio file
-          score: similarity score between interpretation and target
-                 interpretation
-        """
-
-        play_start_time = datetime.datetime.now()
-        interpretation, score = '', 0.0
-        if self.play_audio_into_device(audio_file_path=audio_file_path,
-                                       audio_player=audio_player,
-                                       dut=dut):
-            time.sleep(1)
-            interpretation = android_device.agsa_interpretation(
-                cutoff_time=play_start_time,
-                target_interpretation=target_interpretation,
-                source='bisto')
-            score = utils.string_similarity(target_interpretation,
-                                            interpretation)
-
-        return interpretation, score
diff --git a/src/antlion/controllers/buds_lib/test_actions/base_test_actions.py b/src/antlion/controllers/buds_lib/test_actions/base_test_actions.py
deleted file mode 100644
index 518f9c6..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/base_test_actions.py
+++ /dev/null
@@ -1,194 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Base test action class, provide a base class for representing a collection of
-test actions.
-"""
-
-import datetime
-import inspect
-import time
-
-from antlion.controllers.buds_lib import tako_trace_logger
-from antlion.libs.utils.timer import TimeRecorder
-
-# All methods start with "_" are considered hidden.
-DEFAULT_HIDDEN_ACTION_PREFIX = '_'
-
-
-def timed_action(method):
-    """A common decorator for test actions."""
-
-    def timed(self, *args, **kw):
-        """Log the enter/exit/time of the action method."""
-        func_name = self._convert_default_action_name(method.__name__)
-        if not func_name:
-            func_name = method.__name__
-        self.log_step('%s...' % func_name)
-        self.timer.start_timer(func_name, True)
-        result = method(self, *args, **kw)
-        # TODO: Method run time collected can be used for automatic KPI checks
-        self.timer.stop_timer(func_name)
-        return result
-
-    return timed
-
-
-class TestActionNotFoundError(Exception):
-    pass
-
-
-class BaseTestAction(object):
-    """Class for organizing a collection of test actions.
-
-    Test actions are just normal python methods, and should perform a specified
-    action. @timed_action decorator can log the entry/exit of the test action,
-    and the execution time.
-
-    The BaseTestAction class also provides a mapping between human friendly
-    names and test action methods in order to support configuration base
-    execution. By default, all methods not hidden (not start with "_") is
-    exported as human friendly name by replacing "_" with space.
-
-    Test action method can be called directly, or via
-    _perform_action(<human friendly name>, <args...>)
-    method.
-    """
-
-    @classmethod
-    def _fill_default_action_map(cls):
-        """Parse current class and get all test actions methods."""
-        # a <human readable name>:<method name> map.
-        cls._action_map = dict()
-        for name, _ in inspect.getmembers(cls, inspect.ismethod):
-            act_name = cls._convert_default_action_name(name)
-            if act_name:
-                cls._action_map[act_name] = name
-
-    @classmethod
-    def _convert_default_action_name(cls, func_name):
-        """Default conversion between method name -> human readable action name.
-        """
-        if not func_name.startswith(DEFAULT_HIDDEN_ACTION_PREFIX):
-            act_name = func_name.lower()
-            act_name = act_name.replace('_', ' ')
-            act_name = act_name.title()
-            return act_name.strip()
-        else:
-            return ''
-
-    @classmethod
-    def _add_action_alias(cls, default_act_name, alias):
-        """Add an alias to an existing test action."""
-        if default_act_name in cls._action_map:
-            cls._action_map[alias] = cls._action_map[default_act_name]
-            return True
-        else:
-            return False
-
-    @classmethod
-    def _get_action_names(cls):
-        if not hasattr(cls, '_action_map'):
-            cls._fill_default_action_map()
-        return cls._action_map.keys()
-
-    @classmethod
-    def get_current_time_logcat_format(cls):
-        return datetime.datetime.now().strftime('%m-%d %H:%M:%S.000')
-
-    @classmethod
-    def _action_exists(cls, action_name):
-        """Verify if an human friendly action name exists or not."""
-        if not hasattr(cls, '_action_map'):
-            cls._fill_default_action_map()
-        return action_name in cls._action_map
-
-    @classmethod
-    def _validate_actions(cls, action_list):
-        """Verify if an human friendly action name exists or not.
-
-        Args:
-          :param action_list: list of actions to be validated.
-
-        Returns:
-          tuple of (is valid, list of invalid/non-existent actions)
-        """
-        not_found = []
-        for action_name in action_list:
-            if not cls._action_exists(action_name):
-                not_found.append(action_name)
-        all_valid = False if not_found else True
-        return all_valid, not_found
-
-    def __init__(self, logger=None):
-        if logger is None:
-            self.logger = tako_trace_logger.TakoTraceLogger()
-            self.log_step = self.logger.step
-        else:
-            self.logger = logger
-            self.log_step = self.logger.info
-        self.timer = TimeRecorder()
-        self._fill_default_action_map()
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *args):
-        pass
-
-    def _perform_action(self, action_name, *args, **kwargs):
-        """Perform the specified human readable action."""
-        if action_name not in self._action_map:
-            raise TestActionNotFoundError('Action %s not found this class.'
-                                          % action_name)
-
-        method = self._action_map[action_name]
-        ret = getattr(self, method)(*args, **kwargs)
-        return ret
-
-    @timed_action
-    def print_actions(self):
-        """Example action methods.
-
-        All test action method must:
-            1. return a value. False means action failed, any other value means
-               pass.
-            2. should not start with "_". Methods start with "_" is hidden.
-        All test action method may:
-            1. have optional arguments. Mutable argument can be used to pass
-               value
-            2. raise exceptions. Test case class is expected to handle
-               exceptions
-        """
-        num_acts = len(self._action_map)
-
-        self.logger.info('I can do %d action%s:' %
-                      (num_acts, 's' if num_acts != 1 else ''))
-        for act in self._action_map.keys():
-            self.logger.info(' - %s' % act)
-        return True
-
-    @timed_action
-    def sleep(self, seconds):
-        self.logger.info('%s seconds' % seconds)
-        time.sleep(seconds)
-
-
-if __name__ == '__main__':
-    acts = BaseTestAction()
-    acts.print_actions()
-    acts._perform_action('print actions')
-    print(acts._get_action_names())
diff --git a/src/antlion/controllers/buds_lib/test_actions/bt_utils.py b/src/antlion/controllers/buds_lib/test_actions/bt_utils.py
deleted file mode 100644
index a650e5f..0000000
--- a/src/antlion/controllers/buds_lib/test_actions/bt_utils.py
+++ /dev/null
@@ -1,294 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# TODO: In the future to decide whether to move it to a common directory rather
-# than the one specific to apollo.
-# TODO: The move is contingent on understanding the functions that should be
-# supported by the dut device (sec_device).
-
-"""A generic library with bluetooth related functions. The connection is assumed
-to be between and android phone with any dut (referred to as secondary device)
-device that supports the following calls:
-        sec_device.turn_on_bluetooth()
-        sec_device.is_bt_enabled():
-        sec_device.bluetooth_address
-        sec_device.set_pairing_mode()
-        sec_device.factory_reset()
-
-"""
-import queue
-import time
-from logging import Logger
-
-from antlion import asserts
-from antlion.controllers.buds_lib import tako_trace_logger
-from antlion.utils import TimeoutError
-from antlion.utils import wait_until
-
-# Add connection profile for future devices in this dictionary
-WEARABLE_BT_PROTOCOLS = {
-    'rio': {
-        'Comp. App': 'FALSE',
-        'HFP (pri.)': 'FALSE',
-        'HFP (sec.)': 'FALSE',
-        'A2DP (pri.)': 'FALSE',
-        'A2DP (sec.)': 'FALSE',
-    },
-    'apollo': {
-        'Comp': 'FALSE',
-        'HFP(pri.)': 'FALSE',
-        'HFP(sec.)': 'FALSE',
-        'A2DP(pri)': 'FALSE',
-        'A2DP(sec)': 'FALSE',
-    }
-}
-
-
-class BTUtilsError(Exception):
-    """Generic BTUtils error"""
-
-
-class BTUtils(object):
-    """A utility that provides access to bluetooth controls.
-
-    This class to be maintained as a generic class such that it is compatible
-    with any devices that pair with a phone.
-    """
-
-    def __init__(self):
-        self.default_timeout = 60
-        self.logger = tako_trace_logger.TakoTraceLogger(Logger(__file__))
-
-    def bt_pair_and_connect(self, pri_device, sec_device):
-        """Pair and connect a pri_device to a sec_device.
-
-        Args:
-        pri_device: an android device with sl4a installed.
-        sec_device: a wearable device.
-
-        Returns:
-        (Tuple)True if pair and connect successful. False Otherwise.
-        Time in ms to execute the flow.
-        """
-
-        pair_time = self.bt_pair(pri_device, sec_device)
-        connect_result, connect_time = self.bt_connect(pri_device, sec_device)
-        return connect_result, pair_time + connect_time
-
-    def bt_pair(self, pri_device, sec_device):
-        """Pair a pri_device to a sec_device.
-
-        Args:
-        pri_device: an android device with sl4a installed.
-        sec_device: a wearable device.
-
-        Returns:
-            (Tuple)True if pair successful. False Otherwise.
-            Time in ms to execute the flow.
-         """
-        start_time = time.time()
-        # Enable BT on the primary device if it's not currently ON.
-        if not pri_device.droid.bluetoothCheckState():
-            pri_device.droid.bluetoothToggleState(True)
-            try:
-                pri_device.ed.pop_event(event_name='BluetoothStateChangedOn',
-                                        timeout=10)
-            except queue.Empty:
-                raise BTUtilsError(
-                    'Failed to toggle Bluetooth on the primary device.')
-        sec_device.turn_on_bluetooth()
-        if not sec_device.is_bt_enabled():
-            raise BTUtilsError('Could not turn on Bluetooth on secondary '
-                               'devices')
-        target_addr = sec_device.bluetooth_address
-        sec_device.set_pairing_mode()
-
-        pri_device.droid.bluetoothDiscoverAndBond(target_addr)
-        # Loop until we have bonded successfully or timeout.
-        self.logger.info('Verifying devices are bonded')
-        try:
-            wait_until(lambda: self.android_device_in_paired_state(pri_device,
-                                                                   target_addr),
-                       self.default_timeout)
-        except TimeoutError as err:
-            raise BTUtilsError('bt_pair failed: {}'.format(err))
-        end_time = time.time()
-        return end_time - start_time
-
-    def bt_connect(self, pri_device, sec_device):
-        """Connect a previously paired sec_device to a pri_device.
-
-        Args:
-          pri_device: an android device with sl4a installed.
-          sec_device: a wearable device.
-
-        Returns:
-          (Tuple)True if connect successful. False otherwise.
-          Time in ms to execute the flow.
-        """
-        start_time = end_time = time.time()
-        target_addr = sec_device.bluetooth_address
-        # First check that devices are bonded.
-        paired = False
-        for paired_device in pri_device.droid.bluetoothGetBondedDevices():
-            if paired_device['address'] == target_addr:
-                paired = True
-                break
-        if not paired:
-            self.logger.error('Not paired to %s', sec_device.device_name)
-            return False, 0
-
-        self.logger.info('Attempting to connect.')
-        pri_device.droid.bluetoothConnectBonded(target_addr)
-
-        self.logger.info('Verifying devices are connected')
-        wait_until(
-            lambda: self.android_device_in_connected_state(pri_device,
-                                                           target_addr),
-            self.default_timeout)
-        end_time = time.time()
-        return True, end_time - start_time
-
-    def android_device_in_paired_state(self, device, mac_address):
-        """Check device in paired list."""
-        bonded_devices = device.droid.bluetoothGetBondedDevices()
-        for d in bonded_devices:
-            if d['address'] == mac_address:
-                self.logger.info('Successfully bonded to device')
-                return True
-        return False
-
-    def android_device_in_connected_state(self, device, mac_address):
-        """Check device in connected list."""
-        connected_devices = device.droid.bluetoothGetConnectedDevices()
-        for d in connected_devices:
-            if d['address'] == mac_address:
-                self.logger.info('Successfully connected to device')
-                return True
-        return False
-
-    def bt_unpair(self, pri_device, sec_device, factory_reset_dut=True):
-        """Unpairs two Android devices using bluetooth.
-
-        Args:
-          pri_device: an android device with sl4a installed.
-          sec_device: a wearable device.
-
-        Returns:
-          (Tuple)True: if the devices successfully unpaired.
-          Time in ms to execute the flow.
-        Raises:
-          Error: When devices fail to unpair.
-        """
-        target_address = sec_device.bluetooth_address
-        if not self.android_device_in_paired_state(pri_device, target_address):
-            self.logger.debug('Already unpaired.')
-            return True, 0
-        self.logger.debug('Unpairing from %s' % target_address)
-        start_time = end_time = time.time()
-        asserts.assert_true(
-            pri_device.droid.bluetoothUnbond(target_address),
-            'Failed to request device unpairing.')
-
-        # Check that devices have unpaired successfully.
-        self.logger.debug('Verifying devices are unpaired')
-
-        # Loop until we have unbonded successfully or timeout.
-        wait_until(
-            lambda: self.android_device_in_paired_state(pri_device,
-                                                        target_address),
-            self.default_timeout,
-            condition=False)
-
-        self.logger.info('Successfully unpaired from %s' % target_address)
-        if factory_reset_dut:
-            self.logger.info('Factory reset DUT')
-            sec_device.factory_reset()
-        end_time = time.time()
-        return True, end_time - start_time
-
-    def check_device_bt(self, device, **kwargs):
-        """Check the Bluetooth connection status from device.
-
-        Args:
-          device: a wearable device.
-          **kwargs: additional parameters
-
-        Returns:
-          True: if bt status check success, False otherwise.
-        """
-        if device.dut_type in ['rio', 'apollo']:
-            profiles = kwargs.get('profiles')
-            return self.check_dut_status(device, profiles)
-
-    def check_dut_status(self, device, profiles=None):
-        """Check the Bluetooth connection status from rio/apollo device.
-
-        Args:
-          device: rio/apollo device
-          profiles: A dict of profiles, eg. {'HFP (pri.)': 'TRUE', 'Comp. App':
-            'TRUE', 'A2DP (pri.)': 'TRUE'}
-
-        Returns:
-          True: if bt status check success, False otherwise.
-        """
-        expected = WEARABLE_BT_PROTOCOLS
-        self.logger.info(profiles)
-        for key in profiles:
-            expected[device.dut_type][key] = profiles[key]
-        try:
-            wait_until(lambda: self._compare_profile(device,
-                                                     expected[device.dut_type]),
-                       self.default_timeout)
-        except TimeoutError:
-            status = device.get_bt_status()
-            msg_fmt = self._get_formatted_output(expected[device.dut_type],
-                                                 status)
-            self.logger.error(msg_fmt)
-            return False
-        return True
-
-    def _get_formatted_output(self, expected, actual):
-        """On BT status mismatch generate formatted output string.
-
-        Args:
-          expected: Expected BT status hash.
-          actual: Actual BT status hash from Rio.
-
-        Returns:
-          Formatted mismatch string.
-
-        Raises:
-          Error: When unexpcted parameter encounterd.
-        """
-        msg = ''
-        mismatch_format = '{}: Expected {} Actual {}. '
-        if actual is None:
-            raise BTUtilsError('None is not expected.')
-        for key in expected.keys():
-            if expected[key] != actual[key]:
-                msg += mismatch_format.format(key, expected[key], actual[key])
-        return msg
-
-    def _compare_profile(self, device, expected):
-        """Compare input expected profile with actual."""
-        actual = device.get_bt_status()
-        if actual is None:
-            raise BTUtilsError('None is not expected.')
-        for key in expected.keys():
-            if expected[key] != actual[key]:
-                return False
-        return True
diff --git a/src/antlion/controllers/cellular_lib/AndroidCellularDut.py b/src/antlion/controllers/cellular_lib/AndroidCellularDut.py
deleted file mode 100644
index b9fa887..0000000
--- a/src/antlion/controllers/cellular_lib/AndroidCellularDut.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.android_lib.tel import tel_utils
-from antlion.controllers.cellular_lib import BaseCellularDut
-import os
-
-GET_BUILD_VERSION = 'getprop ro.build.version.release'
-PIXELLOGGER_CONTROL = 'am broadcast -n com.android.pixellogger/.receiver.' \
-                      'AlwaysOnLoggingReceiver -a com.android.pixellogger.' \
-                      'service.logging.LoggingService.' \
-                      'ACTION_CONFIGURE_ALWAYS_ON_LOGGING ' \
-                      '-e intent_key_enable "{}"'
-
-NETWORK_TYPE_TO_BITMASK = {
-    BaseCellularDut.PreferredNetworkType.LTE_ONLY: '01000001000000000000',
-    BaseCellularDut.PreferredNetworkType.NR_LTE: '11000001000000000000',
-    BaseCellularDut.PreferredNetworkType.WCDMA_ONLY: '00000100001110000100',
-}
-
-class AndroidCellularDut(BaseCellularDut.BaseCellularDut):
-    """ Android implementation of the cellular DUT class."""
-    def __init__(self, ad, logger):
-        """ Keeps a handler to the android device.
-
-        Args:
-           ad: Android device handler
-           logger: a handler to the logger object
-        """
-        self.ad = ad
-        self.log = logger
-        logger.info('Initializing Android DUT with baseband version {}'.format(
-            ad.adb.getprop('gsm.version.baseband')))
-
-    def toggle_airplane_mode(self, new_state=True):
-        """ Turns airplane mode on / off.
-
-        Args:
-          new_state: True if airplane mode needs to be enabled.
-        """
-        tel_utils.toggle_airplane_mode(self.log, self.ad, new_state)
-
-    def toggle_data_roaming(self, new_state=True):
-        """ Enables or disables cellular data roaming.
-
-        Args:
-          new_state: True if data roaming needs to be enabled.
-        """
-        tel_utils.toggle_cell_data_roaming(self.ad, new_state)
-
-    def get_rx_tx_power_levels(self):
-        """ Obtains Rx and Tx power levels measured from the DUT.
-
-        Returns:
-            A tuple where the first element is an array with the RSRP value
-            in each Rx chain, and the second element is the Tx power in dBm.
-            Values for invalid or disabled Rx / Tx chains are set to None.
-        """
-        return tel_utils.get_rx_tx_power_levels(self.log, self.ad)
-
-    def set_apn(self, name, apn, type='default'):
-        """ Sets the Access Point Name.
-
-        Args:
-          name: the APN name
-          apn: the APN
-          type: the APN type
-        """
-        self.ad.droid.telephonySetAPN(name, apn, type)
-
-    def set_preferred_network_type(self, type):
-        """ Sets the preferred RAT.
-
-        Args:
-          type: an instance of class PreferredNetworkType
-        """
-
-        # If android version is S or later, uses bit mask to set and return.
-        version = self.ad.adb.shell(GET_BUILD_VERSION)
-        try:
-            version_in_number = int(version)
-            if version_in_number > 11:
-                set_network_cmd = 'cmd phone set-allowed-network-types-for-users '
-                set_network_cmd += NETWORK_TYPE_TO_BITMASK[type]
-                self.ad.adb.shell(set_network_cmd)
-                get_network_cmd = 'cmd phone get-allowed-network-types-for-users'
-                allowed_network = self.ad.adb.shell(get_network_cmd)
-                self.log.info('The allowed network: {}'.format(allowed_network))
-                return
-        except ValueError:
-            self.log.info('The android version is older than S, use sl4a')
-
-        if type == BaseCellularDut.PreferredNetworkType.LTE_ONLY:
-            formatted_type = tel_utils.NETWORK_MODE_LTE_ONLY
-        elif type == BaseCellularDut.PreferredNetworkType.WCDMA_ONLY:
-            formatted_type = tel_utils.NETWORK_MODE_WCDMA_ONLY
-        elif type == BaseCellularDut.PreferredNetworkType.GSM_ONLY:
-            formatted_type = tel_utils.NETWORK_MODE_GSM_ONLY
-        else:
-            raise ValueError('Invalid RAT type.')
-
-        if not self.ad.droid.telephonySetPreferredNetworkTypesForSubscription(
-                formatted_type, self.ad.droid.subscriptionGetDefaultSubId()):
-            self.log.error("Could not set preferred network type.")
-        else:
-            self.log.info("Preferred network type set.")
-
-    def get_telephony_signal_strength(self):
-        """ Wrapper for the method with the same name in tel_utils.
-
-        Will be deprecated and replaced by get_rx_tx_power_levels. """
-        tel_utils.get_telephony_signal_strength(self.ad)
-
-    def start_modem_logging(self):
-        """ Starts on-device log collection. """
-        self.ad.adb.shell('rm /data/vendor/slog/*.* -f')
-        self.ad.adb.shell(PIXELLOGGER_CONTROL.format('true'))
-
-    def stop_modem_logging(self):
-        """ Stops log collection and pulls logs. """
-        output_path = self.ad.device_log_path + '/modem/'
-        os.makedirs(output_path, exist_ok=True)
-        self.ad.adb.shell(PIXELLOGGER_CONTROL.format('false'))
diff --git a/src/antlion/controllers/cellular_lib/BaseCellConfig.py b/src/antlion/controllers/cellular_lib/BaseCellConfig.py
deleted file mode 100644
index 71939fd..0000000
--- a/src/antlion/controllers/cellular_lib/BaseCellConfig.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class BaseCellConfig:
-    """ Base cell configuration class.
-
-    Attributes:
-      output_power: a float indicating the required signal level at the
-          instrument's output.
-      input_power: a float indicating the required signal level at the
-          instrument's input.
-    """
-    # Configuration dictionary keys
-    PARAM_UL_PW = 'pul'
-    PARAM_DL_PW = 'pdl'
-
-    def __init__(self, log):
-        """ Initialize the base station config by setting all its
-            parameters to None.
-        Args:
-            log: logger object.
-        """
-        self.log = log
-        self.output_power = None
-        self.input_power = None
-        self.band = None
-
-    def incorporate(self, new_config):
-        """ Incorporates a different configuration by replacing the current
-            values with the new ones for all the parameters different to None.
-        Args:
-            new_config: 5G cell configuration object.
-        """
-        for attr, value in vars(new_config).items():
-            if value and not hasattr(self, attr):
-                setattr(self, attr, value)
diff --git a/src/antlion/controllers/cellular_lib/BaseCellularDut.py b/src/antlion/controllers/cellular_lib/BaseCellularDut.py
deleted file mode 100644
index 2e677a6..0000000
--- a/src/antlion/controllers/cellular_lib/BaseCellularDut.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-
-
-class PreferredNetworkType(Enum):
-    """ Available preferred network types that can be passed to
-  set_preferred_network_type"""
-    LTE_ONLY = 'lte-only'
-    GSM_ONLY = 'gsm-only'
-    WCDMA_ONLY = 'wcdma-only'
-    NR_LTE = 'nr-lte'
-
-
-class BaseCellularDut():
-    """ Base class for DUTs used with cellular simulators. """
-    def toggle_airplane_mode(self, new_state=True):
-        """ Turns airplane mode on / off.
-
-        Args:
-          new_state: True if airplane mode needs to be enabled.
-        """
-        raise NotImplementedError()
-
-    def toggle_data_roaming(self, new_state=True):
-        """ Enables or disables cellular data roaming.
-
-        Args:
-          new_state: True if data roaming needs to be enabled.
-        """
-        raise NotImplementedError()
-
-    def get_rx_tx_power_levels(self):
-        """ Obtains Rx and Tx power levels measured from the DUT.
-
-        Returns:
-          A tuple where the first element is an array with the RSRP value
-          in each Rx chain, and the second element is the Tx power in dBm.
-          Values for invalid or disabled Rx / Tx chains are set to None.
-        """
-        raise NotImplementedError()
-
-    def set_apn(self, name, apn, type='default'):
-        """ Sets the Access Point Name.
-
-        Args:
-          name: the APN name
-          apn: the APN
-          type: the APN type
-        """
-        raise NotImplementedError()
-
-    def set_preferred_network_type(self, type):
-        """ Sets the preferred RAT.
-
-        Args:
-          type: an instance of class PreferredNetworkType
-        """
-        raise NotImplementedError()
-
-    def get_telephony_signal_strength(self):
-        """ Wrapper for the method with the same name in tel_utils.
-
-        Will be deprecated and replaced by get_rx_tx_power_levels. """
-        raise NotImplementedError()
-
-    def start_modem_logging(self):
-        """ Starts on-device log collection. """
-        raise NotImplementedError()
-
-    def stop_modem_logging(self):
-        """ Stops log collection and pulls logs. """
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/cellular_lib/BaseSimulation.py b/src/antlion/controllers/cellular_lib/BaseSimulation.py
deleted file mode 100644
index 043f802..0000000
--- a/src/antlion/controllers/cellular_lib/BaseSimulation.py
+++ /dev/null
@@ -1,741 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from enum import Enum
-
-import numpy as np
-from antlion.controllers import cellular_simulator
-from antlion.controllers.cellular_lib.BaseCellConfig import BaseCellConfig
-
-
-class BaseSimulation(object):
-    """ Base class for cellular connectivity simulations.
-
-    Classes that inherit from this base class implement different simulation
-    setups. The base class contains methods that are common to all simulation
-    configurations.
-
-    """
-
-    NUM_UL_CAL_READS = 3
-    NUM_DL_CAL_READS = 5
-    MAX_BTS_INPUT_POWER = 30
-    MAX_PHONE_OUTPUT_POWER = 23
-    UL_MIN_POWER = -60.0
-
-    # Keys to obtain settings from the test_config dictionary.
-    KEY_CALIBRATION = "calibration"
-    KEY_ATTACH_RETRIES = "attach_retries"
-    KEY_ATTACH_TIMEOUT = "attach_timeout"
-
-    # Filepath to the config files stored in the Anritsu callbox. Needs to be
-    # formatted to replace {} with either A or B depending on the model.
-    CALLBOX_PATH_FORMAT_STR = 'C:\\Users\\MD8475{}\\Documents\\DAN_configs\\'
-
-    # Time in seconds to wait for the phone to settle
-    # after attaching to the base station.
-    SETTLING_TIME = 10
-
-    # Default time in seconds to wait for the phone to attach to the basestation
-    # after toggling airplane mode. This setting can be changed with the
-    # KEY_ATTACH_TIMEOUT keyword in the test configuration file.
-    DEFAULT_ATTACH_TIMEOUT = 120
-
-    # The default number of attach retries. This setting can be changed with
-    # the KEY_ATTACH_RETRIES keyword in the test configuration file.
-    DEFAULT_ATTACH_RETRIES = 3
-
-    # These two dictionaries allow to map from a string to a signal level and
-    # have to be overridden by the simulations inheriting from this class.
-    UPLINK_SIGNAL_LEVEL_DICTIONARY = {}
-    DOWNLINK_SIGNAL_LEVEL_DICTIONARY = {}
-
-    # Units for downlink signal level. This variable has to be overridden by
-    # the simulations inheriting from this class.
-    DOWNLINK_SIGNAL_LEVEL_UNITS = None
-
-    def __init__(
-        self, simulator, log, dut, test_config, calibration_table,
-        nr_mode=None):
-        """ Initializes the Simulation object.
-
-        Keeps a reference to the callbox, log and dut handlers and
-        initializes the class attributes.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-        """
-
-        self.simulator = simulator
-        self.log = log
-        self.dut = dut
-        self.calibration_table = calibration_table
-        self.nr_mode = nr_mode
-
-        # Turn calibration on or off depending on the test config value. If the
-        # key is not present, set to False by default
-        if self.KEY_CALIBRATION not in test_config:
-            self.log.warning('The {} key is not set in the testbed '
-                             'parameters. Setting to off by default. To '
-                             'turn calibration on, include the key with '
-                             'a true/false value.'.format(
-                                 self.KEY_CALIBRATION))
-
-        self.calibration_required = test_config.get(self.KEY_CALIBRATION,
-                                                    False)
-
-        # Obtain the allowed number of retries from the test configs
-        if self.KEY_ATTACH_RETRIES not in test_config:
-            self.log.warning('The {} key is not set in the testbed '
-                             'parameters. Setting to {} by default.'.format(
-                                 self.KEY_ATTACH_RETRIES,
-                                 self.DEFAULT_ATTACH_RETRIES))
-
-        self.attach_retries = test_config.get(self.KEY_ATTACH_RETRIES,
-                                              self.DEFAULT_ATTACH_RETRIES)
-
-        # Obtain the attach timeout from the test configs
-        if self.KEY_ATTACH_TIMEOUT not in test_config:
-            self.log.warning('The {} key is not set in the testbed '
-                             'parameters. Setting to {} by default.'.format(
-                                 self.KEY_ATTACH_TIMEOUT,
-                                 self.DEFAULT_ATTACH_TIMEOUT))
-
-        self.attach_timeout = test_config.get(self.KEY_ATTACH_TIMEOUT,
-                                              self.DEFAULT_ATTACH_TIMEOUT)
-
-        # Create an empty list for cell configs.
-        self.cell_configs = []
-
-        # Store the current calibrated band
-        self.current_calibrated_band = None
-
-        # Path loss measured during calibration
-        self.dl_path_loss = None
-        self.ul_path_loss = None
-
-        # Target signal levels obtained during configuration
-        self.sim_dl_power = None
-        self.sim_ul_power = None
-
-        # Stores RRC status change timer
-        self.rrc_sc_timer = None
-
-        # Set to default APN
-        log.info("Configuring APN.")
-        self.dut.set_apn('test', 'test')
-
-        # Enable roaming on the phone
-        self.dut.toggle_data_roaming(True)
-
-        # Make sure airplane mode is on so the phone won't attach right away
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for airplane mode setting to propagate
-        time.sleep(2)
-
-        # Prepare the simulator for this simulation setup
-        self.setup_simulator()
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-        raise NotImplementedError()
-
-    def attach(self):
-        """ Attach the phone to the basestation.
-
-        Sets a good signal level, toggles airplane mode
-        and waits for the phone to attach.
-
-        Returns:
-            True if the phone was able to attach, False if not.
-        """
-
-        # Turn on airplane mode
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for airplane mode setting to propagate
-        time.sleep(2)
-
-        # Provide a good signal power for the phone to attach easily
-        new_config = BaseCellConfig(self.log)
-        new_config.input_power = -10
-        new_config.output_power = -30
-        self.simulator.configure_bts(new_config)
-        self.cell_configs[0].incorporate(new_config)
-
-        # Try to attach the phone.
-        for i in range(self.attach_retries):
-
-            try:
-
-                # Turn off airplane mode
-                self.dut.toggle_airplane_mode(False)
-
-                # Wait for the phone to attach.
-                self.simulator.wait_until_attached(timeout=self.attach_timeout)
-
-            except cellular_simulator.CellularSimulatorError:
-
-                # The phone failed to attach
-                self.log.info(
-                    "UE failed to attach on attempt number {}.".format(i + 1))
-
-                # Turn airplane mode on to prepare the phone for a retry.
-                self.dut.toggle_airplane_mode(True)
-
-                # Wait for APM to propagate
-                time.sleep(3)
-
-                # Retry
-                if i < self.attach_retries - 1:
-                    # Retry
-                    continue
-                else:
-                    # No more retries left. Return False.
-                    return False
-
-            else:
-                # The phone attached successfully.
-                time.sleep(self.SETTLING_TIME)
-                self.log.info("UE attached to the callbox.")
-                break
-
-        return True
-
-    def detach(self):
-        """ Detach the phone from the basestation.
-
-        Turns airplane mode and resets basestation.
-        """
-
-        # Set the DUT to airplane mode so it doesn't see the
-        # cellular network going off
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for APM to propagate
-        time.sleep(2)
-
-        # Power off basestation
-        self.simulator.detach()
-
-    def stop(self):
-        """  Detach phone from the basestation by stopping the simulation.
-
-        Stop the simulation and turn airplane mode on. """
-
-        # Set the DUT to airplane mode so it doesn't see the
-        # cellular network going off
-        self.dut.toggle_airplane_mode(True)
-
-        # Wait for APM to propagate
-        time.sleep(2)
-
-        # Stop the simulation
-        self.simulator.stop()
-
-    def start(self):
-        """ Start the simulation by attaching the phone and setting the
-        required DL and UL power.
-
-        Note that this refers to starting the simulated testing environment
-        and not to starting the signaling on the cellular instruments,
-        which might have been done earlier depending on the cellular
-        instrument controller implementation. """
-
-        if not self.attach():
-            raise RuntimeError('Could not attach to base station.')
-
-        # Starts IP traffic while changing this setting to force the UE to be
-        # in Communication state, as UL power cannot be set in Idle state
-        self.start_traffic_for_calibration()
-
-        # Wait until it goes to communication state
-        self.simulator.wait_until_communication_state()
-
-        # Set uplink power to a low value before going to the actual desired
-        # value. This avoid inconsistencies produced by the hysteresis in the
-        # PA switching points.
-        self.log.info('Setting UL power to -5 dBm before going to the '
-                      'requested value to avoid incosistencies caused by '
-                      'hysteresis.')
-        self.set_uplink_tx_power(-5)
-
-        # Set signal levels obtained from the test parameters
-        self.set_downlink_rx_power(self.sim_dl_power)
-        self.set_uplink_tx_power(self.sim_ul_power)
-
-        # Verify signal level
-        try:
-            rx_power, tx_power = self.dut.get_rx_tx_power_levels()
-
-            if not tx_power or not rx_power[0]:
-                raise RuntimeError('The method return invalid Tx/Rx values.')
-
-            self.log.info('Signal level reported by the DUT in dBm: Tx = {}, '
-                          'Rx = {}.'.format(tx_power, rx_power))
-
-            if abs(self.sim_ul_power - tx_power) > 1:
-                self.log.warning('Tx power at the UE is off by more than 1 dB')
-
-        except RuntimeError as e:
-            self.log.error('Could not verify Rx / Tx levels: %s.' % e)
-
-        # Stop IP traffic after setting the UL power level
-        self.stop_traffic_for_calibration()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Children classes need to call this method first.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        # Setup uplink power
-        ul_power = self.get_uplink_power_from_parameters(parameters)
-
-        # Power is not set on the callbox until after the simulation is
-        # started. Saving this value in a variable for later
-        self.sim_ul_power = ul_power
-
-        # Setup downlink power
-
-        dl_power = self.get_downlink_power_from_parameters(parameters)
-
-        # Power is not set on the callbox until after the simulation is
-        # started. Saving this value in a variable for later
-        self.sim_dl_power = dl_power
-
-    def set_uplink_tx_power(self, signal_level):
-        """ Configure the uplink tx power level
-
-        Args:
-            signal_level: calibrated tx power in dBm
-        """
-        new_config = BaseCellConfig(self.log)
-        new_config.input_power = self.calibrated_uplink_tx_power(
-            self.cell_configs[0], signal_level)
-        self.simulator.configure_bts(new_config)
-        self.cell_configs[0].incorporate(new_config)
-
-    def set_downlink_rx_power(self, signal_level):
-        """ Configure the downlink rx power level
-
-        Args:
-            signal_level: calibrated rx power in dBm
-        """
-        new_config = BaseCellConfig(self.log)
-        new_config.output_power = self.calibrated_downlink_rx_power(
-            self.cell_configs[0], signal_level)
-        self.simulator.configure_bts(new_config)
-        self.cell_configs[0].incorporate(new_config)
-
-    def get_uplink_power_from_parameters(self, parameters):
-        """ Reads uplink power from the parameter dictionary. """
-
-        if BaseCellConfig.PARAM_UL_PW in parameters:
-            value = parameters[BaseCellConfig.PARAM_UL_PW]
-            if value in self.UPLINK_SIGNAL_LEVEL_DICTIONARY:
-                return self.UPLINK_SIGNAL_LEVEL_DICTIONARY[value]
-            else:
-                try:
-                    if isinstance(value[0], str) and value[0] == 'n':
-                        # Treat the 'n' character as a negative sign
-                        return -int(value[1:])
-                    else:
-                        return int(value)
-                except ValueError:
-                    pass
-
-        # If the method got to this point it is because PARAM_UL_PW was not
-        # included in the test parameters or the provided value was invalid.
-        raise ValueError(
-            "The config dictionary must include a key {} with the desired "
-            "uplink power expressed by an integer number in dBm or with one of "
-            "the following values: {}. To indicate negative "
-            "values, use the letter n instead of - sign.".format(
-                BaseCellConfig.PARAM_UL_PW,
-                list(self.UPLINK_SIGNAL_LEVEL_DICTIONARY.keys())))
-
-    def get_downlink_power_from_parameters(self, parameters):
-        """ Reads downlink power from a the parameter dictionary. """
-
-        if BaseCellConfig.PARAM_DL_PW in parameters:
-            value = parameters[BaseCellConfig.PARAM_DL_PW]
-            if value not in self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY:
-                raise ValueError(
-                    "Invalid signal level value {}.".format(value))
-            else:
-                return self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY[value]
-        else:
-            # Use default value
-            power = self.DOWNLINK_SIGNAL_LEVEL_DICTIONARY['excellent']
-            self.log.info("No DL signal level value was indicated in the test "
-                          "parameters. Using default value of {} {}.".format(
-                              power, self.DOWNLINK_SIGNAL_LEVEL_UNITS))
-            return power
-
-    def calibrated_downlink_rx_power(self, bts_config, signal_level):
-        """ Calculates the power level at the instrument's output in order to
-        obtain the required rx power level at the DUT's input.
-
-        If calibration values are not available, returns the uncalibrated signal
-        level.
-
-        Args:
-            bts_config: the current configuration at the base station. derived
-                classes implementations can use this object to indicate power as
-                spectral power density or in other units.
-            signal_level: desired downlink received power, can be either a
-                key value pair, an int or a float
-        """
-
-        # Obtain power value if the provided signal_level is a key value pair
-        if isinstance(signal_level, Enum):
-            power = signal_level.value
-        else:
-            power = signal_level
-
-        # Try to use measured path loss value. If this was not set, it will
-        # throw an TypeError exception
-        try:
-            calibrated_power = round(power + self.dl_path_loss)
-            if calibrated_power > self.simulator.MAX_DL_POWER:
-                self.log.warning(
-                    "Cannot achieve phone DL Rx power of {} dBm. Requested TX "
-                    "power of {} dBm exceeds callbox limit!".format(
-                        power, calibrated_power))
-                calibrated_power = self.simulator.MAX_DL_POWER
-                self.log.warning(
-                    "Setting callbox Tx power to max possible ({} dBm)".format(
-                        calibrated_power))
-
-            self.log.info(
-                "Requested phone DL Rx power of {} dBm, setting callbox Tx "
-                "power at {} dBm".format(power, calibrated_power))
-            time.sleep(2)
-            # Power has to be a natural number so calibration wont be exact.
-            # Inform the actual received power after rounding.
-            self.log.info(
-                "Phone downlink received power is {0:.2f} dBm".format(
-                    calibrated_power - self.dl_path_loss))
-            return calibrated_power
-        except TypeError:
-            self.log.info("Phone downlink received power set to {} (link is "
-                          "uncalibrated).".format(round(power)))
-            return round(power)
-
-    def calibrated_uplink_tx_power(self, bts_config, signal_level):
-        """ Calculates the power level at the instrument's input in order to
-        obtain the required tx power level at the DUT's output.
-
-        If calibration values are not available, returns the uncalibrated signal
-        level.
-
-        Args:
-            bts_config: the current configuration at the base station. derived
-                classes implementations can use this object to indicate power as
-                spectral power density or in other units.
-            signal_level: desired uplink transmitted power, can be either a
-                key value pair, an int or a float
-        """
-
-        # Obtain power value if the provided signal_level is a key value pair
-        if isinstance(signal_level, Enum):
-            power = signal_level.value
-        else:
-            power = signal_level
-
-        # Try to use measured path loss value. If this was not set, it will
-        # throw an TypeError exception
-        try:
-            calibrated_power = round(power - self.ul_path_loss)
-            if calibrated_power < self.UL_MIN_POWER:
-                self.log.warning(
-                    "Cannot achieve phone UL Tx power of {} dBm. Requested UL "
-                    "power of {} dBm exceeds callbox limit!".format(
-                        power, calibrated_power))
-                calibrated_power = self.UL_MIN_POWER
-                self.log.warning(
-                    "Setting UL Tx power to min possible ({} dBm)".format(
-                        calibrated_power))
-
-            self.log.info(
-                "Requested phone UL Tx power of {} dBm, setting callbox Rx "
-                "power at {} dBm".format(power, calibrated_power))
-            time.sleep(2)
-            # Power has to be a natural number so calibration wont be exact.
-            # Inform the actual transmitted power after rounding.
-            self.log.info(
-                "Phone uplink transmitted power is {0:.2f} dBm".format(
-                    calibrated_power + self.ul_path_loss))
-            return calibrated_power
-        except TypeError:
-            self.log.info("Phone uplink transmitted power set to {} (link is "
-                          "uncalibrated).".format(round(power)))
-            return round(power)
-
-    def calibrate(self, band):
-        """ Calculates UL and DL path loss if it wasn't done before.
-
-        The should be already set to the required band before calling this
-        method.
-
-        Args:
-            band: the band that is currently being calibrated.
-        """
-
-        if self.dl_path_loss and self.ul_path_loss:
-            self.log.info("Measurements are already calibrated.")
-
-        # Attach the phone to the base station
-        if not self.attach():
-            self.log.info(
-                "Skipping calibration because the phone failed to attach.")
-            return
-
-        # If downlink or uplink were not yet calibrated, do it now
-        if not self.dl_path_loss:
-            self.dl_path_loss = self.downlink_calibration()
-        if not self.ul_path_loss:
-            self.ul_path_loss = self.uplink_calibration()
-
-        # Detach after calibrating
-        self.detach()
-        time.sleep(2)
-
-    def start_traffic_for_calibration(self):
-        """
-            Starts UDP IP traffic before running calibration. Uses APN_1
-            configured in the phone.
-        """
-        self.simulator.start_data_traffic()
-
-    def stop_traffic_for_calibration(self):
-        """
-            Stops IP traffic after calibration.
-        """
-        self.simulator.stop_data_traffic()
-
-    def downlink_calibration(self, rat=None, power_units_conversion_func=None):
-        """ Computes downlink path loss and returns the calibration value
-
-        The DUT needs to be attached to the base station before calling this
-        method.
-
-        Args:
-            rat: desired RAT to calibrate (matching the label reported by
-                the phone)
-            power_units_conversion_func: a function to convert the units
-                reported by the phone to dBm. needs to take two arguments: the
-                reported signal level and bts. use None if no conversion is
-                needed.
-        Returns:
-            Downlink calibration value and measured DL power.
-        """
-
-        # Check if this parameter was set. Child classes may need to override
-        # this class passing the necessary parameters.
-        if not rat:
-            raise ValueError(
-                "The parameter 'rat' has to indicate the RAT being used as "
-                "reported by the phone.")
-
-        # Save initial output level to restore it after calibration
-        restoration_config = BaseCellConfig(self.log)
-        restoration_config.output_power = self.cell_configs[0].output_power
-
-        # Set BTS to a good output level to minimize measurement error
-        new_config = BaseCellConfig(self.log)
-        new_config.output_power = self.simulator.MAX_DL_POWER - 5
-        self.simulator.configure_bts(new_config)
-
-        # Starting IP traffic
-        self.start_traffic_for_calibration()
-
-        down_power_measured = []
-        for i in range(0, self.NUM_DL_CAL_READS):
-            # For some reason, the RSRP gets updated on Screen ON event
-            signal_strength = self.dut.get_telephony_signal_strength()
-            down_power_measured.append(signal_strength[rat])
-            time.sleep(5)
-
-        # Stop IP traffic
-        self.stop_traffic_for_calibration()
-
-        # Reset bts to original settings
-        self.simulator.configure_bts(restoration_config)
-        time.sleep(2)
-
-        # Calculate the mean of the measurements
-        reported_asu_power = np.nanmean(down_power_measured)
-
-        # Convert from RSRP to signal power
-        if power_units_conversion_func:
-            avg_down_power = power_units_conversion_func(
-                reported_asu_power, self.cell_configs[0])
-        else:
-            avg_down_power = reported_asu_power
-
-        # Calculate Path Loss
-        dl_target_power = self.simulator.MAX_DL_POWER - 5
-        down_call_path_loss = dl_target_power - avg_down_power
-
-        # Validate the result
-        if not 0 < down_call_path_loss < 100:
-            raise RuntimeError(
-                "Downlink calibration failed. The calculated path loss value "
-                "was {} dBm.".format(down_call_path_loss))
-
-        self.log.info(
-            "Measured downlink path loss: {} dB".format(down_call_path_loss))
-
-        return down_call_path_loss
-
-    def uplink_calibration(self):
-        """ Computes uplink path loss and returns the calibration value
-
-        The DUT needs to be attached to the base station before calling this
-        method.
-
-        Returns:
-            Uplink calibration value and measured UL power
-        """
-
-        # Save initial input level to restore it after calibration
-        restoration_config = BaseCellConfig(self.log)
-        restoration_config.input_power = self.cell_configs[0].input_power
-
-        # Set BTS1 to maximum input allowed in order to perform
-        # uplink calibration
-        target_power = self.MAX_PHONE_OUTPUT_POWER
-        new_config = BaseCellConfig(self.log)
-        new_config.input_power = self.MAX_BTS_INPUT_POWER
-        self.simulator.configure_bts(new_config)
-
-        # Start IP traffic
-        self.start_traffic_for_calibration()
-
-        up_power_per_chain = []
-        # Get the number of chains
-        cmd = 'MONITOR? UL_PUSCH'
-        uplink_meas_power = self.anritsu.send_query(cmd)
-        str_power_chain = uplink_meas_power.split(',')
-        num_chains = len(str_power_chain)
-        for ichain in range(0, num_chains):
-            up_power_per_chain.append([])
-
-        for i in range(0, self.NUM_UL_CAL_READS):
-            uplink_meas_power = self.anritsu.send_query(cmd)
-            str_power_chain = uplink_meas_power.split(',')
-
-            for ichain in range(0, num_chains):
-                if (str_power_chain[ichain] == 'DEACTIVE'):
-                    up_power_per_chain[ichain].append(float('nan'))
-                else:
-                    up_power_per_chain[ichain].append(
-                        float(str_power_chain[ichain]))
-
-            time.sleep(3)
-
-        # Stop IP traffic
-        self.stop_traffic_for_calibration()
-
-        # Reset bts to original settings
-        self.simulator.configure_bts(restoration_config)
-        time.sleep(2)
-
-        # Phone only supports 1x1 Uplink so always chain 0
-        avg_up_power = np.nanmean(up_power_per_chain[0])
-        if np.isnan(avg_up_power):
-            raise RuntimeError(
-                "Calibration failed because the callbox reported the chain to "
-                "be deactive.")
-
-        up_call_path_loss = target_power - avg_up_power
-
-        # Validate the result
-        if not 0 < up_call_path_loss < 100:
-            raise RuntimeError(
-                "Uplink calibration failed. The calculated path loss value "
-                "was {} dBm.".format(up_call_path_loss))
-
-        self.log.info(
-            "Measured uplink path loss: {} dB".format(up_call_path_loss))
-
-        return up_call_path_loss
-
-    def load_pathloss_if_required(self):
-        """ If calibration is required, try to obtain the pathloss values from
-        the calibration table and measure them if they are not available. """
-        # Invalidate the previous values
-        self.dl_path_loss = None
-        self.ul_path_loss = None
-
-        # Load the new ones
-        if self.calibration_required:
-
-            band = self.cell_configs[0].band
-
-            # Try loading the path loss values from the calibration table. If
-            # they are not available, use the automated calibration procedure.
-            try:
-                self.dl_path_loss = self.calibration_table[band]["dl"]
-                self.ul_path_loss = self.calibration_table[band]["ul"]
-            except KeyError:
-                self.calibrate(band)
-
-            # Complete the calibration table with the new values to be used in
-            # the next tests.
-            if band not in self.calibration_table:
-                self.calibration_table[band] = {}
-
-            if "dl" not in self.calibration_table[band] and self.dl_path_loss:
-                self.calibration_table[band]["dl"] = self.dl_path_loss
-
-            if "ul" not in self.calibration_table[band] and self.ul_path_loss:
-                self.calibration_table[band]["ul"] = self.ul_path_loss
-
-    def maximum_downlink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-        simulation state.
-
-        Because thoughput is dependent on the RAT, this method needs to be
-        implemented by children classes.
-
-        Returns:
-            Maximum throughput in mbps
-        """
-        raise NotImplementedError()
-
-    def maximum_uplink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-        simulation state.
-
-        Because thoughput is dependent on the RAT, this method needs to be
-        implemented by children classes.
-
-        Returns:
-            Maximum throughput in mbps
-        """
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/cellular_lib/GsmSimulation.py b/src/antlion/controllers/cellular_lib/GsmSimulation.py
deleted file mode 100644
index f0ebf03..0000000
--- a/src/antlion/controllers/cellular_lib/GsmSimulation.py
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ntpath
-
-import time
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_DCS1800
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_EGSM900
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_GSM850
-from antlion.controllers.anritsu_lib.band_constants import GSM_BAND_RGSM900
-from antlion.controllers.anritsu_lib.md8475a import BtsGprsMode
-from antlion.controllers.anritsu_lib.md8475a import BtsNumber
-from antlion.controllers.anritsu_lib import md8475_cellular_simulator as anritsusim
-from antlion.controllers.cellular_lib import BaseCellularDut
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib.BaseCellConfig import BaseCellConfig
-
-
-class GsmSimulation(BaseSimulation):
-    """ Single base station GSM. """
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-
-    GSM_BASIC_SIM_FILE = 'SIM_default_GSM.wnssp'
-
-    GSM_CELL_FILE = 'CELL_GSM_config.wnscp'
-
-    # Configuration dictionary keys
-    PARAM_BAND = "band"
-    PARAM_GPRS = "gprs"
-    PARAM_EGPRS = "edge"
-    PARAM_NO_GPRS = "nogprs"
-    PARAM_SLOTS = "slots"
-
-    bands_parameter_mapping = {
-        '850': GSM_BAND_GSM850,
-        '900': GSM_BAND_EGSM900,
-        '1800': GSM_BAND_DCS1800,
-        '1900': GSM_BAND_RGSM900
-    }
-
-    def __init__(self, simulator, log, dut, test_config, calibration_table):
-        """ Initializes the simulator for a single-carrier GSM simulation.
-
-        Loads a simple LTE simulation environment with 1 basestation. It also
-        creates the BTS handle so we can change the parameters as desired.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-
-        """
-        # The GSM simulation relies on the cellular simulator to be a MD8475
-        if not isinstance(self.simulator, anritsusim.MD8475CellularSimulator):
-            raise ValueError('The GSM simulation relies on the simulator to '
-                             'be an Anritsu MD8475 A/B instrument.')
-
-        # The Anritsu controller needs to be unwrapped before calling
-        # super().__init__ because setup_simulator() requires self.anritsu and
-        # will be called during the parent class initialization.
-        self.anritsu = self.simulator.anritsu
-        self.bts1 = self.anritsu.get_BTS(BtsNumber.BTS1)
-
-        super().__init__(simulator, log, dut, test_config, calibration_table)
-
-        self.dut.set_preferred_network_type(
-            BaseCellularDut.PreferredNetworkType.GSM_ONLY)
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-
-        # Load callbox config files
-        callbox_config_path = self.CALLBOX_PATH_FORMAT_STR.format(
-            self.anritsu._md8475_version)
-
-        self.anritsu.load_simulation_paramfile(
-            ntpath.join(callbox_config_path, self.GSM_BASIC_SIM_FILE))
-        self.anritsu.load_cell_paramfile(
-            ntpath.join(callbox_config_path, self.GSM_CELL_FILE))
-
-        # Start simulation if it wasn't started
-        self.anritsu.start_simulation()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Processes GSM configuration parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        # Don't call super() because Gsm doesn't control Tx power.
-
-        # Setup band
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include key '{}' with the "
-                "required band number.".format(self.PARAM_BAND))
-
-        self.set_band(self.bts1, parameters[self.PARAM_BAND])
-        self.load_pathloss_if_required()
-
-        # Setup GPRS mode
-
-        if self.PARAM_GPRS in parameters:
-            self.bts1.gsm_gprs_mode = BtsGprsMode.GPRS
-        elif self.PARAM_EGPRS in parameters:
-            self.bts1.gsm_gprs_mode = BtsGprsMode.EGPRS
-        elif self.PARAM_NO_GPRS in parameters:
-            self.bts1.gsm_gprs_mode = BtsGprsMode.NO_GPRS
-        else:
-            raise ValueError(
-                "GPRS mode needs to be indicated in the config dictionary by "
-                "including either {}, {} or {} as a key.".format(
-                    self.PARAM_GPRS, self.PARAM_EGPRS, self.PARAM_NO_GPRS))
-
-        # Setup slot allocation
-        if self.PARAM_SLOTS not in parameters or len(
-                parameters[self.PARAM_SLOTS]) != 2:
-            raise ValueError(
-                "The config dictionary must include key {} with a list of two "
-                "int values indicating DL and UL slots.".format(
-                    self.PARAM_SLOTS))
-        values = parameters[self.PARAM_SLOTS]
-        self.bts1.gsm_slots = (int(values[0]), int(values[1]))
-
-    def set_band(self, bts, band):
-        """ Sets the band used for communication.
-
-        Args:
-            bts: basestation handle
-            band: desired band
-        """
-
-        bts.band = band
-        time.sleep(5)  # It takes some time to propagate the new band
diff --git a/src/antlion/controllers/cellular_lib/LteCellConfig.py b/src/antlion/controllers/cellular_lib/LteCellConfig.py
deleted file mode 100644
index 8666f75..0000000
--- a/src/antlion/controllers/cellular_lib/LteCellConfig.py
+++ /dev/null
@@ -1,488 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import antlion.controllers.cellular_lib.BaseCellConfig as base_cell
-import antlion.controllers.cellular_lib.LteSimulation as lte_sim
-import math
-
-
-class LteCellConfig(base_cell.BaseCellConfig):
-    """ Extension of the BaseBtsConfig to implement parameters that are
-         exclusive to LTE.
-
-    Attributes:
-        band: an integer indicating the required band number.
-        dlul_config: an integer indicating the TDD config number.
-        ssf_config: an integer indicating the Special Sub-Frame config.
-        bandwidth: a float indicating the required channel bandwidth.
-        mimo_mode: an instance of LteSimulation.MimoMode indicating the
-            required MIMO mode for the downlink signal.
-        transmission_mode: an instance of LteSimulation.TransmissionMode
-            indicating the required TM.
-        scheduling_mode: an instance of LteSimulation.SchedulingMode
-            indicating whether to use Static or Dynamic scheduling.
-        dl_rbs: an integer indicating the number of downlink RBs
-        ul_rbs: an integer indicating the number of uplink RBs
-        dl_mcs: an integer indicating the MCS for the downlink signal
-        ul_mcs: an integer indicating the MCS for the uplink signal
-        dl_256_qam_enabled: a boolean indicating if 256 QAM is enabled
-        ul_64_qam_enabled: a boolean indicating if 256 QAM is enabled
-        mac_padding: a boolean indicating whether RBs should be allocated
-            when there is no user data in static scheduling
-        dl_channel: an integer indicating the downlink channel number
-        cfi: an integer indicating the Control Format Indicator
-        paging_cycle: an integer indicating the paging cycle duration in
-            milliseconds
-        phich: a string indicating the PHICH group size parameter
-        drx_connected_mode: a boolean indicating whether cDRX mode is
-            on or off
-        drx_on_duration_timer: number of PDCCH subframes representing
-            DRX on duration
-        drx_inactivity_timer: number of PDCCH subframes to wait before
-            entering DRX mode
-        drx_retransmission_timer: number of consecutive PDCCH subframes
-            to wait for retransmission
-        drx_long_cycle: number of subframes representing one long DRX cycle.
-            One cycle consists of DRX sleep + DRX on duration
-        drx_long_cycle_offset: number representing offset in range
-            0 to drx_long_cycle - 1
-    """
-    PARAM_FRAME_CONFIG = "tddconfig"
-    PARAM_BW = "bw"
-    PARAM_SCHEDULING = "scheduling"
-    PARAM_SCHEDULING_STATIC = "static"
-    PARAM_SCHEDULING_DYNAMIC = "dynamic"
-    PARAM_PATTERN = "pattern"
-    PARAM_TM = "tm"
-    PARAM_BAND = "band"
-    PARAM_MIMO = "mimo"
-    PARAM_DL_MCS = 'dlmcs'
-    PARAM_UL_MCS = 'ulmcs'
-    PARAM_SSF = 'ssf'
-    PARAM_CFI = 'cfi'
-    PARAM_PAGING = 'paging'
-    PARAM_PHICH = 'phich'
-    PARAM_DRX = 'drx'
-    PARAM_PADDING = 'mac_padding'
-    PARAM_DL_256_QAM_ENABLED = "256_qam_dl_enabled"
-    PARAM_UL_64_QAM_ENABLED = "64_qam_ul_enabled"
-    PARAM_DL_EARFCN = 'dl_earfcn'
-
-    def __init__(self, log):
-        """ Initialize the base station config by setting all its
-        parameters to None.
-        Args:
-            log: logger object.
-        """
-        super().__init__(log)
-        self.band = None
-        self.dlul_config = None
-        self.ssf_config = None
-        self.bandwidth = None
-        self.mimo_mode = None
-        self.transmission_mode = None
-        self.scheduling_mode = None
-        self.dl_rbs = None
-        self.ul_rbs = None
-        self.dl_mcs = None
-        self.ul_mcs = None
-        self.dl_256_qam_enabled = None
-        self.ul_64_qam_enabled = None
-        self.mac_padding = None
-        self.dl_channel = None
-        self.cfi = None
-        self.paging_cycle = None
-        self.phich = None
-        self.drx_connected_mode = None
-        self.drx_on_duration_timer = None
-        self.drx_inactivity_timer = None
-        self.drx_retransmission_timer = None
-        self.drx_long_cycle = None
-        self.drx_long_cycle_offset = None
-
-    def __str__(self):
-        return str(vars(self))
-
-    def configure(self, parameters):
-        """ Configures an LTE cell using a dictionary of parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        # Setup band
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with "
-                "the required band number.".format(self.PARAM_BAND))
-
-        self.band = parameters[self.PARAM_BAND]
-
-        if self.PARAM_DL_EARFCN not in parameters:
-            band = int(self.band)
-            channel = int(lte_sim.LteSimulation.LOWEST_DL_CN_DICTIONARY[band] +
-                          lte_sim.LteSimulation.LOWEST_DL_CN_DICTIONARY[band +
-                                                                        1]) / 2
-            self.log.warning(
-                "Key '{}' was not set. Using center band channel {} by default."
-                .format(self.PARAM_DL_EARFCN, channel))
-            self.dl_channel = channel
-        else:
-            self.dl_channel = parameters[self.PARAM_DL_EARFCN]
-
-        # Set TDD-only configs
-        if self.get_duplex_mode() == lte_sim.DuplexMode.TDD:
-
-            # Sub-frame DL/UL config
-            if self.PARAM_FRAME_CONFIG not in parameters:
-                raise ValueError("When a TDD band is selected the frame "
-                                 "structure has to be indicated with the '{}' "
-                                 "key with a value from 0 to 6.".format(
-                                     self.PARAM_FRAME_CONFIG))
-
-            self.dlul_config = int(parameters[self.PARAM_FRAME_CONFIG])
-
-            # Special Sub-Frame configuration
-            if self.PARAM_SSF not in parameters:
-                self.log.warning(
-                    'The {} parameter was not provided. Setting '
-                    'Special Sub-Frame config to 6 by default.'.format(
-                        self.PARAM_SSF))
-                self.ssf_config = 6
-            else:
-                self.ssf_config = int(parameters[self.PARAM_SSF])
-
-        # Setup bandwidth
-        if self.PARAM_BW not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter {} with an "
-                "int value (to indicate 1.4 MHz use 14).".format(
-                    self.PARAM_BW))
-
-        bw = float(parameters[self.PARAM_BW])
-
-        if abs(bw - 14) < 0.00000000001:
-            bw = 1.4
-
-        self.bandwidth = bw
-
-        # Setup mimo mode
-        if self.PARAM_MIMO not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter '{}' with the "
-                "mimo mode.".format(self.PARAM_MIMO))
-
-        for mimo_mode in lte_sim.MimoMode:
-            if parameters[self.PARAM_MIMO] == mimo_mode.value:
-                self.mimo_mode = mimo_mode
-                break
-        else:
-            raise ValueError("The value of {} must be one of the following:"
-                             "1x1, 2x2 or 4x4.".format(self.PARAM_MIMO))
-
-        # Setup transmission mode
-        if self.PARAM_TM not in parameters:
-            raise ValueError(
-                "The config dictionary must include key {} with an "
-                "int value from 1 to 4 indicating transmission mode.".format(
-                    self.PARAM_TM))
-
-        for tm in lte_sim.TransmissionMode:
-            if parameters[self.PARAM_TM] == tm.value[2:]:
-                self.transmission_mode = tm
-                break
-        else:
-            raise ValueError(
-                "The {} key must have one of the following values:"
-                "1, 2, 3, 4, 7, 8 or 9.".format(self.PARAM_TM))
-
-        # Setup scheduling mode
-        if self.PARAM_SCHEDULING not in parameters:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-            self.log.warning(
-                "The test config does not include the '{}' key. Setting to "
-                "static by default.".format(self.PARAM_SCHEDULING))
-        elif parameters[
-                self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_DYNAMIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.DYNAMIC
-        elif parameters[self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_STATIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-        else:
-            raise ValueError("Key '{}' must have a value of "
-                             "'dynamic' or 'static'.".format(
-                                 self.PARAM_SCHEDULING))
-
-        if self.scheduling_mode == lte_sim.SchedulingMode.STATIC:
-
-            if self.PARAM_PADDING not in parameters:
-                self.log.warning(
-                    "The '{}' parameter was not set. Enabling MAC padding by "
-                    "default.".format(self.PARAM_PADDING))
-                self.mac_padding = True
-            else:
-                self.mac_padding = parameters[self.PARAM_PADDING]
-
-            if self.PARAM_PATTERN not in parameters:
-                self.log.warning(
-                    "The '{}' parameter was not set, using 100% RBs for both "
-                    "DL and UL. To set the percentages of total RBs include "
-                    "the '{}' key with a list of two ints indicating downlink "
-                    "and uplink percentages.".format(self.PARAM_PATTERN,
-                                                     self.PARAM_PATTERN))
-                dl_pattern = 100
-                ul_pattern = 100
-            else:
-                dl_pattern = int(parameters[self.PARAM_PATTERN][0])
-                ul_pattern = int(parameters[self.PARAM_PATTERN][1])
-
-            if not (0 <= dl_pattern <= 100 and 0 <= ul_pattern <= 100):
-                raise ValueError(
-                    "The scheduling pattern parameters need to be two "
-                    "positive numbers between 0 and 100.")
-
-            self.dl_rbs, self.ul_rbs = (self.allocation_percentages_to_rbs(
-                dl_pattern, ul_pattern))
-
-            # Check if 256 QAM is enabled for DL MCS
-            if self.PARAM_DL_256_QAM_ENABLED not in parameters:
-                self.log.warning("The key '{}' is not set in the test config. "
-                                 "Setting to false by default.".format(
-                                     self.PARAM_DL_256_QAM_ENABLED))
-
-            self.dl_256_qam_enabled = parameters.get(
-                self.PARAM_DL_256_QAM_ENABLED, False)
-
-            # Look for a DL MCS configuration in the test parameters. If it is
-            # not present, use a default value.
-            if self.PARAM_DL_MCS in parameters:
-                self.dl_mcs = int(parameters[self.PARAM_DL_MCS])
-            else:
-                self.log.warning(
-                    'The test config does not include the {} key. Setting '
-                    'to the max value by default'.format(self.PARAM_DL_MCS))
-                if self.dl_256_qam_enabled and self.bandwidth == 1.4:
-                    self.dl_mcs = 26
-                elif (not self.dl_256_qam_enabled and self.mac_padding
-                      and self.bandwidth != 1.4):
-                    self.dl_mcs = 28
-                else:
-                    self.dl_mcs = 27
-
-            # Check if 64 QAM is enabled for UL MCS
-            if self.PARAM_UL_64_QAM_ENABLED not in parameters:
-                self.log.warning("The key '{}' is not set in the config file. "
-                                 "Setting to false by default.".format(
-                                     self.PARAM_UL_64_QAM_ENABLED))
-
-            self.ul_64_qam_enabled = parameters.get(
-                self.PARAM_UL_64_QAM_ENABLED, False)
-
-            # Look for an UL MCS configuration in the test parameters. If it is
-            # not present, use a default value.
-            if self.PARAM_UL_MCS in parameters:
-                self.ul_mcs = int(parameters[self.PARAM_UL_MCS])
-            else:
-                self.log.warning(
-                    'The test config does not include the {} key. Setting '
-                    'to the max value by default'.format(self.PARAM_UL_MCS))
-                if self.ul_64_qam_enabled:
-                    self.ul_mcs = 28
-                else:
-                    self.ul_mcs = 23
-
-        # Configure the simulation for DRX mode
-        if self.PARAM_DRX in parameters and len(
-                parameters[self.PARAM_DRX]) == 5:
-            self.drx_connected_mode = True
-            self.drx_on_duration_timer = parameters[self.PARAM_DRX][0]
-            self.drx_inactivity_timer = parameters[self.PARAM_DRX][1]
-            self.drx_retransmission_timer = parameters[self.PARAM_DRX][2]
-            self.drx_long_cycle = parameters[self.PARAM_DRX][3]
-            try:
-                long_cycle = int(parameters[self.PARAM_DRX][3])
-                long_cycle_offset = int(parameters[self.PARAM_DRX][4])
-                if long_cycle_offset in range(0, long_cycle):
-                    self.drx_long_cycle_offset = long_cycle_offset
-                else:
-                    self.log.error(
-                        ("The cDRX long cycle offset must be in the "
-                         "range 0 to (long cycle  - 1). Setting "
-                         "long cycle offset to 0"))
-                    self.drx_long_cycle_offset = 0
-
-            except ValueError:
-                self.log.error(("cDRX long cycle and long cycle offset "
-                                "must be integers. Disabling cDRX mode."))
-                self.drx_connected_mode = False
-        else:
-            self.log.warning(
-                ("DRX mode was not configured properly. "
-                 "Please provide a list with the following values: "
-                 "1) DRX on duration timer "
-                 "2) Inactivity timer "
-                 "3) Retransmission timer "
-                 "4) Long DRX cycle duration "
-                 "5) Long DRX cycle offset "
-                 "Example: [2, 6, 16, 20, 0]."))
-
-        # Channel Control Indicator
-        if self.PARAM_CFI not in parameters:
-            self.log.warning('The {} parameter was not provided. Setting '
-                             'CFI to BESTEFFORT.'.format(self.PARAM_CFI))
-            self.cfi = 'BESTEFFORT'
-        else:
-            self.cfi = parameters[self.PARAM_CFI]
-
-        # PHICH group size
-        if self.PARAM_PHICH not in parameters:
-            self.log.warning('The {} parameter was not provided. Setting '
-                             'PHICH group size to 1 by default.'.format(
-                                 self.PARAM_PHICH))
-            self.phich = '1'
-        else:
-            if parameters[self.PARAM_PHICH] == '16':
-                self.phich = '1/6'
-            elif parameters[self.PARAM_PHICH] == '12':
-                self.phich = '1/2'
-            elif parameters[self.PARAM_PHICH] in ['1/6', '1/2', '1', '2']:
-                self.phich = parameters[self.PARAM_PHICH]
-            else:
-                raise ValueError('The {} parameter can only be followed by 1,'
-                                 '2, 1/2 (or 12) and 1/6 (or 16).'.format(
-                                     self.PARAM_PHICH))
-
-        # Paging cycle duration
-        if self.PARAM_PAGING not in parameters:
-            self.log.warning('The {} parameter was not provided. Setting '
-                             'paging cycle duration to 1280 ms by '
-                             'default.'.format(self.PARAM_PAGING))
-            self.paging_cycle = 1280
-        else:
-            try:
-                self.paging_cycle = int(parameters[self.PARAM_PAGING])
-            except ValueError:
-                raise ValueError(
-                    'The {} key has to be followed by the paging cycle '
-                    'duration in milliseconds.'.format(self.PARAM_PAGING))
-
-    def get_duplex_mode(self):
-        """ Determines if the cell uses FDD or TDD duplex mode
-
-        Returns:
-          an variable of class DuplexMode indicating if band is FDD or TDD
-        """
-        if 33 <= int(self.band) <= 46:
-            return lte_sim.DuplexMode.TDD
-        else:
-            return lte_sim.DuplexMode.FDD
-
-    def allocation_percentages_to_rbs(self, dl, ul):
-        """ Converts usage percentages to number of DL/UL RBs
-
-        Because not any number of DL/UL RBs can be obtained for a certain
-        bandwidth, this function calculates the number of RBs that most
-        closely matches the desired DL/UL percentages.
-
-        Args:
-            dl: desired percentage of downlink RBs
-            ul: desired percentage of uplink RBs
-        Returns:
-            a tuple indicating the number of downlink and uplink RBs
-        """
-
-        # Validate the arguments
-        if (not 0 <= dl <= 100) or (not 0 <= ul <= 100):
-            raise ValueError("The percentage of DL and UL RBs have to be two "
-                             "positive between 0 and 100.")
-
-        # Get min and max values from tables
-        max_rbs = lte_sim.TOTAL_RBS_DICTIONARY[self.bandwidth]
-        min_dl_rbs = lte_sim.MIN_DL_RBS_DICTIONARY[self.bandwidth]
-        min_ul_rbs = lte_sim.MIN_UL_RBS_DICTIONARY[self.bandwidth]
-
-        def percentage_to_amount(min_val, max_val, percentage):
-            """ Returns the integer between min_val and max_val that is closest
-            to percentage/100*max_val
-            """
-
-            # Calculate the value that corresponds to the required percentage.
-            closest_int = round(max_val * percentage / 100)
-            # Cannot be less than min_val
-            closest_int = max(closest_int, min_val)
-            # RBs cannot be more than max_rbs
-            closest_int = min(closest_int, max_val)
-
-            return closest_int
-
-        # Calculate the number of DL RBs
-
-        # Get the number of DL RBs that corresponds to
-        #  the required percentage.
-        desired_dl_rbs = percentage_to_amount(min_val=min_dl_rbs,
-                                              max_val=max_rbs,
-                                              percentage=dl)
-
-        if self.transmission_mode == lte_sim.TransmissionMode.TM3 or \
-                self.transmission_mode == lte_sim.TransmissionMode.TM4:
-
-            # For TM3 and TM4 the number of DL RBs needs to be max_rbs or a
-            # multiple of the RBG size
-
-            if desired_dl_rbs == max_rbs:
-                dl_rbs = max_rbs
-            else:
-                dl_rbs = (math.ceil(
-                    desired_dl_rbs / lte_sim.RBG_DICTIONARY[self.bandwidth]) *
-                          lte_sim.RBG_DICTIONARY[self.bandwidth])
-
-        else:
-            # The other TMs allow any number of RBs between 1 and max_rbs
-            dl_rbs = desired_dl_rbs
-
-        # Calculate the number of UL RBs
-
-        # Get the number of UL RBs that corresponds
-        # to the required percentage
-        desired_ul_rbs = percentage_to_amount(min_val=min_ul_rbs,
-                                              max_val=max_rbs,
-                                              percentage=ul)
-
-        # Create a list of all possible UL RBs assignment
-        # The standard allows any number that can be written as
-        # 2**a * 3**b * 5**c for any combination of a, b and c.
-
-        def pow_range(max_value, base):
-            """ Returns a range of all possible powers of base under
-              the given max_value.
-          """
-            return range(int(math.ceil(math.log(max_value, base))))
-
-        possible_ul_rbs = [
-            2 ** a * 3 ** b * 5 ** c for a in pow_range(max_rbs, 2)
-            for b in pow_range(max_rbs, 3)
-            for c in pow_range(max_rbs, 5)
-            if 2 ** a * 3 ** b * 5 ** c <= max_rbs]  # yapf: disable
-
-        # Find the value in the list that is closest to desired_ul_rbs
-        differences = [abs(rbs - desired_ul_rbs) for rbs in possible_ul_rbs]
-        ul_rbs = possible_ul_rbs[differences.index(min(differences))]
-
-        # Report what are the obtained RB percentages
-        self.log.info("Requested a {}% / {}% RB allocation. Closest possible "
-                      "percentages are {}% / {}%.".format(
-                          dl, ul, round(100 * dl_rbs / max_rbs),
-                          round(100 * ul_rbs / max_rbs)))
-
-        return dl_rbs, ul_rbs
diff --git a/src/antlion/controllers/cellular_lib/LteImsSimulation.py b/src/antlion/controllers/cellular_lib/LteImsSimulation.py
deleted file mode 100644
index 9f74714..0000000
--- a/src/antlion/controllers/cellular_lib/LteImsSimulation.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-
-import antlion.controllers.anritsu_lib.md8475a as md8475a
-from antlion.controllers.cellular_lib.LteSimulation import LteSimulation
-
-# Time to wait for Anritsu's IMS CSCF state change
-MAX_WAIT_TIME_IMS_CSCF_STATE = 30
-# default ims virtual network id for Anritsu ims call test.
-DEFAULT_IMS_VIRTUAL_NETWORK_ID = 1
-
-
-class LteImsSimulation(LteSimulation):
-
-    LTE_BASIC_SIM_FILE = 'VoLTE_ATT_Sim.wnssp'
-    LTE_BASIC_CELL_FILE = 'VoLTE_ATT_Cell.wnscp'
-
-    def attach(self):
-        """ After attaching verify the UE has registered with the IMS server.
-
-        Returns:
-            True if the phone was able to attach, False if not.
-        """
-
-        if not super().attach():
-            return False
-
-        # The phone should have registered with the IMS server before attaching.
-        # Make sure the IMS registration was successful by verifying the CSCF
-        # status is SIP IDLE.
-        if not _wait_for_ims_cscf_status(
-                self.log,
-                self.simulator.anritsu,
-                DEFAULT_IMS_VIRTUAL_NETWORK_ID,
-                md8475a.ImsCscfStatus.SIPIDLE.value):
-            self.log.error('UE failed to register with the IMS server.')
-            return False
-
-        return True
-
-
-def _wait_for_ims_cscf_status(log,
-                              anritsu_handle,
-                              virtual_network_id,
-                              status,
-                              timeout=MAX_WAIT_TIME_IMS_CSCF_STATE):
-    """ Wait for IMS CSCF to be in expected state.
-
-    Args:
-        log: log object
-        anritsu_handle: anritsu object
-        virtual_network_id: virtual network id to be monitored
-        status: expected status
-        timeout: wait time
-    """
-    sleep_interval = 1
-    wait_time = timeout
-    while wait_time > 0:
-        if status == anritsu_handle.get_ims_cscf_status(virtual_network_id):
-            return True
-        time.sleep(sleep_interval)
-        wait_time = wait_time - sleep_interval
-    return False
diff --git a/src/antlion/controllers/cellular_lib/LteSimulation.py b/src/antlion/controllers/cellular_lib/LteSimulation.py
deleted file mode 100644
index b811a90..0000000
--- a/src/antlion/controllers/cellular_lib/LteSimulation.py
+++ /dev/null
@@ -1,923 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-from enum import Enum
-
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib.LteCellConfig import LteCellConfig
-from antlion.controllers.cellular_lib.NrCellConfig import NrCellConfig
-from antlion.controllers.cellular_lib import BaseCellularDut
-
-
-class TransmissionMode(Enum):
-    """ Transmission modes for LTE (e.g., TM1, TM4, ...) """
-    TM1 = "TM1"
-    TM2 = "TM2"
-    TM3 = "TM3"
-    TM4 = "TM4"
-    TM7 = "TM7"
-    TM8 = "TM8"
-    TM9 = "TM9"
-
-
-class MimoMode(Enum):
-    """ Mimo modes """
-    MIMO_1x1 = "1x1"
-    MIMO_2x2 = "2x2"
-    MIMO_4x4 = "4x4"
-
-
-class SchedulingMode(Enum):
-    """ Traffic scheduling modes (e.g., STATIC, DYNAMIC) """
-    DYNAMIC = "DYNAMIC"
-    STATIC = "STATIC"
-
-
-class DuplexMode(Enum):
-    """ DL/UL Duplex mode """
-    FDD = "FDD"
-    TDD = "TDD"
-
-
-class ModulationType(Enum):
-    """DL/UL Modulation order."""
-    QPSK = 'QPSK'
-    Q16 = '16QAM'
-    Q64 = '64QAM'
-    Q256 = '256QAM'
-
-
-# Bandwidth [MHz] to RB group size
-RBG_DICTIONARY = {20: 4, 15: 4, 10: 3, 5: 2, 3: 2, 1.4: 1}
-
-# Bandwidth [MHz] to total RBs mapping
-TOTAL_RBS_DICTIONARY = {20: 100, 15: 75, 10: 50, 5: 25, 3: 15, 1.4: 6}
-
-# Bandwidth [MHz] to minimum number of DL RBs that can be assigned to a UE
-MIN_DL_RBS_DICTIONARY = {20: 16, 15: 12, 10: 9, 5: 4, 3: 4, 1.4: 2}
-
-# Bandwidth [MHz] to minimum number of UL RBs that can be assigned to a UE
-MIN_UL_RBS_DICTIONARY = {20: 8, 15: 6, 10: 4, 5: 2, 3: 2, 1.4: 1}
-
-
-class LteSimulation(BaseSimulation):
-    """ Single-carrier LTE simulation. """
-    # Test config keywords
-    KEY_FREQ_BANDS = "freq_bands"
-
-    # Cell param keywords
-    PARAM_RRC_STATUS_CHANGE_TIMER = "rrcstatuschangetimer"
-
-    # Units in which signal level is defined in DOWNLINK_SIGNAL_LEVEL_DICTIONARY
-    DOWNLINK_SIGNAL_LEVEL_UNITS = "RSRP"
-
-    # RSRP signal levels thresholds (as reported by Android) in dBm/15KHz.
-    # Excellent is set to -75 since callbox B Tx power is limited to -30 dBm
-    DOWNLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'excellent': -75,
-        'high': -110,
-        'medium': -115,
-        'weak': -120,
-        'disconnected': -170
-    }
-
-    # Transmitted output power for the phone (dBm)
-    UPLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'max': 27,
-        'high': 13,
-        'medium': 3,
-        'low': -20
-    }
-
-    # Allowed bandwidth for each band.
-    allowed_bandwidth_dictionary = {
-        1: [5, 10, 15, 20],
-        2: [1.4, 3, 5, 10, 15, 20],
-        3: [1.4, 3, 5, 10, 15, 20],
-        4: [1.4, 3, 5, 10, 15, 20],
-        5: [1.4, 3, 5, 10],
-        7: [5, 10, 15, 20],
-        8: [1.4, 3, 5, 10],
-        10: [5, 10, 15, 20],
-        11: [5, 10],
-        12: [1.4, 3, 5, 10],
-        13: [5, 10],
-        14: [5, 10],
-        17: [5, 10],
-        18: [5, 10, 15],
-        19: [5, 10, 15],
-        20: [5, 10, 15, 20],
-        21: [5, 10, 15],
-        22: [5, 10, 15, 20],
-        24: [5, 10],
-        25: [1.4, 3, 5, 10, 15, 20],
-        26: [1.4, 3, 5, 10, 15],
-        27: [1.4, 3, 5, 10],
-        28: [3, 5, 10, 15, 20],
-        29: [3, 5, 10],
-        30: [5, 10],
-        31: [1.4, 3, 5],
-        32: [5, 10, 15, 20],
-        33: [5, 10, 15, 20],
-        34: [5, 10, 15],
-        35: [1.4, 3, 5, 10, 15, 20],
-        36: [1.4, 3, 5, 10, 15, 20],
-        37: [5, 10, 15, 20],
-        38: [20],
-        39: [5, 10, 15, 20],
-        40: [5, 10, 15, 20],
-        41: [5, 10, 15, 20],
-        42: [5, 10, 15, 20],
-        43: [5, 10, 15, 20],
-        44: [3, 5, 10, 15, 20],
-        45: [5, 10, 15, 20],
-        46: [10, 20],
-        47: [10, 20],
-        48: [5, 10, 15, 20],
-        49: [10, 20],
-        50: [3, 5, 10, 15, 20],
-        51: [3, 5],
-        52: [5, 10, 15, 20],
-        65: [5, 10, 15, 20],
-        66: [1.4, 3, 5, 10, 15, 20],
-        67: [5, 10, 15, 20],
-        68: [5, 10, 15],
-        69: [5],
-        70: [5, 10, 15],
-        71: [5, 10, 15, 20],
-        72: [1.4, 3, 5],
-        73: [1.4, 3, 5],
-        74: [1.4, 3, 5, 10, 15, 20],
-        75: [5, 10, 15, 20],
-        76: [5],
-        85: [5, 10],
-        252: [20],
-        255: [20]
-    }
-
-    # Dictionary of lower DL channel number bound for each band.
-    LOWEST_DL_CN_DICTIONARY = {
-        1: 0,
-        2: 600,
-        3: 1200,
-        4: 1950,
-        5: 2400,
-        6: 2650,
-        7: 2750,
-        8: 3450,
-        9: 3800,
-        10: 4150,
-        11: 4750,
-        12: 5010,
-        13: 5180,
-        14: 5280,
-        17: 5730,
-        18: 5850,
-        19: 6000,
-        20: 6150,
-        21: 6450,
-        22: 6600,
-        23: 7500,
-        24: 7700,
-        25: 8040,
-        26: 8690,
-        27: 9040,
-        28: 9210,
-        29: 9660,
-        30: 9770,
-        31: 9870,
-        32: 9920,
-        33: 36000,
-        34: 36200,
-        35: 36350,
-        36: 36950,
-        37: 37550,
-        38: 37750,
-        39: 38250,
-        40: 38650,
-        41: 39650,
-        42: 41590,
-        43: 45590,
-        66: 66436,
-        67: 67336
-    }
-
-    # Peak throughput lookup tables for each TDD subframe
-    # configuration and bandwidth
-    # yapf: disable
-    tdd_config4_tput_lut = {
-        0: {
-            5: {'DL': 3.82, 'UL': 2.63},
-            10: {'DL': 11.31,'UL': 9.03},
-            15: {'DL': 16.9, 'UL': 20.62},
-            20: {'DL': 22.88, 'UL': 28.43}
-        },
-        1: {
-            5: {'DL': 6.13, 'UL': 4.08},
-            10: {'DL': 18.36, 'UL': 9.69},
-            15: {'DL': 28.62, 'UL': 14.21},
-            20: {'DL': 39.04, 'UL': 19.23}
-        },
-        2: {
-            5: {'DL': 5.68, 'UL': 2.30},
-            10: {'DL': 25.51, 'UL': 4.68},
-            15: {'DL': 39.3, 'UL': 7.13},
-            20: {'DL': 53.64, 'UL': 9.72}
-        },
-        3: {
-            5: {'DL': 8.26, 'UL': 3.45},
-            10: {'DL': 23.20, 'UL': 6.99},
-            15: {'DL': 35.35, 'UL': 10.75},
-            20: {'DL': 48.3, 'UL': 14.6}
-        },
-        4: {
-            5: {'DL': 6.16, 'UL': 2.30},
-            10: {'DL': 26.77, 'UL': 4.68},
-            15: {'DL': 40.7, 'UL': 7.18},
-            20: {'DL': 55.6, 'UL': 9.73}
-        },
-        5: {
-            5: {'DL': 6.91, 'UL': 1.12},
-            10: {'DL': 30.33, 'UL': 2.33},
-            15: {'DL': 46.04, 'UL': 3.54},
-            20: {'DL': 62.9, 'UL': 4.83}
-        },
-        6: {
-            5: {'DL': 6.13, 'UL': 4.13},
-            10: {'DL': 14.79, 'UL': 11.98},
-            15: {'DL': 23.28, 'UL': 17.46},
-            20: {'DL': 31.75, 'UL': 23.95}
-        }
-    }
-
-    tdd_config3_tput_lut = {
-        0: {
-            5: {'DL': 5.04, 'UL': 3.7},
-            10: {'DL': 15.11, 'UL': 17.56},
-            15: {'DL': 22.59, 'UL': 30.31},
-            20: {'DL': 30.41, 'UL': 41.61}
-        },
-        1: {
-            5: {'DL': 8.07, 'UL': 5.66},
-            10: {'DL': 24.58, 'UL': 13.66},
-            15: {'DL': 39.05, 'UL': 20.68},
-            20: {'DL': 51.59, 'UL': 28.76}
-        },
-        2: {
-            5: {'DL': 7.59, 'UL': 3.31},
-            10: {'DL': 34.08, 'UL': 6.93},
-            15: {'DL': 53.64, 'UL': 10.51},
-            20: {'DL': 70.55, 'UL': 14.41}
-        },
-        3: {
-            5: {'DL': 10.9, 'UL': 5.0},
-            10: {'DL': 30.99, 'UL': 10.25},
-            15: {'DL': 48.3, 'UL': 15.81},
-            20: {'DL': 63.24, 'UL': 21.65}
-        },
-        4: {
-            5: {'DL': 8.11, 'UL': 3.32},
-            10: {'DL': 35.74, 'UL': 6.95},
-            15: {'DL': 55.6, 'UL': 10.51},
-            20: {'DL': 72.72, 'UL': 14.41}
-        },
-        5: {
-            5: {'DL': 9.28, 'UL': 1.57},
-            10: {'DL': 40.49, 'UL': 3.44},
-            15: {'DL': 62.9, 'UL': 5.23},
-            20: {'DL': 82.21, 'UL': 7.15}
-        },
-        6: {
-            5: {'DL': 8.06, 'UL': 5.74},
-            10: {'DL': 19.82, 'UL': 17.51},
-            15: {'DL': 31.75, 'UL': 25.77},
-            20: {'DL': 42.12, 'UL': 34.91}
-        }
-    }
-
-    tdd_config2_tput_lut = {
-        0: {
-            5: {'DL': 3.11, 'UL': 2.55},
-            10: {'DL': 9.93, 'UL': 11.1},
-            15: {'DL': 13.9, 'UL': 21.51},
-            20: {'DL': 20.02, 'UL': 41.66}
-        },
-        1: {
-            5: {'DL': 5.33, 'UL': 4.27},
-            10: {'DL': 15.14, 'UL': 13.95},
-            15: {'DL': 33.84, 'UL': 19.73},
-            20: {'DL': 44.61, 'UL': 27.35}
-        },
-        2: {
-            5: {'DL': 6.87, 'UL': 3.32},
-            10: {'DL': 17.06, 'UL': 6.76},
-            15: {'DL': 49.63, 'UL': 10.5},
-            20: {'DL': 65.2, 'UL': 14.41}
-        },
-        3: {
-            5: {'DL': 5.41, 'UL': 4.17},
-            10: {'DL': 16.89, 'UL': 9.73},
-            15: {'DL': 44.29, 'UL': 15.7},
-            20: {'DL': 53.95, 'UL': 19.85}
-        },
-        4: {
-            5: {'DL': 8.7, 'UL': 3.32},
-            10: {'DL': 17.58, 'UL': 6.76},
-            15: {'DL': 51.08, 'UL': 10.47},
-            20: {'DL': 66.45, 'UL': 14.38}
-        },
-        5: {
-            5: {'DL': 9.46, 'UL': 1.55},
-            10: {'DL': 19.02, 'UL': 3.48},
-            15: {'DL': 58.89, 'UL': 5.23},
-            20: {'DL': 76.85, 'UL': 7.1}
-        },
-        6: {
-            5: {'DL': 4.74, 'UL': 3.9},
-            10: {'DL': 12.32, 'UL': 13.37},
-            15: {'DL': 27.74, 'UL': 25.02},
-            20: {'DL': 35.48, 'UL': 32.95}
-        }
-    }
-
-    tdd_config1_tput_lut = {
-        0: {
-            5: {'DL': 4.25, 'UL': 3.35},
-            10: {'DL': 8.38, 'UL': 7.22},
-            15: {'DL': 12.41, 'UL': 13.91},
-            20: {'DL': 16.27, 'UL': 24.09}
-        },
-        1: {
-            5: {'DL': 7.28, 'UL': 4.61},
-            10: {'DL': 14.73, 'UL': 9.69},
-            15: {'DL': 21.91, 'UL': 13.86},
-            20: {'DL': 27.63, 'UL': 17.18}
-        },
-        2: {
-            5: {'DL': 10.37, 'UL': 2.27},
-            10: {'DL': 20.92, 'UL': 4.66},
-            15: {'DL': 31.01, 'UL': 7.04},
-            20: {'DL': 42.03, 'UL': 9.75}
-        },
-        3: {
-            5: {'DL': 9.25, 'UL': 3.44},
-            10: {'DL': 18.38, 'UL': 6.95},
-            15: {'DL': 27.59, 'UL': 10.62},
-            20: {'DL': 34.85, 'UL': 13.45}
-        },
-        4: {
-            5: {'DL': 10.71, 'UL': 2.26},
-            10: {'DL': 21.54, 'UL': 4.67},
-            15: {'DL': 31.91, 'UL': 7.2},
-            20: {'DL': 43.35, 'UL': 9.74}
-        },
-        5: {
-            5: {'DL': 12.34, 'UL': 1.08},
-            10: {'DL': 24.78, 'UL': 2.34},
-            15: {'DL': 36.68, 'UL': 3.57},
-            20: {'DL': 49.84, 'UL': 4.81}
-        },
-        6: {
-            5: {'DL': 5.76, 'UL': 4.41},
-            10: {'DL': 11.68, 'UL': 9.7},
-            15: {'DL': 17.34, 'UL': 17.95},
-            20: {'DL': 23.5, 'UL': 23.42}
-        }
-    }
-    # yapf: enable
-
-    # Peak throughput lookup table dictionary
-    tdd_config_tput_lut_dict = {
-        'TDD_CONFIG1':
-        tdd_config1_tput_lut,  # DL 256QAM, UL 64QAM & MAC padding turned OFF
-        'TDD_CONFIG2':
-        tdd_config2_tput_lut,  # DL 256QAM, UL 64 QAM ON & MAC padding OFF
-        'TDD_CONFIG3':
-        tdd_config3_tput_lut,  # DL 256QAM, UL 64QAM & MAC padding ON
-        'TDD_CONFIG4':
-        tdd_config4_tput_lut  # DL 256QAM, UL 64 QAM OFF & MAC padding ON
-    }
-
-    def __init__(
-        self, simulator, log, dut, test_config, calibration_table,
-        nr_mode=None):
-        """ Initializes the simulator for a single-carrier LTE simulation.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-
-        """
-
-        super().__init__(
-            simulator, log, dut, test_config, calibration_table, nr_mode)
-
-        self.num_carriers = None
-
-        # Force device to LTE only so that it connects faster
-        try:
-            if self.nr_mode and 'nr' == self.nr_mode:
-                self.dut.set_preferred_network_type(
-                    BaseCellularDut.PreferredNetworkType.LTE_NR)
-            else:
-                self.dut.set_preferred_network_type(
-                    BaseCellularDut.PreferredNetworkType.LTE_ONLY)
-        except Exception as e:
-            # If this fails the test should be able to run anyways, even if it
-            # takes longer to find the cell.
-            self.log.warning('Setting preferred RAT failed: ' + str(e))
-
-        # Get LTE CA frequency bands setting from the test configuration
-        if self.KEY_FREQ_BANDS not in test_config:
-            self.log.warning("The key '{}' is not set in the config file. "
-                             "Setting to null by default.".format(
-                                 self.KEY_FREQ_BANDS))
-
-        self.freq_bands = test_config.get(self.KEY_FREQ_BANDS, True)
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-        if self.nr_mode and 'nr' == self.nr_mode:
-            self.log.info('Initializes the callbox to Nr Nsa scenario')
-            self.simulator.setup_nr_nsa_scenario()
-        else:
-            self.log.info('Initializes the callbox to LTE scenario')
-            self.simulator.setup_lte_scenario()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Processes LTE configuration parameters.
-
-        Args:
-            parameters: a configuration dictionary if there is only one carrier,
-                a list if there are multiple cells.
-        """
-        # If there is a single item, put in a list
-        if not isinstance(parameters, list):
-            parameters = [parameters]
-
-        # Pass only PCC configs to BaseSimulation
-        super().configure(parameters[0])
-
-        new_cell_list = []
-        for cell in parameters:
-            if LteCellConfig.PARAM_BAND not in cell:
-                raise ValueError(
-                    "The configuration dictionary must include a key '{}' with "
-                    "the required band number.".format(
-                        LteCellConfig.PARAM_BAND))
-
-            band = cell[LteCellConfig.PARAM_BAND]
-
-            if isinstance(band, str) and not band.isdigit():
-                # If band starts with n then it is an NR band
-                if band[0] == 'n' and band[1:].isdigit():
-                    # If the remaining string is only the band number, add
-                    # the cell and continue
-                    new_cell_list.append(cell)
-                    continue
-
-                ca_class = band[-1].upper()
-                band_num = band[:-1]
-
-                if ca_class in ['A', 'C']:
-                    # Remove the CA class label and add the cell
-                    cell[LteCellConfig.PARAM_BAND] = band_num
-                    new_cell_list.append(cell)
-                elif ca_class == 'B':
-                    raise RuntimeError('Class B LTE CA not supported.')
-                else:
-                    raise ValueError('Invalid band value: ' + band)
-
-                # Class C means that there are two contiguous carriers
-                if ca_class == 'C':
-                    new_cell_list.append(dict(cell))
-                    bw = int(cell[LteCellConfig.PARAM_BW])
-                    dl_earfcn = LteCellConfig.PARAM_DL_EARFCN
-                    new_cell_list[-1][dl_earfcn] = self.LOWEST_DL_CN_DICTIONARY[
-                        int(band_num)] + bw * 10 - 2
-            else:
-                # The band is just a number, so just add it to the list
-                new_cell_list.append(cell)
-
-        # Logs new_cell_list for debug
-        self.log.info('new cell list: {}'.format(new_cell_list))
-
-        self.simulator.set_band_combination(
-            [c[LteCellConfig.PARAM_BAND] for c in new_cell_list])
-
-        self.num_carriers = len(new_cell_list)
-
-        # Setup the base stations with the obtain configuration
-        self.cell_configs = []
-        for i in range(self.num_carriers):
-            band = new_cell_list[i][LteCellConfig.PARAM_BAND]
-            if isinstance(band, str) and band[0] == 'n':
-                self.cell_configs.append(NrCellConfig(self.log))
-            else:
-                self.cell_configs.append(LteCellConfig(self.log))
-            self.cell_configs[i].configure(new_cell_list[i])
-            self.simulator.configure_bts(self.cell_configs[i], i)
-
-        # Now that the band is set, calibrate the link if necessary
-        self.load_pathloss_if_required()
-
-        # This shouldn't be a cell parameter but instead a simulation config
-        # Setup LTE RRC status change function and timer for LTE idle test case
-        if self.PARAM_RRC_STATUS_CHANGE_TIMER not in parameters[0]:
-            self.log.info(
-                "The test config does not include the '{}' key. Disabled "
-                "by default.".format(self.PARAM_RRC_STATUS_CHANGE_TIMER))
-            self.simulator.set_lte_rrc_state_change_timer(False)
-        else:
-            timer = int(parameters[0][self.PARAM_RRC_STATUS_CHANGE_TIMER])
-            self.simulator.set_lte_rrc_state_change_timer(True, timer)
-            self.rrc_sc_timer = timer
-
-    def calibrated_downlink_rx_power(self, bts_config, rsrp):
-        """ LTE simulation overrides this method so that it can convert from
-        RSRP to total signal power transmitted from the basestation.
-
-        Args:
-            bts_config: the current configuration at the base station
-            rsrp: desired rsrp, contained in a key value pair
-        """
-
-        power = self.rsrp_to_signal_power(rsrp, bts_config)
-
-        self.log.info(
-            "Setting downlink signal level to {} RSRP ({} dBm)".format(
-                rsrp, power))
-
-        # Use parent method to calculate signal level
-        return super().calibrated_downlink_rx_power(bts_config, power)
-
-    def downlink_calibration(self, rat=None, power_units_conversion_func=None):
-        """ Computes downlink path loss and returns the calibration value.
-
-        See base class implementation for details.
-
-        Args:
-            rat: ignored, replaced by 'lteRsrp'
-            power_units_conversion_func: ignored, replaced by
-                self.rsrp_to_signal_power
-
-        Returns:
-            Downlink calibration value and measured DL power. Note that the
-            phone only reports RSRP of the primary chain
-        """
-
-        return super().downlink_calibration(
-            rat='lteDbm',
-            power_units_conversion_func=self.rsrp_to_signal_power)
-
-    def rsrp_to_signal_power(self, rsrp, bts_config):
-        """ Converts rsrp to total band signal power
-
-        RSRP is measured per subcarrier, so total band power needs to be
-        multiplied by the number of subcarriers being used.
-
-        Args:
-            rsrp: desired rsrp in dBm
-            bts_config: a base station configuration object
-        Returns:
-            Total band signal power in dBm
-        """
-
-        bandwidth = bts_config.bandwidth
-
-        if bandwidth == 100: # This assumes 273 RBs. TODO: b/229163022
-            power = rsrp + 35.15
-        elif bandwidth == 20:  # 100 RBs
-            power = rsrp + 30.79
-        elif bandwidth == 15:  # 75 RBs
-            power = rsrp + 29.54
-        elif bandwidth == 10:  # 50 RBs
-            power = rsrp + 27.78
-        elif bandwidth == 5:  # 25 RBs
-            power = rsrp + 24.77
-        elif bandwidth == 3:  # 15 RBs
-            power = rsrp + 22.55
-        elif bandwidth == 1.4:  # 6 RBs
-            power = rsrp + 18.57
-        else:
-            raise ValueError("Invalid bandwidth value.")
-
-        return power
-
-    def maximum_downlink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-        return sum(
-            self.bts_maximum_downlink_throughtput(self.cell_configs[bts_index])
-            for bts_index in range(self.num_carriers))
-
-    def bts_maximum_downlink_throughtput(self, bts_config):
-        """ Calculates maximum achievable downlink throughput for a single
-        base station from its configuration object.
-
-        Args:
-            bts_config: a base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-        if bts_config.mimo_mode == MimoMode.MIMO_1x1:
-            streams = 1
-        elif bts_config.mimo_mode == MimoMode.MIMO_2x2:
-            streams = 2
-        elif bts_config.mimo_mode == MimoMode.MIMO_4x4:
-            streams = 4
-        else:
-            raise ValueError('Unable to calculate maximum downlink throughput '
-                             'because the MIMO mode has not been set.')
-
-        bandwidth = bts_config.bandwidth
-        rb_ratio = bts_config.dl_rbs / TOTAL_RBS_DICTIONARY[bandwidth]
-        mcs = bts_config.dl_mcs
-
-        max_rate_per_stream = None
-
-        tdd_subframe_config = bts_config.dlul_config
-        duplex_mode = bts_config.get_duplex_mode()
-
-        if duplex_mode == DuplexMode.TDD:
-            if bts_config.dl_256_qam_enabled:
-                if mcs == 27:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG3'][tdd_subframe_config][bandwidth][
-                                'DL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG2'][tdd_subframe_config][bandwidth][
-                                'DL']
-            else:
-                if mcs == 28:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG4'][tdd_subframe_config][bandwidth][
-                                'DL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG1'][tdd_subframe_config][bandwidth][
-                                'DL']
-
-        elif duplex_mode == DuplexMode.FDD:
-            if (not bts_config.dl_256_qam_enabled and bts_config.mac_padding
-                    and mcs == 28):
-                max_rate_per_stream = {
-                    3: 9.96,
-                    5: 17.0,
-                    10: 34.7,
-                    15: 52.7,
-                    20: 72.2
-                }.get(bandwidth, None)
-            if (not bts_config.dl_256_qam_enabled and bts_config.mac_padding
-                    and mcs == 27):
-                max_rate_per_stream = {
-                    1.4: 2.94,
-                }.get(bandwidth, None)
-            elif (not bts_config.dl_256_qam_enabled
-                  and not bts_config.mac_padding and mcs == 27):
-                max_rate_per_stream = {
-                    1.4: 2.87,
-                    3: 7.7,
-                    5: 14.4,
-                    10: 28.7,
-                    15: 42.3,
-                    20: 57.7
-                }.get(bandwidth, None)
-            elif bts_config.dl_256_qam_enabled and bts_config.mac_padding and mcs == 27:
-                max_rate_per_stream = {
-                    3: 13.2,
-                    5: 22.9,
-                    10: 46.3,
-                    15: 72.2,
-                    20: 93.9
-                }.get(bandwidth, None)
-            elif bts_config.dl_256_qam_enabled and bts_config.mac_padding and mcs == 26:
-                max_rate_per_stream = {
-                    1.4: 3.96,
-                }.get(bandwidth, None)
-            elif (bts_config.dl_256_qam_enabled and not bts_config.mac_padding
-                  and mcs == 27):
-                max_rate_per_stream = {
-                    3: 11.3,
-                    5: 19.8,
-                    10: 44.1,
-                    15: 68.1,
-                    20: 88.4
-                }.get(bandwidth, None)
-            elif (bts_config.dl_256_qam_enabled and not bts_config.mac_padding
-                  and mcs == 26):
-                max_rate_per_stream = {
-                    1.4: 3.96,
-                }.get(bandwidth, None)
-
-        if not max_rate_per_stream:
-            raise NotImplementedError(
-                "The calculation for MAC padding = {} "
-                "and mcs = {} is not implemented.".format(
-                    "FULLALLOCATION" if bts_config.mac_padding else "OFF",
-                    mcs))
-
-        return max_rate_per_stream * streams * rb_ratio
-
-    def maximum_uplink_throughput(self):
-        """ Calculates maximum achievable uplink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        return self.bts_maximum_uplink_throughtput(self.cell_configs[0])
-
-    def bts_maximum_uplink_throughtput(self, bts_config):
-        """ Calculates maximum achievable uplink throughput for the selected
-        basestation from its configuration object.
-
-        Args:
-            bts_config: an LTE base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        bandwidth = bts_config.bandwidth
-        rb_ratio = bts_config.ul_rbs / TOTAL_RBS_DICTIONARY[bandwidth]
-        mcs = bts_config.ul_mcs
-
-        max_rate_per_stream = None
-
-        tdd_subframe_config = bts_config.dlul_config
-        duplex_mode = bts_config.get_duplex_mode()
-
-        if duplex_mode == DuplexMode.TDD:
-            if bts_config.ul_64_qam_enabled:
-                if mcs == 28:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG3'][tdd_subframe_config][bandwidth][
-                                'UL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG2'][tdd_subframe_config][bandwidth][
-                                'UL']
-            else:
-                if mcs == 23:
-                    if bts_config.mac_padding:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG4'][tdd_subframe_config][bandwidth][
-                                'UL']
-                    else:
-                        max_rate_per_stream = self.tdd_config_tput_lut_dict[
-                            'TDD_CONFIG1'][tdd_subframe_config][bandwidth][
-                                'UL']
-
-        elif duplex_mode == DuplexMode.FDD:
-            if mcs == 23 and not bts_config.ul_64_qam_enabled:
-                max_rate_per_stream = {
-                    1.4: 2.85,
-                    3: 7.18,
-                    5: 12.1,
-                    10: 24.5,
-                    15: 36.5,
-                    20: 49.1
-                }.get(bandwidth, None)
-            elif mcs == 28 and bts_config.ul_64_qam_enabled:
-                max_rate_per_stream = {
-                    1.4: 4.2,
-                    3: 10.5,
-                    5: 17.2,
-                    10: 35.3,
-                    15: 53.0,
-                    20: 72.6
-                }.get(bandwidth, None)
-
-        if not max_rate_per_stream:
-            raise NotImplementedError(
-                "The calculation fir mcs = {} is not implemented.".format(
-                    "FULLALLOCATION" if bts_config.mac_padding else "OFF",
-                    mcs))
-
-        return max_rate_per_stream * rb_ratio
-
-    def calibrate(self, band):
-        """ Calculates UL and DL path loss if it wasn't done before
-
-        Before running the base class implementation, configure the base station
-        to only use one downlink antenna with maximum bandwidth.
-
-        Args:
-            band: the band that is currently being calibrated.
-        """
-
-        # Save initial values in a configuration object so they can be restored
-        restore_config = LteCellConfig(self.log)
-        restore_config.mimo_mode = self.cell_configs[0].mimo_mode
-        restore_config.transmission_mode = \
-            self.cell_configs[0].transmission_mode
-        restore_config.bandwidth = self.cell_configs[0].bandwidth
-
-        # Set up a temporary calibration configuration.
-        temporary_config = LteCellConfig(self.log)
-        temporary_config.mimo_mode = MimoMode.MIMO_1x1
-        temporary_config.transmission_mode = TransmissionMode.TM1
-        temporary_config.bandwidth = max(
-            self.allowed_bandwidth_dictionary[int(band)])
-        self.simulator.configure_bts(temporary_config)
-        self.cell_configs[0].incorporate(temporary_config)
-
-        super().calibrate(band)
-
-        # Restore values as they were before changing them for calibration.
-        self.simulator.configure_bts(restore_config)
-        self.cell_configs[0].incorporate(restore_config)
-
-    def start_traffic_for_calibration(self):
-        """ If MAC padding is enabled, there is no need to start IP traffic. """
-        if not self.cell_configs[0].mac_padding:
-            super().start_traffic_for_calibration()
-
-    def stop_traffic_for_calibration(self):
-        """ If MAC padding is enabled, IP traffic wasn't started. """
-        if not self.cell_configs[0].mac_padding:
-            super().stop_traffic_for_calibration()
-
-    def get_measured_ul_power(self, samples=5, wait_after_sample=3):
-        """ Calculates UL power using measurements from the callbox and the
-        calibration data.
-
-        Args:
-            samples: the numble of samples to average
-            wait_after_sample: time in seconds to wait in between samples
-
-        Returns:
-            the ul power at the UE antenna ports in dBs
-        """
-        ul_power_sum = 0
-        samples_left = samples
-
-        while samples_left > 0:
-            ul_power_sum += self.simulator.get_measured_pusch_power()
-            samples_left -= 1
-            time.sleep(wait_after_sample)
-
-        # Got enough samples, return calibrated average
-        if self.dl_path_loss:
-            return ul_power_sum / samples + self.ul_path_loss
-        else:
-            self.log.warning('No uplink calibration data. Returning '
-                             'uncalibrated values as measured by the '
-                             'callbox.')
-            return ul_power_sum / samples
-
-    def start(self):
-        """ Set the signal level for the secondary carriers, as the base class
-        implementation of this method will only set up downlink power for the
-        primary carrier component.
-
-        After that, attaches the secondary carriers."""
-
-        super().start()
-
-        if self.num_carriers > 1:
-            if self.sim_dl_power:
-                self.log.info('Setting DL power for secondary carriers.')
-
-                for bts_index in range(1, self.num_carriers):
-                    new_config = LteCellConfig(self.log)
-                    new_config.output_power = self.calibrated_downlink_rx_power(
-                        self.cell_configs[bts_index], self.sim_dl_power)
-                    self.simulator.configure_bts(new_config, bts_index)
-                    self.cell_configs[bts_index].incorporate(new_config)
-
-            self.simulator.lte_attach_secondary_carriers(self.freq_bands)
diff --git a/src/antlion/controllers/cellular_lib/NrCellConfig.py b/src/antlion/controllers/cellular_lib/NrCellConfig.py
deleted file mode 100644
index ff72dae..0000000
--- a/src/antlion/controllers/cellular_lib/NrCellConfig.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import antlion.controllers.cellular_lib.BaseCellConfig as base_cell
-import antlion.controllers.cellular_lib.LteSimulation as lte_sim
-
-
-class NrCellConfig(base_cell.BaseCellConfig):
-    """ NR cell configuration class.
-
-    Attributes:
-        band: an integer indicating the required band number.
-        bandwidth: a integer indicating the required channel bandwidth
-    """
-
-    PARAM_BAND = 'band'
-    PARAM_BW = 'bw'
-    PARAM_DL_MCS = 'dlmcs'
-    PARAM_DL_RBS = 'dl_rbs'
-    PARAM_PADDING = 'mac_padding'
-    PARAM_MIMO = 'mimo'
-    PARAM_NRARFCN = 'nr_arfcn'
-    PARAM_SCHEDULING = "scheduling"
-    PARAM_SCHEDULING_DYNAMIC = "dynamic"
-    PARAM_SCHEDULING_STATIC = "static"
-    PARAM_UL_MCS = 'ulmcs'
-    PARAM_UL_RBS = 'ul_rbs'
-
-    def __init__(self, log):
-        """ Initialize the base station config by setting all its
-        parameters to None.
-        Args:
-            log: logger object.
-        """
-        super().__init__(log)
-        self.band = None
-        self.bandwidth = None
-        self.dl_rbs = None
-        self.ul_rbs = None
-        self.dl_mcs = None
-        self.ul_mcs = None
-        self.mac_padding = None
-        self.mimo_mode = None
-        self.nr_arfcn = None
-
-    def configure(self, parameters):
-        """ Configures an NR cell using a dictionary of parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with "
-                "the required band number.".format(self.PARAM_BAND))
-        nr_band = parameters[self.PARAM_BAND]
-        if nr_band[0] == 'n':
-            nr_band = nr_band[1:]
-        self.band = nr_band
-
-        if self.PARAM_NRARFCN in parameters:
-            self.nr_arfcn = int(parameters[self.PARAM_NRARFCN])
-
-        if self.PARAM_BW not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter {} with an "
-                "int value (to indicate 1.4 MHz use 14).".format(
-                    self.PARAM_BW))
-        bw = float(parameters[self.PARAM_BW])
-
-        if abs(bw - 14) < 0.00000000001:
-            bw = 1.4
-
-        self.bandwidth = bw
-
-        # Setup mimo mode
-        if self.PARAM_MIMO not in parameters:
-            raise ValueError(
-                "The config dictionary must include parameter '{}' with the "
-                "mimo mode.".format(self.PARAM_MIMO))
-
-        for mimo_mode in lte_sim.MimoMode:
-            if parameters[self.PARAM_MIMO] == mimo_mode.value:
-                self.mimo_mode = mimo_mode
-                break
-        else:
-            raise ValueError("The value of {} must be one of the following:"
-                             "1x1, 2x2 or 4x4.".format(self.PARAM_MIMO))
-
-        if self.PARAM_SCHEDULING not in parameters:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-            self.log.warning(
-                "The test config does not include the '{}' key. Setting to "
-                "static by default.".format(self.PARAM_SCHEDULING))
-        elif parameters[
-                self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_DYNAMIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.DYNAMIC
-        elif parameters[self.PARAM_SCHEDULING] == self.PARAM_SCHEDULING_STATIC:
-            self.scheduling_mode = lte_sim.SchedulingMode.STATIC
-        else:
-            raise ValueError("Key '{}' must have a value of "
-                             "'dynamic' or 'static'.".format(
-                                 self.PARAM_SCHEDULING))
-
-        if self.scheduling_mode == lte_sim.SchedulingMode.STATIC:
-
-            if self.PARAM_PADDING not in parameters:
-                self.log.warning(
-                    "The '{}' parameter was not set. Enabling MAC padding by "
-                    "default.".format(self.PARAM_PADDING))
-                self.mac_padding = True
-
-            if self.PARAM_DL_MCS in parameters:
-                self.dl_mcs = int(parameters[self.PARAM_DL_MCS])
-
-            if self.PARAM_UL_MCS in parameters:
-                self.ul_mcs = int(parameters[self.PARAM_UL_MCS])
-
-            # Temproraily setting: set 273 for bandwidth of 100 MHz
-            self.dl_rbs = 273
-            self.ul_rbs = 273
-
-    def __str__(self):
-        return str(vars(self))
diff --git a/src/antlion/controllers/cellular_lib/OWNERS b/src/antlion/controllers/cellular_lib/OWNERS
deleted file mode 100644
index f88a96c..0000000
--- a/src/antlion/controllers/cellular_lib/OWNERS
+++ /dev/null
@@ -1,8 +0,0 @@
-iguarna@google.com
-chaoyangf@google.com
-yixiang@google.com
-codycaldwell@google.com
-
-per-file PresetSimulation.py = hmtuan@google.com
-per-file PresetSimulation.py = harjani@google.com
-per-file PresetSimulation.py = jethier@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/cellular_lib/PresetSimulation.py b/src/antlion/controllers/cellular_lib/PresetSimulation.py
deleted file mode 100644
index f1f649b..0000000
--- a/src/antlion/controllers/cellular_lib/PresetSimulation.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib import BaseCellularDut
-
-
-class PresetSimulation(BaseSimulation):
-    """5G preset simulation.
-
-    The simulation will be configed by importing SCPI config file
-    instead of individually set params.
-    """
-
-    # Keys to obtain settings from the test_config dictionary.
-    KEY_CELL_INFO = "cell_info"
-    KEY_SCPI_FILE_NAME = "scpi_file"
-
-    def __init__(self,
-                 simulator,
-                 log,
-                 dut,
-                 test_config,
-                 calibration_table,
-                 nr_mode=None):
-        """Initializes the simulator for 5G preset simulation.
-
-        Args:
-            simulator: a cellular simulator controller.
-            log: a logger handle.
-            dut: a device handler implementing BaseCellularDut.
-            test_config: test configuration obtained from the config file.
-            calibration_table: a dictionary containing path losses
-                for different bands.
-        """
-
-        super().__init__(simulator, log, dut, test_config, calibration_table,
-                         nr_mode)
-
-        # Set to KeySight APN
-        log.info('Configuring APN.')
-        self.dut.set_apn('Keysight', 'Keysight')
-        self.num_carriers = None
-
-        # Enable roaming on the phone
-        self.dut.toggle_data_roaming(True)
-
-        # Force device to LTE only so that it connects faster
-        try:
-            self.dut.set_preferred_network_type(
-                BaseCellularDut.PreferredNetworkType.NR_LTE)
-        except Exception as e:
-            # If this fails the test should be able to run anyways, even if it
-            # takes longer to find the cell.
-            self.log.warning('Setting preferred RAT failed: ' + str(e))
-
-    def setup_simulator(self):
-        """Do initial configuration in the simulator. """
-        self.log.info('This simulation does not require initial setup.')
-
-    def configure(self, parameters):
-        """Configures simulation by importing scpi file.
-
-        A pre-made SCPI file include all the essential configuration
-        for the simulation is imported by send SCPI import command
-        to the callbox.
-
-        Args:
-            parameters: a configuration dictionary which includes scpi file path
-                if there is only one carrier, a list if there are multiple cells.
-        """
-        scpi_file = parameters[0][self.KEY_SCPI_FILE_NAME]
-        cell_infos = parameters[0][self.KEY_CELL_INFO]
-
-        self.log.info('Configure test scenario with\n' +
-                      f' SCPI config file: {scpi_file}\n' +
-                      f' cell info: {cell_infos}')
-
-        self.simulator.import_configuration(scpi_file)
-        self.simulator.set_cell_info(cell_infos)
-
-    def start(self):
-        """Start simulation.
-
-        Waiting for the DUT to connect to the callbox.
-
-        Raise:
-            RuntimeError: simulation fail to start
-                due to unable to connect dut and cells.
-        """
-
-        try:
-            self.attach()
-        except Exception as exc:
-            raise RuntimeError('Simulation fail to start.') from exc
-
-    def attach(self):
-        """Attach UE to the callbox.
-
-        Toggle airplane mode on-off and wait for a specified timeout,
-        repeat until the UE connect to the callbox.
-
-        Raise:
-            RuntimeError: attaching fail
-                due to unable to connect dut and cells.
-        """
-        try:
-            self.simulator.wait_until_attached(self.dut, self.attach_timeout,
-                                               self.attach_retries)
-        except Exception as exc:
-            raise RuntimeError('Could not attach to base station.') from exc
-
-    def calibrated_downlink_rx_power(self, bts_config, rsrp):
-        """Convert RSRP to total signal power from the basestation.
-
-        Args:
-            bts_config: the current configuration at the base station
-            rsrp: desired rsrp, contained in a key value pair
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def downlink_calibration(self, rat=None, power_units_conversion_func=None):
-        """Computes downlink path loss and returns the calibration value.
-
-        See base class implementation for details.
-
-        Args:
-            rat: ignored, replaced by 'lteRsrp'.
-            power_units_conversion_func: ignored, replaced by
-                self.rsrp_to_signal_power.
-
-        Returns:
-            Downlink calibration value and measured DL power. Note that the
-            phone only reports RSRP of the primary chain
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def rsrp_to_signal_power(self, rsrp, bts_config):
-        """Converts rsrp to total band signal power
-
-        RSRP is measured per subcarrier, so total band power needs to be
-        multiplied by the number of subcarriers being used.
-
-        Args:
-            rsrp: desired rsrp in dBm.
-            bts_config: a base station configuration object.
-
-        Returns:
-            Total band signal power in dBm
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def maximum_downlink_throughput(self):
-        """Calculates maximum achievable downlink throughput in.
-
-        The calculation is based on the current simulation state
-        Returns:
-            Maximum throughput in mbps.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def bts_maximum_downlink_throughtput(self, bts_config):
-        """Calculates maximum achievable downlink throughput for a single
-
-        base station from its configuration object.
-
-        Args:
-            bts_config: a base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def maximum_uplink_throughput(self):
-        """Calculates maximum achievable uplink throughput.
-
-        Returns:
-            Maximum throughput in mbps.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def bts_maximum_uplink_throughtput(self, bts_config):
-        """Calculates maximum achievable uplink throughput
-
-        The calculation is for selected basestation
-        from its configuration object.
-        Args:
-            bts_config: an LTE base station configuration object.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def calibrate(self, band):
-        """Calculates UL and DL path loss if it wasn't done before
-
-        Before running the base class implementation, configure the base station
-        to only use one downlink antenna with maximum bandwidth.
-
-        Args:
-            band: the band that is currently being calibrated.
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def start_traffic_for_calibration(self):
-        """If MAC padding is enabled, there is no need to start IP traffic. """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def stop_traffic_for_calibration(self):
-        """If MAC padding is enabled, IP traffic wasn't started. """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
-
-    def get_measured_ul_power(self, samples=5, wait_after_sample=3):
-        """Calculates UL power.
-
-        The calculation is based on measurements from the callbox
-        and the calibration data.
-        Args:
-            samples: the numble of samples to average
-            wait_after_sample: time in seconds to wait in between samples
-
-        Returns:
-            the ul power at the UE antenna ports in dBs
-        """
-        raise NotImplementedError(
-            'This simulation mode does not support this configuration option')
diff --git a/src/antlion/controllers/cellular_lib/UmtsSimulation.py b/src/antlion/controllers/cellular_lib/UmtsSimulation.py
deleted file mode 100644
index 316186f..0000000
--- a/src/antlion/controllers/cellular_lib/UmtsSimulation.py
+++ /dev/null
@@ -1,275 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ntpath
-import time
-
-from antlion.controllers.anritsu_lib import md8475_cellular_simulator as anritsusim
-from antlion.controllers.anritsu_lib.md8475a import BtsNumber
-from antlion.controllers.anritsu_lib.md8475a import BtsPacketRate
-from antlion.controllers.cellular_lib.BaseSimulation import BaseSimulation
-from antlion.controllers.cellular_lib import BaseCellularDut
-
-
-class UmtsSimulation(BaseSimulation):
-    """ Single base station simulation. """
-
-    # Simulation config files in the callbox computer.
-    # These should be replaced in the future by setting up
-    # the same configuration manually.
-
-    UMTS_BASIC_SIM_FILE = 'SIM_default_WCDMA.wnssp'
-
-    UMTS_R99_CELL_FILE = 'CELL_WCDMA_R99_config.wnscp'
-
-    UMTS_R7_CELL_FILE = 'CELL_WCDMA_R7_config.wnscp'
-
-    UMTS_R8_CELL_FILE = 'CELL_WCDMA_R8_config.wnscp'
-
-    # Configuration dictionary keys
-    PARAM_RELEASE_VERSION = "r"
-    PARAM_RELEASE_VERSION_99 = "99"
-    PARAM_RELEASE_VERSION_8 = "8"
-    PARAM_RELEASE_VERSION_7 = "7"
-    PARAM_BAND = "band"
-    PARAM_RRC_STATUS_CHANGE_TIMER = "rrcstatuschangetimer"
-
-    # Units in which signal level is defined in DOWNLINK_SIGNAL_LEVEL_DICTIONARY
-    DOWNLINK_SIGNAL_LEVEL_UNITS = "RSCP"
-
-    # RSCP signal levels thresholds (as reported by Android). Units are dBm
-    # Using LTE thresholds + 24 dB to have equivalent SPD
-    # 24 dB comes from 10 * log10(3.84 MHz / 15 KHz)
-
-    DOWNLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'excellent': -51,
-        'high': -76,
-        'medium': -86,
-        'weak': -96
-    }
-
-    # Transmitted output power for the phone
-    # Stronger Tx power means that the signal received by the BTS is weaker
-    # Units are dBm
-
-    UPLINK_SIGNAL_LEVEL_DICTIONARY = {
-        'low': -20,
-        'medium': 8,
-        'high': 15,
-        'max': 23
-    }
-
-    # Converts packet rate to the throughput that can be actually obtained in
-    # Mbits/s
-
-    packet_rate_to_dl_throughput = {
-        BtsPacketRate.WCDMA_DL384K_UL64K: 0.362,
-        BtsPacketRate.WCDMA_DL21_6M_UL5_76M: 18.5,
-        BtsPacketRate.WCDMA_DL43_2M_UL5_76M: 36.9
-    }
-
-    packet_rate_to_ul_throughput = {
-        BtsPacketRate.WCDMA_DL384K_UL64K: 0.0601,
-        BtsPacketRate.WCDMA_DL21_6M_UL5_76M: 5.25,
-        BtsPacketRate.WCDMA_DL43_2M_UL5_76M: 5.25
-    }
-
-    def __init__(self, simulator, log, dut, test_config, calibration_table):
-        """ Initializes the cellular simulator for a UMTS simulation.
-
-        Loads a simple UMTS simulation environment with 1 basestation. It also
-        creates the BTS handle so we can change the parameters as desired.
-
-        Args:
-            simulator: a cellular simulator controller
-            log: a logger handle
-            dut: a device handler implementing BaseCellularDut
-            test_config: test configuration obtained from the config file
-            calibration_table: a dictionary containing path losses for
-                different bands.
-
-        """
-        # The UMTS simulation relies on the cellular simulator to be a MD8475
-        if not isinstance(self.simulator, anritsusim.MD8475CellularSimulator):
-            raise ValueError('The UMTS simulation relies on the simulator to '
-                             'be an Anritsu MD8475 A/B instrument.')
-
-        # The Anritsu controller needs to be unwrapped before calling
-        # super().__init__ because setup_simulator() requires self.anritsu and
-        # will be called during the parent class initialization.
-        self.anritsu = self.simulator.anritsu
-        self.bts1 = self.anritsu.get_BTS(BtsNumber.BTS1)
-
-        super().__init__(simulator, log, dut, test_config, calibration_table)
-
-        self.dut.set_preferred_network_type(
-            BaseCellularDut.PreferredNetworkType.WCDMA_ONLY)
-
-        self.release_version = None
-        self.packet_rate = None
-
-    def setup_simulator(self):
-        """ Do initial configuration in the simulator. """
-
-        # Load callbox config files
-        callbox_config_path = self.CALLBOX_PATH_FORMAT_STR.format(
-            self.anritsu._md8475_version)
-
-        self.anritsu.load_simulation_paramfile(
-            ntpath.join(callbox_config_path, self.UMTS_BASIC_SIM_FILE))
-
-        # Start simulation if it wasn't started
-        self.anritsu.start_simulation()
-
-    def configure(self, parameters):
-        """ Configures simulation using a dictionary of parameters.
-
-        Processes UMTS configuration parameters.
-
-        Args:
-            parameters: a configuration dictionary
-        """
-        super().configure(parameters)
-
-        # Setup band
-        if self.PARAM_BAND not in parameters:
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with "
-                "the required band number.".format(self.PARAM_BAND))
-
-        self.set_band(self.bts1, parameters[self.PARAM_BAND])
-        self.load_pathloss_if_required()
-
-        # Setup release version
-        if (self.PARAM_RELEASE_VERSION not in parameters
-                or parameters[self.PARAM_RELEASE_VERSION] not in [
-                    self.PARAM_RELEASE_VERSION_7, self.PARAM_RELEASE_VERSION_8,
-                    self.PARAM_RELEASE_VERSION_99
-                ]):
-            raise ValueError(
-                "The configuration dictionary must include a key '{}' with a "
-                "valid release version.".format(self.PARAM_RELEASE_VERSION))
-
-        self.set_release_version(self.bts1,
-                                 parameters[self.PARAM_RELEASE_VERSION])
-
-        # Setup W-CDMA RRC status change and CELL_DCH timer for idle test case
-        if self.PARAM_RRC_STATUS_CHANGE_TIMER not in parameters:
-            self.log.info(
-                "The config dictionary does not include a '{}' key. Disabled "
-                "by default.".format(self.PARAM_RRC_STATUS_CHANGE_TIMER))
-            self.anritsu.set_umts_rrc_status_change(False)
-        else:
-            self.rrc_sc_timer = int(
-                parameters[self.PARAM_RRC_STATUS_CHANGE_TIMER])
-            self.anritsu.set_umts_rrc_status_change(True)
-            self.anritsu.set_umts_dch_stat_timer(self.rrc_sc_timer)
-
-    def set_release_version(self, bts, release_version):
-        """ Sets the release version.
-
-        Loads the cell parameter file matching the requested release version.
-        Does nothing is release version is already the one requested.
-
-        """
-
-        if release_version == self.release_version:
-            self.log.info(
-                "Release version is already {}.".format(release_version))
-            return
-        if release_version == self.PARAM_RELEASE_VERSION_99:
-
-            cell_parameter_file = self.UMTS_R99_CELL_FILE
-            self.packet_rate = BtsPacketRate.WCDMA_DL384K_UL64K
-
-        elif release_version == self.PARAM_RELEASE_VERSION_7:
-
-            cell_parameter_file = self.UMTS_R7_CELL_FILE
-            self.packet_rate = BtsPacketRate.WCDMA_DL21_6M_UL5_76M
-
-        elif release_version == self.PARAM_RELEASE_VERSION_8:
-
-            cell_parameter_file = self.UMTS_R8_CELL_FILE
-            self.packet_rate = BtsPacketRate.WCDMA_DL43_2M_UL5_76M
-
-        else:
-            raise ValueError("Invalid UMTS release version number.")
-
-        self.anritsu.load_cell_paramfile(
-            ntpath.join(self.callbox_config_path, cell_parameter_file))
-
-        self.release_version = release_version
-
-        # Loading a cell parameter file stops the simulation
-        self.start()
-
-        bts.packet_rate = self.packet_rate
-
-    def maximum_downlink_throughput(self):
-        """ Calculates maximum achievable downlink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        if self.packet_rate not in self.packet_rate_to_dl_throughput:
-            raise NotImplementedError("Packet rate not contained in the "
-                                      "throughput dictionary.")
-        return self.packet_rate_to_dl_throughput[self.packet_rate]
-
-    def maximum_uplink_throughput(self):
-        """ Calculates maximum achievable uplink throughput in the current
-            simulation state.
-
-        Returns:
-            Maximum throughput in mbps.
-
-        """
-
-        if self.packet_rate not in self.packet_rate_to_ul_throughput:
-            raise NotImplementedError("Packet rate not contained in the "
-                                      "throughput dictionary.")
-        return self.packet_rate_to_ul_throughput[self.packet_rate]
-
-    def set_downlink_rx_power(self, bts, signal_level):
-        """ Starts IP data traffic while setting downlink power.
-
-        This is only necessary for UMTS for unclear reasons. b/139026916 """
-
-        # Starts IP traffic while changing this setting to force the UE to be
-        # in Communication state, as UL power cannot be set in Idle state
-        self.start_traffic_for_calibration()
-
-        # Wait until it goes to communication state
-        self.anritsu.wait_for_communication_state()
-
-        super().set_downlink_rx_power(bts, signal_level)
-
-        # Stop IP traffic after setting the signal level
-        self.stop_traffic_for_calibration()
-
-    def set_band(self, bts, band):
-        """ Sets the band used for communication.
-
-        Args:
-            bts: basestation handle
-            band: desired band
-        """
-
-        bts.band = band
-        time.sleep(5)  # It takes some time to propagate the new band
diff --git a/src/antlion/controllers/cellular_lib/__init__.py b/src/antlion/controllers/cellular_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/cellular_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/cellular_simulator.py b/src/antlion/controllers/cellular_simulator.py
deleted file mode 100644
index 30eeba3..0000000
--- a/src/antlion/controllers/cellular_simulator.py
+++ /dev/null
@@ -1,489 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion import logger
-from antlion.controllers import cellular_lib
-
-
-class AbstractCellularSimulator:
-    """ A generic cellular simulator controller class that can be derived to
-    implement equipment specific classes and allows the tests to be implemented
-    without depending on a singular instrument model.
-
-    This class defines the interface that every cellular simulator controller
-    needs to implement and shouldn't be instantiated by itself. """
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = None
-
-    # The maximum power that the equipment is able to transmit
-    MAX_DL_POWER = None
-
-    def __init__(self):
-        """ Initializes the cellular simulator. """
-        self.log = logger.create_tagged_trace_logger('CellularSimulator')
-        self.num_carriers = None
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        raise NotImplementedError()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        raise NotImplementedError()
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated CA combination.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        raise NotImplementedError()
-
-    def configure_bts(self, config, bts_index=0):
-        """ Commands the equipment to setup a base station with the required
-        configuration. This method applies configurations that are common to all
-        RATs.
-
-        Args:
-            config: a BaseSimulation.BtsConfig object.
-            bts_index: the base station number.
-        """
-        self.log.info('The config for {} is {}'.format(bts_index, str(config)))
-
-        if config.output_power:
-            self.set_output_power(bts_index, config.output_power)
-
-        if config.input_power:
-            self.set_input_power(bts_index, config.input_power)
-
-        if isinstance(config, cellular_lib.LteCellConfig.LteCellConfig):
-            self.configure_lte_bts(config, bts_index)
-
-        if isinstance(config, cellular_lib.NrCellConfig.NrCellConfig):
-            self.configure_nr_bts(config, bts_index)
-
-    def configure_lte_bts(self, config, bts_index=0):
-        """ Commands the equipment to setup an LTE base station with the
-        required configuration.
-
-        Args:
-            config: an LteSimulation.BtsConfig object.
-            bts_index: the base station number.
-        """
-        if config.band:
-            self.set_band(bts_index, config.band)
-
-        if config.dlul_config:
-            self.set_tdd_config(bts_index, config.dlul_config)
-
-        if config.ssf_config:
-            self.set_ssf_config(bts_index, config.ssf_config)
-
-        if config.bandwidth:
-            self.set_bandwidth(bts_index, config.bandwidth)
-
-        if config.dl_channel:
-            self.set_downlink_channel_number(bts_index, config.dl_channel)
-
-        if config.mimo_mode:
-            self.set_mimo_mode(bts_index, config.mimo_mode)
-
-        if config.transmission_mode:
-            self.set_transmission_mode(bts_index, config.transmission_mode)
-
-        # Modulation order should be set before set_scheduling_mode being
-        # called.
-        if config.dl_256_qam_enabled is not None:
-            self.set_dl_256_qam_enabled(bts_index, config.dl_256_qam_enabled)
-
-        if config.ul_64_qam_enabled is not None:
-            self.set_ul_64_qam_enabled(bts_index, config.ul_64_qam_enabled)
-
-        if config.scheduling_mode:
-
-            if (config.scheduling_mode ==
-                    cellular_lib.LteSimulation.SchedulingMode.STATIC
-                    and not (config.dl_rbs and config.ul_rbs and config.dl_mcs
-                             and config.ul_mcs)):
-                raise ValueError('When the scheduling mode is set to manual, '
-                                 'the RB and MCS parameters are required.')
-
-            # If scheduling mode is set to Dynamic, the RB and MCS parameters
-            # will be ignored by set_scheduling_mode.
-            self.set_scheduling_mode(bts_index, config.scheduling_mode,
-                                     config.dl_mcs, config.ul_mcs,
-                                     config.dl_rbs, config.ul_rbs)
-
-        # This variable stores a boolean value so the following is needed to
-        # differentiate False from None
-        if config.mac_padding is not None:
-            self.set_mac_padding(bts_index, config.mac_padding)
-
-        if config.cfi:
-            self.set_cfi(bts_index, config.cfi)
-
-        if config.paging_cycle:
-            self.set_paging_cycle(bts_index, config.paging_cycle)
-
-        if config.phich:
-            self.set_phich_resource(bts_index, config.phich)
-
-        if config.drx_connected_mode:
-            self.set_drx_connected_mode(bts_index, config.drx_connected_mode)
-
-            if config.drx_on_duration_timer:
-                self.set_drx_on_duration_timer(bts_index,
-                                               config.drx_on_duration_timer)
-
-            if config.drx_inactivity_timer:
-                self.set_drx_inactivity_timer(bts_index,
-                                              config.drx_inactivity_timer)
-
-            if config.drx_retransmission_timer:
-                self.set_drx_retransmission_timer(
-                    bts_index, config.drx_retransmission_timer)
-
-            if config.drx_long_cycle:
-                self.set_drx_long_cycle(bts_index, config.drx_long_cycle)
-
-            if config.drx_long_cycle_offset is not None:
-                self.set_drx_long_cycle_offset(bts_index,
-                                               config.drx_long_cycle_offset)
-
-    def configure_nr_bts(self, config, bts_index=1):
-        """ Commands the equipment to setup an LTE base station with the
-        required configuration.
-
-        Args:
-            config: an LteSimulation.BtsConfig object.
-            bts_index: the base station number.
-        """
-        if config.band:
-            self.set_band(bts_index, config.band)
-
-        if config.nr_arfcn:
-            self.set_downlink_channel_number(bts_index, config.nr_arfcn)
-
-        if config.bandwidth:
-            self.set_bandwidth(bts_index, config.bandwidth)
-
-        if config.mimo_mode:
-            self.set_mimo_mode(bts_index, config.mimo_mode)
-
-        if config.scheduling_mode:
-
-            if (config.scheduling_mode ==
-                    cellular_lib.LteSimulation.SchedulingMode.STATIC
-                    and not (config.dl_rbs and config.ul_rbs and config.dl_mcs
-                             and config.ul_mcs)):
-                raise ValueError('When the scheduling mode is set to manual, '
-                                 'the RB and MCS parameters are required.')
-
-            # If scheduling mode is set to Dynamic, the RB and MCS parameters
-            # will be ignored by set_scheduling_mode.
-            self.set_scheduling_mode(bts_index, config.scheduling_mode,
-                                     config.dl_mcs, config.ul_mcs,
-                                     config.dl_rbs, config.ul_rbs)
-        if config.mac_padding is not None:
-            self.set_mac_padding(bts_index, config.mac_padding)
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        raise NotImplementedError()
-
-    def set_band(self, bts_index, band):
-        """ Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            band: the new band
-        """
-        raise NotImplementedError()
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        raise NotImplementedError()
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        raise NotImplementedError()
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """ Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tdd_config: the new tdd configuration number
-        """
-        raise NotImplementedError()
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number
-        """
-        raise NotImplementedError()
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth
-        """
-        raise NotImplementedError()
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number
-        """
-        raise NotImplementedError()
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """ Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        raise NotImplementedError()
-
-    def set_transmission_mode(self, bts_index, transmission_mode):
-        """ Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            transmission_mode: the new transmission mode
-        """
-        raise NotImplementedError()
-
-    def set_scheduling_mode(self, bts_index, scheduling_mode, mcs_dl, mcs_ul,
-                            nrb_dl, nrb_ul):
-        """ Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            scheduling_mode: the new scheduling mode
-            mcs_dl: Downlink MCS (only for STATIC scheduling)
-            mcs_ul: Uplink MCS (only for STATIC scheduling)
-            nrb_dl: Number of RBs for downlink (only for STATIC scheduling)
-            nrb_ul: Number of RBs for uplink (only for STATIC scheduling)
-        """
-        raise NotImplementedError()
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        raise NotImplementedError()
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        raise NotImplementedError()
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        raise NotImplementedError()
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        raise NotImplementedError()
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        raise NotImplementedError()
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        raise NotImplementedError()
-
-    def set_drx_connected_mode(self, bts_index, active):
-        """ Sets the time interval to wait before entering DRX mode
-
-        Args:
-            bts_index: the base station number
-            active: Boolean indicating whether cDRX mode
-                is active
-        """
-        raise NotImplementedError()
-
-    def set_drx_on_duration_timer(self, bts_index, timer):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to wait and check for user data
-                after waking from the DRX cycle
-        """
-        raise NotImplementedError()
-
-    def set_drx_inactivity_timer(self, bts_index, timer):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            bts_index: the base station number
-            timer: The amount of time to wait before entering DRX mode
-        """
-        raise NotImplementedError()
-
-    def set_drx_retransmission_timer(self, bts_index, timer):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            bts_index: the base station number
-            timer: Number of PDCCH subframes to remain active
-
-        """
-        raise NotImplementedError()
-
-    def set_drx_long_cycle(self, bts_index, cycle):
-        """ Sets the amount of subframes representing a DRX long cycle.
-
-        Args:
-            bts_index: the base station number
-            cycle: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-        """
-        raise NotImplementedError()
-
-    def set_drx_long_cycle_offset(self, bts_index, offset):
-        """ Sets the offset used to determine the subframe number
-        to begin the long drx cycle
-
-        Args:
-            bts_index: the base station number
-            offset: Number in range 0 to (long cycle - 1)
-        """
-        raise NotImplementedError()
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-        raise NotImplementedError()
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError()
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError()
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError()
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        raise NotImplementedError()
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        raise NotImplementedError()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
-
-    def get_measured_pusch_power(self):
-        """ Queries PUSCH power measured at the callbox.
-
-        Returns:
-            The PUSCH power in the primary input port.
-        """
-        raise NotImplementedError()
-
-
-class CellularSimulatorError(Exception):
-    """ Exceptions thrown when the cellular equipment is unreachable or it
-    returns an error after receiving a command. """
diff --git a/src/antlion/controllers/chameleon_controller.py b/src/antlion/controllers/chameleon_controller.py
deleted file mode 100644
index 7f8ce1a..0000000
--- a/src/antlion/controllers/chameleon_controller.py
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import xmlrpc.client
-from subprocess import call
-
-from antlion import signals
-
-MOBLY_CONTROLLER_CONFIG_NAME = "ChameleonDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "chameleon_devices"
-
-CHAMELEON_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
-CHAMELEON_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-
-audio_bus_endpoints = {
-    'CROS_HEADPHONE': 'Cros device headphone',
-    'CROS_EXTERNAL_MICROPHONE': 'Cros device external microphone',
-    'PERIPHERAL_MICROPHONE': 'Peripheral microphone',
-    'PERIPHERAL_SPEAKER': 'Peripheral speaker',
-    'FPGA_LINEOUT': 'Chameleon FPGA line-out',
-    'FPGA_LINEIN': 'Chameleon FPGA line-in',
-    'BLUETOOTH_OUTPUT': 'Bluetooth module output',
-    'BLUETOOTH_INPUT': 'Bluetooth module input'
-}
-
-
-class ChameleonDeviceError(signals.ControllerError):
-    pass
-
-
-def create(configs):
-    if not configs:
-        raise ChameleonDeviceError(CHAMELEON_DEVICE_EMPTY_CONFIG_MSG)
-    elif not isinstance(configs, list):
-        raise ChameleonDeviceError(CHAMELEON_DEVICE_NOT_LIST_CONFIG_MSG)
-    elif isinstance(configs[0], str):
-        # Configs is a list of IP addresses
-        chameleons = get_instances(configs)
-    return chameleons
-
-
-def destroy(chameleons):
-    for chameleon in chameleons:
-        del chameleon
-
-
-def get_info(chameleons):
-    """Get information on a list of ChameleonDevice objects.
-
-    Args:
-        ads: A list of ChameleonDevice objects.
-
-    Returns:
-        A list of dict, each representing info for ChameleonDevice objects.
-    """
-    device_info = []
-    for chameleon in chameleons:
-        info = {"address": chameleon.address, "port": chameleon.port}
-        device_info.append(info)
-    return device_info
-
-
-def get_instances(ips):
-    """Create ChameleonDevice instances from a list of IPs.
-
-    Args:
-        ips: A list of Chameleon IPs.
-
-    Returns:
-        A list of ChameleonDevice objects.
-    """
-    return [ChameleonDevice(ip) for ip in ips]
-
-
-class ChameleonDevice:
-    """Class representing a Chameleon device.
-
-    Each object of this class represents one Chameleon device in ACTS.
-
-    Attributes:
-        address: The full address to contact the Chameleon device at
-        client: The ServiceProxy of the XMLRPC client.
-        log: A logger object.
-        port: The TCP port number of the Chameleon device.
-    """
-
-    def __init__(self, ip="", port=9992):
-        self.ip = ip
-        self.log = logging.getLogger()
-        self.port = port
-        self.address = "http://{}:{}".format(ip, self.port)
-        try:
-            self.client = xmlrpc.client.ServerProxy(self.address,
-                                                    allow_none=True,
-                                                    verbose=False)
-        except ConnectionRefusedError as err:
-            self.log.exception(
-                "Failed to connect to Chameleon Device at: {}".format(
-                    self.address))
-        self.client.Reset()
-
-    def pull_file(self, chameleon_location, destination):
-        """Pulls a file from the Chameleon device. Usually the raw audio file.
-
-        Args:
-            chameleon_location: The path to the file on the Chameleon device
-            destination: The destination to where to pull it locally.
-        """
-        # TODO: (tturney) implement
-        self.log.error("Definition not yet implemented")
-
-    def start_capturing_audio(self, port_id, has_file=True):
-        """Starts capturing audio.
-
-        Args:
-            port_id: The ID of the audio input port.
-            has_file: True for saving audio data to file. False otherwise.
-        """
-        self.client.StartCapturingAudio(port_id, has_file)
-
-    def stop_capturing_audio(self, port_id):
-        """Stops capturing audio.
-
-        Args:
-            port_id: The ID of the audio input port.
-        Returns:
-            List contain the location of the recorded audio and a dictionary
-            of values relating to the raw audio including: file_type, channel,
-            sample_format, and rate.
-        """
-        return self.client.StopCapturingAudio(port_id)
-
-    def audio_board_connect(self, bus_number, endpoint):
-        """Connects an endpoint to an audio bus.
-
-        Args:
-            bus_number: 1 or 2 for audio bus 1 or bus 2.
-            endpoint: An endpoint defined in audio_bus_endpoints.
-        """
-        self.client.AudioBoardConnect(bus_number, endpoint)
-
-    def audio_board_disconnect(self, bus_number, endpoint):
-        """Connects an endpoint to an audio bus.
-
-        Args:
-            bus_number: 1 or 2 for audio bus 1 or bus 2.
-            endpoint: An endpoint defined in audio_bus_endpoints.
-        """
-        self.client.AudioBoardDisconnect(bus_number, endpoint)
-
-    def audio_board_disable_bluetooth(self):
-        """Disables Bluetooth module on audio board."""
-        self.client.AudioBoardDisableBluetooth()
-
-    def audio_board_clear_routes(self, bus_number):
-        """Clears routes on an audio bus.
-
-        Args:
-            bus_number: 1 or 2 for audio bus 1 or bus 2.
-        """
-        self.client.AudioBoardClearRoutes(bus_number)
-
-    def scp(self, source, destination):
-        """Copies files from the Chameleon device to the host machine.
-
-        Args:
-            source: The file path on the Chameleon board.
-            dest: The file path on the host machine.
-        """
-        cmd = "scp root@{}:/{} {}".format(self.ip, source, destination)
-        try:
-            call(cmd.split(" "))
-        except FileNotFoundError as err:
-            self.log.exception("File not found {}".format(source))
diff --git a/src/antlion/controllers/fastboot.py b/src/antlion/controllers/fastboot.py
deleted file mode 100755
index 0b889fa..0000000
--- a/src/antlion/controllers/fastboot.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.libs.proc import job
-
-from antlion import error
-
-
-class FastbootError(error.ActsError):
-    """Raised when there is an error in fastboot operations."""
-
-    def __init__(self, cmd, stdout, stderr, ret_code):
-        super().__init__()
-        self.cmd = cmd
-        self.stdout = stdout
-        self.stderr = stderr
-        self.ret_code = ret_code
-
-    def __str__(self):
-        return ("Error executing fastboot cmd '%s'. ret: %d, stdout: %s,"
-                " stderr: %s") % (self.cmd, self.ret_code, self.stdout,
-                                  self.stderr)
-
-
-class FastbootProxy():
-    """Proxy class for fastboot.
-
-    For syntactic reasons, the '-' in fastboot commands need to be replaced
-    with '_'. Can directly execute fastboot commands on an object:
-    >> fb = FastbootProxy(<serial>)
-    >> fb.devices() # will return the console output of "fastboot devices".
-    """
-
-    def __init__(self, serial="", ssh_connection=None):
-        self.serial = serial
-        if serial:
-            self.fastboot_str = "fastboot -s {}".format(serial)
-        else:
-            self.fastboot_str = "fastboot"
-        self.ssh_connection = ssh_connection
-
-    def _exec_fastboot_cmd(self,
-                           name,
-                           arg_str,
-                           ignore_status=False,
-                           timeout=60):
-        command = ' '.join((self.fastboot_str, name, arg_str))
-        if self.ssh_connection:
-            result = self.connection.run(command,
-                                         ignore_status=True,
-                                         timeout=timeout)
-        else:
-            result = job.run(command, ignore_status=True, timeout=timeout)
-        ret, out, err = result.exit_status, result.stdout, result.stderr
-        # TODO: This is only a temporary workaround for b/34815412.
-        # fastboot getvar outputs to stderr instead of stdout
-        if "getvar" in command:
-            out = err
-        if ret == 0 or ignore_status:
-            return out
-        else:
-            raise FastbootError(cmd=command,
-                                stdout=out,
-                                stderr=err,
-                                ret_code=ret)
-
-    def args(self, *args, **kwargs):
-        return job.run(' '.join((self.fastboot_str, ) + args), **kwargs).stdout
-
-    def __getattr__(self, name):
-        def fastboot_call(*args, **kwargs):
-            clean_name = name.replace('_', '-')
-            arg_str = ' '.join(str(elem) for elem in args)
-            return self._exec_fastboot_cmd(clean_name, arg_str, **kwargs)
-
-        return fastboot_call
diff --git a/src/antlion/controllers/fuchsia_device.py b/src/antlion/controllers/fuchsia_device.py
deleted file mode 100644
index c0d62c7..0000000
--- a/src/antlion/controllers/fuchsia_device.py
+++ /dev/null
@@ -1,987 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from typing import Optional, List
-import json
-import logging
-import os
-import re
-import subprocess
-import textwrap
-import time
-
-from antlion import context
-from antlion import logger as acts_logger
-from antlion import signals
-from antlion import utils
-from antlion.controllers import pdu
-from antlion.controllers.fuchsia_lib.ffx import FFX
-from antlion.controllers.fuchsia_lib.lib_controllers.netstack_controller import NetstackController
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_controller import WlanController
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import WlanPolicyController
-from antlion.controllers.fuchsia_lib.package_server import PackageServer
-from antlion.controllers.fuchsia_lib.sl4f import SL4F
-from antlion.controllers.fuchsia_lib.ssh import DEFAULT_SSH_PORT, DEFAULT_SSH_PRIVATE_KEY, DEFAULT_SSH_USER, SSHConfig, SSHProvider, FuchsiaSSHError
-from antlion.controllers.fuchsia_lib.utils_lib import flash
-from antlion.libs.proc import job
-from antlion.utils import get_fuchsia_mdns_ipv6_address, get_interface_ip_addresses
-
-MOBLY_CONTROLLER_CONFIG_NAME = "FuchsiaDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "fuchsia_devices"
-
-FUCHSIA_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
-FUCHSIA_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-FUCHSIA_DEVICE_INVALID_CONFIG = ("Fuchsia device config must be either a str "
-                                 "or dict. abort! Invalid element %i in %r")
-FUCHSIA_DEVICE_NO_IP_MSG = "No IP address specified, abort!"
-FUCHSIA_COULD_NOT_GET_DESIRED_STATE = "Could not %s %s."
-FUCHSIA_INVALID_CONTROL_STATE = "Invalid control state (%s). abort!"
-
-FUCHSIA_TIME_IN_NANOSECONDS = 1000000000
-
-SL4F_APK_NAME = "com.googlecode.android_scripting"
-DAEMON_INIT_TIMEOUT_SEC = 1
-
-DAEMON_ACTIVATED_STATES = ["running", "start"]
-DAEMON_DEACTIVATED_STATES = ["stop", "stopped"]
-
-FUCHSIA_RECONNECT_AFTER_REBOOT_TIME = 5
-
-CHANNEL_OPEN_TIMEOUT = 5
-
-FUCHSIA_REBOOT_TYPE_SOFT = 'soft'
-FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH = 'flash'
-FUCHSIA_REBOOT_TYPE_HARD = 'hard'
-
-FUCHSIA_DEFAULT_CONNECT_TIMEOUT = 90
-FUCHSIA_DEFAULT_COMMAND_TIMEOUT = 60
-
-FUCHSIA_DEFAULT_CLEAN_UP_COMMAND_TIMEOUT = 15
-
-FUCHSIA_COUNTRY_CODE_TIMEOUT = 15
-FUCHSIA_DEFAULT_COUNTRY_CODE_US = 'US'
-
-MDNS_LOOKUP_RETRY_MAX = 3
-
-VALID_ASSOCIATION_MECHANISMS = {None, 'policy', 'drivers'}
-IP_ADDRESS_TIMEOUT = 15
-
-
-class FuchsiaDeviceError(signals.ControllerError):
-    pass
-
-
-class FuchsiaConfigError(signals.ControllerError):
-    """Incorrect FuchsiaDevice configuration."""
-
-
-def create(configs):
-    if not configs:
-        raise FuchsiaDeviceError(FUCHSIA_DEVICE_EMPTY_CONFIG_MSG)
-    elif not isinstance(configs, list):
-        raise FuchsiaDeviceError(FUCHSIA_DEVICE_NOT_LIST_CONFIG_MSG)
-    for index, config in enumerate(configs):
-        if isinstance(config, str):
-            configs[index] = {"ip": config}
-        elif not isinstance(config, dict):
-            raise FuchsiaDeviceError(FUCHSIA_DEVICE_INVALID_CONFIG %
-                                     (index, configs))
-    return get_instances(configs)
-
-
-def destroy(fds):
-    for fd in fds:
-        fd.clean_up()
-        del fd
-
-
-def get_info(fds):
-    """Get information on a list of FuchsiaDevice objects.
-
-    Args:
-        fds: A list of FuchsiaDevice objects.
-
-    Returns:
-        A list of dict, each representing info for FuchsiaDevice objects.
-    """
-    device_info = []
-    for fd in fds:
-        info = {"ip": fd.ip}
-        device_info.append(info)
-    return device_info
-
-
-def get_instances(fds_conf_data):
-    """Create FuchsiaDevice instances from a list of Fuchsia ips.
-
-    Args:
-        fds_conf_data: A list of dicts that contain Fuchsia device info.
-
-    Returns:
-        A list of FuchsiaDevice objects.
-    """
-
-    return [FuchsiaDevice(fd_conf_data) for fd_conf_data in fds_conf_data]
-
-
-class FuchsiaDevice:
-    """Class representing a Fuchsia device.
-
-    Each object of this class represents one Fuchsia device in ACTS.
-
-    Attributes:
-        ip: The full address or Fuchsia abstract name to contact the Fuchsia
-            device at
-        log: A logger object.
-        ssh_port: The SSH TCP port number of the Fuchsia device.
-        sl4f_port: The SL4F HTTP port number of the Fuchsia device.
-        ssh_config: The ssh_config for connecting to the Fuchsia device.
-    """
-
-    def __init__(self, fd_conf_data) -> None:
-        self.conf_data = fd_conf_data
-        if "ip" not in fd_conf_data:
-            raise FuchsiaDeviceError(FUCHSIA_DEVICE_NO_IP_MSG)
-        self.ip: str = fd_conf_data["ip"]
-        self.orig_ip: str = fd_conf_data["ip"]
-        self.sl4f_port: int = fd_conf_data.get("sl4f_port", 80)
-        self.ssh_username: str = fd_conf_data.get("ssh_username",
-                                                  DEFAULT_SSH_USER)
-        self.ssh_port: int = fd_conf_data.get("ssh_port", DEFAULT_SSH_PORT)
-
-        def expand(path: str) -> str:
-            return os.path.expandvars(os.path.expanduser(path))
-
-        def path_from_config(name: str,
-                             default: Optional[str] = None) -> Optional[str]:
-            path = fd_conf_data.get(name, default)
-            if not path:
-                return path
-            return expand(path)
-
-        def assert_exists(name: str, path: str) -> None:
-            if not path:
-                raise FuchsiaDeviceError(
-                    f'Please specify "${name}" in your configuration file')
-            if not os.path.exists(path):
-                raise FuchsiaDeviceError(
-                    f'Please specify a correct "${name}" in your configuration '
-                    f'file: "{path}" does not exist')
-
-        self.specific_image: Optional[str] = path_from_config("specific_image")
-        if self.specific_image:
-            assert_exists("specific_image", self.specific_image)
-
-        # Path to a tar.gz archive with pm and amber-files, as necessary for
-        # starting a package server.
-        self.packages_archive_path: Optional[str] = path_from_config(
-            "packages_archive_path", None)
-        if self.packages_archive_path:
-            assert_exists("packages_archive_path", self.packages_archive_path)
-
-        def required_path_from_config(name: str,
-                                      default: Optional[str] = None) -> str:
-            path = path_from_config(name, default)
-            assert_exists(name, path)
-            return path
-
-        self.ssh_priv_key: str = required_path_from_config(
-            "ssh_priv_key", DEFAULT_SSH_PRIVATE_KEY)
-        self.authorized_file: str = required_path_from_config(
-            "authorized_file_loc", f'{self.ssh_priv_key}.pub')
-        self.ffx_binary_path: str = required_path_from_config(
-            "ffx_binary_path", "${FUCHSIA_DIR}/.jiri_root/bin/ffx")
-
-        self.serial_number: Optional[str] = fd_conf_data.get(
-            "serial_number", None)
-        self.device_type: Optional[str] = fd_conf_data.get("device_type", None)
-        self.product_type: Optional[str] = fd_conf_data.get(
-            "product_type", None)
-        self.board_type: Optional[str] = fd_conf_data.get("board_type", None)
-        self.build_number: Optional[str] = fd_conf_data.get(
-            "build_number", None)
-        self.build_type: Optional[str] = fd_conf_data.get("build_type", None)
-
-        self.mdns_name: Optional[str] = fd_conf_data.get("mdns_name", None)
-
-        self.hard_reboot_on_fail: bool = fd_conf_data.get(
-            "hard_reboot_on_fail", False)
-        self.take_bug_report_on_fail: bool = fd_conf_data.get(
-            "take_bug_report_on_fail", False)
-        self.device_pdu_config = fd_conf_data.get("PduDevice", None)
-        self.config_country_code: str = fd_conf_data.get(
-            'country_code', FUCHSIA_DEFAULT_COUNTRY_CODE_US).upper()
-
-        output_path = context.get_current_context().get_base_output_path()
-        self.ssh_config = os.path.join(output_path,
-                                       "ssh_config_{}".format(self.ip))
-        self._generate_ssh_config(self.ssh_config)
-
-        # WLAN interface info is populated inside configure_wlan
-        self.wlan_client_interfaces = {}
-        self.wlan_ap_interfaces = {}
-        self.wlan_client_test_interface_name = fd_conf_data.get(
-            'wlan_client_test_interface', None)
-        self.wlan_ap_test_interface_name = fd_conf_data.get(
-            'wlan_ap_test_interface', None)
-        self.wlan_features: List[str] = fd_conf_data.get('wlan_features', [])
-
-        # Whether to use 'policy' or 'drivers' for WLAN connect/disconnect calls
-        # If set to None, wlan is not configured.
-        self.association_mechanism = None
-        # Defaults to policy layer, unless otherwise specified in the config
-        self.default_association_mechanism = fd_conf_data.get(
-            'association_mechanism', 'policy')
-
-        # Whether to clear and preserve existing saved networks and client
-        # connections state, to be restored at device teardown.
-        self.default_preserve_saved_networks = fd_conf_data.get(
-            'preserve_saved_networks', True)
-
-        if not utils.is_valid_ipv4_address(
-                self.ip) and not utils.is_valid_ipv6_address(self.ip):
-            mdns_ip = None
-            for retry_counter in range(MDNS_LOOKUP_RETRY_MAX):
-                mdns_ip = get_fuchsia_mdns_ipv6_address(self.ip)
-                if mdns_ip:
-                    break
-                else:
-                    time.sleep(1)
-            if mdns_ip and utils.is_valid_ipv6_address(mdns_ip):
-                # self.ip was actually an mdns name. Use it for self.mdns_name
-                # unless one was explicitly provided.
-                self.mdns_name = self.mdns_name or self.ip
-                self.ip = mdns_ip
-            else:
-                raise ValueError('Invalid IP: %s' % self.ip)
-
-        self.log = acts_logger.create_tagged_trace_logger(
-            "FuchsiaDevice | %s" % self.orig_ip)
-
-        self.ping_rtt_match = re.compile(r'RTT Min/Max/Avg '
-                                         r'= \[ (.*?) / (.*?) / (.*?) \] ms')
-        self.serial = re.sub('[.:%]', '_', self.ip)
-        log_path_base = getattr(logging, 'log_path', '/tmp/logs')
-        self.log_path = os.path.join(log_path_base,
-                                     'FuchsiaDevice%s' % self.serial)
-        self.fuchsia_log_file_path = os.path.join(
-            self.log_path, "fuchsialog_%s_debug.txt" % self.serial)
-        self.log_process = None
-        self.package_server = None
-
-        self.init_controllers()
-
-    @property
-    def sl4f(self):
-        """Get the sl4f module configured for this device.
-
-        The sl4f module uses lazy-initialization; it will initialize an sl4f
-        server on the host device when it is required.
-        """
-        if not hasattr(self, '_sl4f'):
-            self._sl4f = SL4F(self.ssh, self.sl4f_port)
-            self.log.info('Started SL4F server')
-        return self._sl4f
-
-    @sl4f.deleter
-    def sl4f(self):
-        if not hasattr(self, '_sl4f'):
-            return
-        self.log.debug('Cleaning up SL4F')
-        del self._sl4f
-
-    @property
-    def ssh(self):
-        """Get the SSH provider module configured for this device."""
-        if not hasattr(self, '_ssh'):
-            if not self.ssh_port:
-                raise FuchsiaConfigError(
-                    'Must provide "ssh_port: <int>" in the device config')
-            if not self.ssh_priv_key:
-                raise FuchsiaConfigError(
-                    'Must provide "ssh_priv_key: <file path>" in the device config'
-                )
-            self._ssh = SSHProvider(
-                SSHConfig(self.ip, self.ssh_priv_key, port=self.ssh_port))
-        return self._ssh
-
-    @ssh.deleter
-    def ssh(self):
-        if not hasattr(self, '_ssh'):
-            return
-        self.log.debug('Cleaning up SSH')
-        del self._ssh
-
-    @property
-    def ffx(self):
-        """Get the ffx module configured for this device.
-
-        The ffx module uses lazy-initialization; it will initialize an ffx
-        connection to the device when it is required.
-
-        If ffx needs to be reinitialized, delete the "ffx" property and attempt
-        access again. Note re-initialization will interrupt any running ffx
-        calls.
-        """
-        if not hasattr(self, '_ffx'):
-            if not self.mdns_name:
-                raise FuchsiaConfigError(
-                    'Must provide "mdns_name: <device mDNS name>" in the device config'
-                )
-            self._ffx = FFX(self.ffx_binary_path, self.mdns_name, self.ip,
-                            self.ssh_priv_key)
-        return self._ffx
-
-    @ffx.deleter
-    def ffx(self):
-        if not hasattr(self, '_ffx'):
-            return
-        self.log.debug('Cleaning up ffx')
-        self._ffx.clean_up()
-        del self._ffx
-
-    def _generate_ssh_config(self, file_path: str):
-        """Generate and write an SSH config for Fuchsia to disk.
-
-        Args:
-            file_path: Path to write the generated SSH config
-        """
-        content = textwrap.dedent(f"""\
-            Host *
-                CheckHostIP no
-                StrictHostKeyChecking no
-                ForwardAgent no
-                ForwardX11 no
-                GSSAPIDelegateCredentials no
-                UserKnownHostsFile /dev/null
-                User fuchsia
-                IdentitiesOnly yes
-                IdentityFile {self.ssh_priv_key}
-                ControlPersist yes
-                ControlMaster auto
-                ControlPath /tmp/fuchsia--%r@%h:%p
-                ServerAliveInterval 1
-                ServerAliveCountMax 1
-                LogLevel ERROR
-            """)
-
-        with open(file_path, 'w') as file:
-            file.write(content)
-
-    def init_controllers(self):
-        # Contains Netstack functions
-        self.netstack_controller = NetstackController(self)
-
-        # Contains WLAN core functions
-        self.wlan_controller = WlanController(self)
-
-        # Contains WLAN policy functions like save_network, remove_network, etc
-        self.wlan_policy_controller = WlanPolicyController(self.sl4f, self.ffx)
-
-    def start_package_server(self):
-        if not self.packages_archive_path:
-            self.log.warn(
-                "packages_archive_path is not specified. "
-                "Assuming a package server is already running and configured on "
-                "the DUT. If this is not the case, either run your own package "
-                "server, or configure these fields appropriately. "
-                "This is usually required for the Fuchsia iPerf3 client or "
-                "other testing utilities not on device cache.")
-            return
-        if self.package_server:
-            self.log.warn(
-                "Skipping to start the package server since is already running"
-            )
-            return
-
-        self.package_server = PackageServer(self.packages_archive_path)
-        self.package_server.start()
-        self.package_server.configure_device(self.ssh)
-
-    def run_commands_from_config(self, cmd_dicts):
-        """Runs commands on the Fuchsia device from the config file. Useful for
-        device and/or Fuchsia specific configuration.
-
-        Args:
-            cmd_dicts: list of dictionaries containing the following
-                'cmd': string, command to run on device
-                'timeout': int, seconds to wait for command to run (optional)
-                'skip_status_code_check': bool, disregard errors if true
-
-        Raises:
-            FuchsiaDeviceError: if any of the commands return a non-zero status
-                code and skip_status_code_check is false or undefined.
-        """
-        for cmd_dict in cmd_dicts:
-            try:
-                cmd = cmd_dict['cmd']
-            except KeyError:
-                raise FuchsiaDeviceError(
-                    'To run a command via config, you must provide key "cmd" '
-                    'containing the command string.')
-
-            timeout = cmd_dict.get('timeout', FUCHSIA_DEFAULT_COMMAND_TIMEOUT)
-            # Catch both boolean and string values from JSON
-            skip_status_code_check = 'true' == str(
-                cmd_dict.get('skip_status_code_check', False)).lower()
-
-            if skip_status_code_check:
-                self.log.info(f'Running command "{cmd}" and ignoring result.')
-            else:
-                self.log.info(f'Running command "{cmd}".')
-
-            try:
-                result = self.ssh.run(cmd, timeout_sec=timeout)
-                self.log.debug(result)
-            except FuchsiaSSHError as e:
-                if not skip_status_code_check:
-                    raise FuchsiaDeviceError(
-                        'Failed device specific commands for initial configuration'
-                    ) from e
-
-    def configure_wlan(self,
-                       association_mechanism=None,
-                       preserve_saved_networks=None):
-        """
-        Readies device for WLAN functionality. If applicable, connects to the
-        policy layer and clears/saves preexisting saved networks.
-
-        Args:
-            association_mechanism: string, 'policy' or 'drivers'. If None, uses
-                the default value from init (can be set by ACTS config)
-            preserve_saved_networks: bool, whether to clear existing saved
-                networks, and preserve them for restoration later. If None, uses
-                the default value from init (can be set by ACTS config)
-
-        Raises:
-            FuchsiaDeviceError, if configuration fails
-        """
-
-        # Set the country code US by default, or country code provided
-        # in ACTS config
-        self.configure_regulatory_domain(self.config_country_code)
-
-        # If args aren't provided, use the defaults, which can be set in the
-        # config.
-        if association_mechanism is None:
-            association_mechanism = self.default_association_mechanism
-        if preserve_saved_networks is None:
-            preserve_saved_networks = self.default_preserve_saved_networks
-
-        if association_mechanism not in VALID_ASSOCIATION_MECHANISMS:
-            raise FuchsiaDeviceError(
-                'Invalid FuchsiaDevice association_mechanism: %s' %
-                association_mechanism)
-
-        # Allows for wlan to be set up differently in different tests
-        if self.association_mechanism:
-            self.log.info('Deconfiguring WLAN')
-            self.deconfigure_wlan()
-
-        self.association_mechanism = association_mechanism
-
-        self.log.info('Configuring WLAN w/ association mechanism: %s' %
-                      association_mechanism)
-        if association_mechanism == 'drivers':
-            self.log.warn(
-                'You may encounter unusual device behavior when using the '
-                'drivers directly for WLAN. This should be reserved for '
-                'debugging specific issues. Normal test runs should use the '
-                'policy layer.')
-            if preserve_saved_networks:
-                self.log.warn(
-                    'Unable to preserve saved networks when using drivers '
-                    'association mechanism (requires policy layer control).')
-        else:
-            # This requires SL4F calls, so it can only happen with actual
-            # devices, not with unit tests.
-            self.wlan_policy_controller.configure_wlan(preserve_saved_networks)
-
-        # Retrieve WLAN client and AP interfaces
-        self.wlan_controller.update_wlan_interfaces()
-
-    def deconfigure_wlan(self):
-        """
-        Stops WLAN functionality (if it has been started). Used to allow
-        different tests to use WLAN differently (e.g. some tests require using
-        wlan policy, while the abstract wlan_device can be setup to use policy
-        or drivers)
-
-        Raises:
-            FuchsiaDeviveError, if deconfigure fails.
-        """
-        if not self.association_mechanism:
-            self.log.debug(
-                'WLAN not configured before deconfigure was called.')
-            return
-        # If using policy, stop client connections. Otherwise, just clear
-        # variables.
-        if self.association_mechanism != 'drivers':
-            self.wlan_policy_controller._deconfigure_wlan()
-        self.association_mechanism = None
-
-    def reboot(self,
-               use_ssh: bool = False,
-               unreachable_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
-               ping_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
-               ssh_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
-               reboot_type: int = FUCHSIA_REBOOT_TYPE_SOFT,
-               testbed_pdus: List[pdu.PduDevice] = None) -> None:
-        """Reboot a FuchsiaDevice.
-
-        Soft reboots the device, verifies it becomes unreachable, then verifies
-        it comes back online. Re-initializes services so the tests can continue.
-
-        Args:
-            use_ssh: if True, use fuchsia shell command via ssh to reboot
-                instead of SL4F.
-            unreachable_timeout: time to wait for device to become unreachable.
-            ping_timeout:time to wait for device to respond to pings.
-            ssh_timeout: time to wait for device to be reachable via ssh.
-            reboot_type: 'soft', 'hard' or 'flash'.
-            testbed_pdus: all testbed PDUs.
-
-        Raises:
-            ConnectionError, if device fails to become unreachable or fails to
-                come back up.
-        """
-        if reboot_type == FUCHSIA_REBOOT_TYPE_SOFT:
-            if use_ssh:
-                self.log.info('Soft rebooting via SSH')
-                try:
-                    self.ssh.run(
-                        'dm reboot',
-                        timeout_sec=FUCHSIA_RECONNECT_AFTER_REBOOT_TIME)
-                except FuchsiaSSHError as e:
-                    if 'closed by remote host' not in e.result.stderr:
-                        raise e
-            else:
-                self.log.info('Soft rebooting via SL4F')
-                self.sl4f.hardware_power_statecontrol_lib.suspendReboot(
-                    timeout=3)
-            self._check_unreachable(timeout_sec=unreachable_timeout)
-
-        elif reboot_type == FUCHSIA_REBOOT_TYPE_HARD:
-            self.log.info('Hard rebooting via PDU')
-            if not testbed_pdus:
-                raise AttributeError('Testbed PDUs must be supplied '
-                                     'to hard reboot a fuchsia_device.')
-            device_pdu, device_pdu_port = pdu.get_pdu_port_for_device(
-                self.device_pdu_config, testbed_pdus)
-            self.log.info('Killing power to FuchsiaDevice')
-            device_pdu.off(str(device_pdu_port))
-            self._check_unreachable(timeout_sec=unreachable_timeout)
-            self.log.info('Restoring power to FuchsiaDevice')
-            device_pdu.on(str(device_pdu_port))
-
-        elif reboot_type == FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH:
-            flash(self, use_ssh, FUCHSIA_RECONNECT_AFTER_REBOOT_TIME)
-
-        else:
-            raise ValueError('Invalid reboot type: %s' % reboot_type)
-
-        self._check_reachable(timeout_sec=ping_timeout)
-
-        # Cleanup services
-        self.stop_services()
-
-        self.log.info('Waiting for device to allow ssh connection.')
-        end_time = time.time() + ssh_timeout
-        while time.time() < end_time:
-            try:
-                self.ssh.run('echo')
-            except Exception as e:
-                self.log.debug(f'Retrying SSH to device. Details: {e}')
-            else:
-                break
-        else:
-            raise ConnectionError('Failed to connect to device via SSH.')
-        self.log.info('Device now available via ssh.')
-
-        # TODO (b/246852449): Move configure_wlan to other controllers.
-        # If wlan was configured before reboot, it must be configured again
-        # after rebooting, as it was before reboot. No preserving should occur.
-        if self.association_mechanism:
-            pre_reboot_association_mechanism = self.association_mechanism
-            # Prevent configure_wlan from thinking it needs to deconfigure first
-            self.association_mechanism = None
-            self.configure_wlan(
-                association_mechanism=pre_reboot_association_mechanism,
-                preserve_saved_networks=False)
-
-        self.log.info('Device has rebooted')
-
-    def version(self):
-        """Returns the version of Fuchsia running on the device.
-
-        Returns:
-            A string containing the Fuchsia version number or nothing if there
-            is no version information attached during the build.
-            For example, "5.20210713.2.1" or "".
-
-        Raises:
-            FFXTimeout: when the command times out.
-            FFXError: when the command returns non-zero and skip_status_code_check is False.
-        """
-        target_info_json = self.ffx.run("target show --json").stdout
-        target_info = json.loads(target_info_json)
-        build_info = [
-            entry for entry in target_info if entry["label"] == "build"
-        ]
-        if len(build_info) != 1:
-            self.log.warning(
-                f'Expected one entry with label "build", found {build_info}')
-            return ""
-        version_info = [
-            child for child in build_info[0]["child"]
-            if child["label"] == "version"
-        ]
-        if len(version_info) != 1:
-            self.log.warning(
-                f'Expected one entry child with label "version", found {build_info}'
-            )
-            return ""
-        return version_info[0]["value"]
-
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=25,
-             additional_ping_params=None):
-        """Pings from a Fuchsia device to an IPv4 address or hostname
-
-        Args:
-            dest_ip: (str) The ip or hostname to ping.
-            count: (int) How many icmp packets to send.
-            interval: (int) How long to wait between pings (ms)
-            timeout: (int) How long to wait before having the icmp packet
-                timeout (ms).
-            size: (int) Size of the icmp packet.
-            additional_ping_params: (str) command option flags to
-                append to the command string
-
-        Returns:
-            A dictionary for the results of the ping.  The dictionary contains
-            the following items:
-                status: Whether the ping was successful.
-                rtt_min: The minimum round trip time of the ping.
-                rtt_max: The minimum round trip time of the ping.
-                rtt_avg: The avg round trip time of the ping.
-                stdout: The standard out of the ping command.
-                stderr: The standard error of the ping command.
-        """
-        rtt_min = None
-        rtt_max = None
-        rtt_avg = None
-        self.log.debug("Pinging %s..." % dest_ip)
-        if not additional_ping_params:
-            additional_ping_params = ''
-
-        try:
-            ping_result = self.ssh.run(
-                f'ping -c {count} -i {interval} -t {timeout} -s {size} '
-                f'{additional_ping_params} {dest_ip}')
-        except FuchsiaSSHError as e:
-            ping_result = e.result
-
-        if ping_result.stderr:
-            status = False
-        else:
-            status = True
-            rtt_line = ping_result.stdout.split('\n')[:-1]
-            rtt_line = rtt_line[-1]
-            rtt_stats = re.search(self.ping_rtt_match, rtt_line)
-            rtt_min = rtt_stats.group(1)
-            rtt_max = rtt_stats.group(2)
-            rtt_avg = rtt_stats.group(3)
-        return {
-            'status': status,
-            'rtt_min': rtt_min,
-            'rtt_max': rtt_max,
-            'rtt_avg': rtt_avg,
-            'stdout': ping_result.stdout,
-            'stderr': ping_result.stderr
-        }
-
-    def can_ping(self,
-                 dest_ip,
-                 count=1,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
-        """Returns whether fuchsia device can ping a given dest address"""
-        ping_result = self.ping(dest_ip,
-                                count=count,
-                                interval=interval,
-                                timeout=timeout,
-                                size=size,
-                                additional_ping_params=additional_ping_params)
-        return ping_result['status']
-
-    def clean_up(self):
-        """Cleans up the FuchsiaDevice object, releases any resources it
-        claimed, and restores saved networks if applicable. For reboots, use
-        clean_up_services only.
-
-        Note: Any exceptions thrown in this method must be caught and handled,
-        ensuring that clean_up_services is run. Otherwise, the syslog listening
-        thread will never join and will leave tests hanging.
-        """
-        # If and only if wlan is configured, and using the policy layer
-        if self.association_mechanism == 'policy':
-            try:
-                self.wlan_policy_controller.clean_up()
-            except Exception as err:
-                self.log.warning('Unable to clean up WLAN Policy layer: %s' %
-                                 err)
-
-        self.stop_services()
-
-        if self.package_server:
-            self.package_server.clean_up()
-
-    def get_interface_ip_addresses(self, interface):
-        return get_interface_ip_addresses(self, interface)
-
-    def wait_for_ipv4_addr(self, interface: str) -> None:
-        """Checks if device has an ipv4 private address. Sleeps 1 second between
-        retries.
-
-        Args:
-            interface: name of interface from which to get ipv4 address.
-
-        Raises:
-            ConnectionError, if device does not have an ipv4 address after all
-            timeout.
-        """
-        self.log.info(
-            f'Checking for valid ipv4 addr. Retry {IP_ADDRESS_TIMEOUT} seconds.'
-        )
-        timeout = time.time() + IP_ADDRESS_TIMEOUT
-        while time.time() < timeout:
-            ip_addrs = self.get_interface_ip_addresses(interface)
-
-            if len(ip_addrs['ipv4_private']) > 0:
-                self.log.info("Device has an ipv4 address: "
-                              f"{ip_addrs['ipv4_private'][0]}")
-                break
-            else:
-                self.log.debug(
-                    'Device does not yet have an ipv4 address...retrying in 1 '
-                    'second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError('Device failed to get an ipv4 address.')
-
-    def wait_for_ipv6_addr(self, interface: str) -> None:
-        """Checks if device has an ipv6 private local address. Sleeps 1 second
-        between retries.
-
-        Args:
-            interface: name of interface from which to get ipv6 address.
-
-        Raises:
-            ConnectionError, if device does not have an ipv6 address after all
-            timeout.
-        """
-        self.log.info(
-            f'Checking for valid ipv6 addr. Retry {IP_ADDRESS_TIMEOUT} seconds.'
-        )
-        timeout = time.time() + IP_ADDRESS_TIMEOUT
-        while time.time() < timeout:
-            ip_addrs = self.get_interface_ip_addresses(interface)
-            if len(ip_addrs['ipv6_private_local']) > 0:
-                self.log.info("Device has an ipv6 private local address: "
-                              f"{ip_addrs['ipv6_private_local'][0]}")
-                break
-            else:
-                self.log.debug(
-                    'Device does not yet have an ipv6 address...retrying in 1 '
-                    'second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError('Device failed to get an ipv6 address.')
-
-    def _check_reachable(self,
-                         timeout_sec: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT
-                         ) -> None:
-        """Checks the reachability of the Fuchsia device."""
-        end_time = time.time() + timeout_sec
-        self.log.info('Verifying device is reachable.')
-        while time.time() < end_time:
-            # TODO (b/249343632): Consolidate ping commands and fix timeout in
-            # utils.can_ping.
-            if utils.can_ping(job, self.ip):
-                self.log.info('Device is reachable.')
-                break
-            else:
-                self.log.debug(
-                    'Device is not reachable. Retrying in 1 second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError('Device is unreachable.')
-
-    def _check_unreachable(self,
-                           timeout_sec: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT
-                           ) -> None:
-        """Checks the Fuchsia device becomes unreachable."""
-        end_time = time.time() + timeout_sec
-        self.log.info('Verifying device is unreachable.')
-        while (time.time() < end_time):
-            if utils.can_ping(job, self.ip):
-                self.log.debug(
-                    'Device is still reachable. Retrying in 1 second.')
-                time.sleep(1)
-            else:
-                self.log.info('Device is not reachable.')
-                break
-        else:
-            raise ConnectionError('Device failed to become unreachable.')
-
-    def check_connect_response(self, connect_response):
-        if connect_response.get("error") is None:
-            # Checks the response from SL4F and if there is no error, check
-            # the result.
-            connection_result = connect_response.get("result")
-            if not connection_result:
-                # Ideally the error would be present but just outputting a log
-                # message until available.
-                self.log.debug("Connect call failed, aborting!")
-                return False
-            else:
-                # Returns True if connection was successful.
-                return True
-        else:
-            # the response indicates an error - log and raise failure
-            self.log.debug("Aborting! - Connect call failed with error: %s" %
-                           connect_response.get("error"))
-            return False
-
-    def check_disconnect_response(self, disconnect_response):
-        if disconnect_response.get("error") is None:
-            # Returns True if disconnect was successful.
-            return True
-        else:
-            # the response indicates an error - log and raise failure
-            self.log.debug("Disconnect call failed with error: %s" %
-                           disconnect_response.get("error"))
-            return False
-
-    # TODO(fxb/64657): Determine more stable solution to country code config on
-    # device bring up.
-    def configure_regulatory_domain(self, desired_country_code):
-        """Allows the user to set the device country code via ACTS config
-
-        Usage:
-            In FuchsiaDevice config, add "country_code": "<CC>"
-        """
-        # Country code can be None, from antlion config.
-        if desired_country_code:
-            desired_country_code = desired_country_code.upper()
-            response = self.sl4f.regulatory_region_lib.setRegion(
-                desired_country_code)
-            if response.get('error'):
-                raise FuchsiaDeviceError(
-                    'Failed to set regulatory domain. Err: %s' %
-                    response['error'])
-
-            phy_list_response = self.sl4f.wlan_lib.wlanPhyIdList()
-            if phy_list_response.get('error'):
-                raise FuchsiaDeviceError(
-                    f'Failed to get phy list. Err: {response["error"]}')
-            phy_list = phy_list_response.get('result')
-            if not phy_list:
-                raise FuchsiaDeviceError('No phy available in phy list')
-            phy_id = phy_list[0]
-
-            end_time = time.time() + FUCHSIA_COUNTRY_CODE_TIMEOUT
-            while time.time() < end_time:
-                ascii_cc = self.sl4f.wlan_lib.wlanGetCountry(phy_id).get(
-                    'result')
-                # Convert ascii_cc to string, then compare
-                if ascii_cc and (''.join(chr(c) for c in ascii_cc).upper()
-                                 == desired_country_code):
-                    self.log.debug('Country code successfully set to %s.' %
-                                   desired_country_code)
-                    return
-                self.log.debug('Country code not yet updated. Retrying.')
-                time.sleep(1)
-            raise FuchsiaDeviceError('Country code never updated to %s' %
-                                     desired_country_code)
-
-    def stop_services(self):
-        """Stops the ffx daemon and deletes SL4F property."""
-        self.log.info('Stopping host device services.')
-        del self.sl4f
-        del self.ffx
-
-    def load_config(self, config):
-        pass
-
-    def take_bug_report(self, test_name=None, begin_time=None):
-        """Takes a bug report on the device and stores it in a file.
-
-        Args:
-            test_name: DEPRECATED. Do not specify this argument; it is only used
-                for logging. Name of the test case that triggered this bug
-                report.
-            begin_time: DEPRECATED. Do not specify this argument; it allows
-                overwriting of bug reports when this function is called several
-                times in one test. Epoch time when the test started. If not
-                specified, the current time will be used.
-        """
-        if test_name:
-            self.log.info(
-                f"Taking snapshot of {self.mdns_name} for {test_name}")
-        else:
-            self.log.info(f"Taking snapshot of {self.mdns_name}")
-
-        epoch = begin_time if begin_time else utils.get_current_epoch_time()
-        time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(epoch))
-        out_dir = context.get_current_context().get_full_output_path()
-        out_path = os.path.join(out_dir, f'{self.mdns_name}_{time_stamp}.zip')
-
-        try:
-            subprocess.run(
-                [f"ssh -F {self.ssh_config} {self.ip} snapshot > {out_path}"],
-                shell=True)
-            self.log.info(f'Snapshot saved to {out_path}')
-        except Exception as err:
-            self.log.error(f'Failed to take snapshot: {err}')
-
-    def take_bt_snoop_log(self, custom_name=None):
-        """Takes a the bt-snoop log from the device and stores it in a file
-        in a pcap format.
-        """
-        bt_snoop_path = context.get_current_context().get_full_output_path()
-        time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(time.time()))
-        out_name = "FuchsiaDevice%s_%s" % (
-            self.serial, time_stamp.replace(" ", "_").replace(":", "-"))
-        out_name = "%s.pcap" % out_name
-        if custom_name:
-            out_name = "%s_%s.pcap" % (self.serial, custom_name)
-        else:
-            out_name = "%s.pcap" % out_name
-        full_out_path = os.path.join(bt_snoop_path, out_name)
-        bt_snoop_data = self.ssh.run('bt-snoop-cli -d -f pcap').raw_stdout
-        bt_snoop_file = open(full_out_path, 'wb')
-        bt_snoop_file.write(bt_snoop_data)
-        bt_snoop_file.close()
diff --git a/src/antlion/controllers/fuchsia_lib/audio_lib.py b/src/antlion/controllers/fuchsia_lib/audio_lib.py
deleted file mode 100644
index 02d974d..0000000
--- a/src/antlion/controllers/fuchsia_lib/audio_lib.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import logger
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-import base64
-
-
-class FuchsiaAudioLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "audio")
-
-    def startOutputSave(self):
-        """Starts saving audio output on the device
-
-        Returns:
-            Dictionary is success, error if error.
-        """
-        test_cmd = "audio_facade.StartOutputSave"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def stopOutputSave(self):
-        """Stops saving audio output on the device
-
-        Returns:
-            Dictionary is success, error if error.
-        """
-        test_cmd = "audio_facade.StopOutputSave"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getOutputAudio(self, save_path):
-        """Gets the saved audio in base64 encoding. Use base64.b64decode.
-
-        Args:
-            save_path: The path to save the raw audio
-
-        Returns:
-            True if success, False if error.
-        """
-        test_cmd = "audio_facade.GetOutputAudio"
-        test_args = {}
-
-        result = self.send_command(test_cmd, test_args)
-        if result.get("error") is not None:
-            self.log.error("Failed to get recorded audio.")
-            return False
-
-        f = open(save_path, "wb")
-        f.write(base64.b64decode(result.get('result')))
-        f.close()
-        self.log.info("Raw audio file captured at {}".format(save_path))
-        return True
diff --git a/src/antlion/controllers/fuchsia_lib/base_lib.py b/src/antlion/controllers/fuchsia_lib/base_lib.py
deleted file mode 100644
index 42da2ea..0000000
--- a/src/antlion/controllers/fuchsia_lib/base_lib.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import socket
-
-from typing import Any, Mapping
-from urllib.parse import urlparse
-from urllib.request import Request, urlopen
-
-from antlion import logger, utils
-from antlion.libs.proc import job
-
-DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC = 30
-
-
-class DeviceOffline(Exception):
-    """Exception if the device is no longer reachable via the network."""
-
-
-class SL4FCommandFailed(Exception):
-    """A SL4F command to the server failed."""
-
-
-class BaseLib():
-
-    def __init__(self, addr: str, logger_tag: str) -> None:
-        self.address = addr
-        self.log = logger.create_tagged_trace_logger(f"SL4F | {self.address} | {logger_tag}")
-
-    def send_command(
-        self,
-        cmd: str,
-        args: Mapping[str, Any],
-        response_timeout: int = DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC
-    ) -> Mapping[str, Any]:
-        """Builds and sends a JSON command to SL4F server.
-
-        Args:
-            cmd: SL4F method name of command.
-            args: Arguments required to execute cmd.
-            response_timeout: Seconds to wait for a response before
-                throwing an exception.
-
-        Returns:
-            Response from SL4F server.
-
-        Throws:
-            TimeoutError: The HTTP request timed out waiting for a response
-        """
-        data = {
-            "jsonrpc": "2.0",
-            # id is required by the SL4F server to parse test_data but is not
-            # currently used.
-            "id": "",
-            "method": cmd,
-            "params": args
-        }
-        data_json = json.dumps(data).encode("utf-8")
-        req = Request(self.address,
-                      data=data_json,
-                      headers={
-                          "Content-Type": "application/json; charset=utf-8",
-                          "Content-Length": len(data_json),
-                      })
-
-        self.log.debug(f'Sending request "{cmd}" with {args}')
-        try:
-            response = urlopen(req, timeout=response_timeout)
-        except (TimeoutError, socket.timeout) as e:
-            host = urlparse(self.address).hostname
-            if not utils.can_ping(job, host):
-                raise DeviceOffline(
-                    f'FuchsiaDevice {host} is not reachable via the network.')
-            if type(e) == socket.timeout:
-                # socket.timeout was aliased to TimeoutError in Python 3.10. For
-                # older versions of Python, we need to cast to TimeoutError to
-                # provide a version-agnostic API.
-                raise TimeoutError("socket timeout") from e
-            raise e
-
-        response_body = response.read().decode("utf-8")
-        try:
-            response_json = json.loads(response_body)
-            self.log.debug(f'Received response for "{cmd}": {response_json}')
-        except json.JSONDecodeError as e:
-            raise SL4FCommandFailed(response_body) from e
-
-        # If the SL4F command fails it returns a str, without an 'error' field
-        # to get.
-        if not isinstance(response_json, dict):
-            raise SL4FCommandFailed(response_json)
-
-        return response_json
diff --git a/src/antlion/controllers/fuchsia_lib/basemgr_lib.py b/src/antlion/controllers/fuchsia_lib/basemgr_lib.py
deleted file mode 100644
index 9e50e1e..0000000
--- a/src/antlion/controllers/fuchsia_lib/basemgr_lib.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-COMMAND_RESTART_SESSION = 'basemgr_facade.RestartSession'
-COMMAND_START_BASEMGR = 'basemgr_facade.StartBasemgr'
-COMMAND_KILL_BASEMGR = 'basemgr_facade.KillBasemgr'
-
-
-class FuchsiaBasemgrLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "basemgr")
-
-    def restartSession(self):
-        """Restarts an ongoing basemgr session
-
-        Returns:
-            Dictionary:
-                error: None, unless an error occurs
-                result: 'Success', 'NoSessionToRestart', or None if error
-        """
-        test_cmd = COMMAND_RESTART_SESSION
-
-        return self.send_command(test_cmd, {})
-
-    def startBasemgr(self):
-        """Starts basemgr service
-
-        Returns:
-            Dictionary:
-                error: None, unless an error occurs
-                result: 'Success' or None if error
-        """
-        test_cmd = COMMAND_START_BASEMGR
-
-        return self.send_command(test_cmd, {})
-
-    def killBasemgr(self):
-        """Kill basemgr service, if one is running
-
-        Returns:
-            Dictionary:
-                error: None, unless an error occurs
-                result: 'Success', 'NoBasemgrToKill', or None if error
-        """
-        test_cmd = COMMAND_KILL_BASEMGR
-
-        return self.send_command(test_cmd, {})
diff --git a/src/antlion/controllers/fuchsia_lib/bt/__init__.py b/src/antlion/controllers/fuchsia_lib/bt/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/fuchsia_lib/bt/avdtp_lib.py b/src/antlion/controllers/fuchsia_lib/bt/avdtp_lib.py
deleted file mode 100644
index 0af61d2..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/avdtp_lib.py
+++ /dev/null
@@ -1,229 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaAvdtpLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "avdtp")
-
-    def init(self, initiator_delay=None):
-        """Initializes the AVDTP service with optional initiator_delay.
-
-        Args:
-            initiator_delay: Optional. The delay in milliseconds to start a
-            stream.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpInit"
-        test_args = {"initiator_delay": initiator_delay}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getConnectedPeers(self):
-        """Gets the AVDTP connected peers.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetConnectedPeers"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setConfiguration(self, peer_id):
-        """Sends the AVDTP command to input peer_id: set configuration
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpSetConfiguration"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getConfiguration(self, peer_id):
-        """Sends the AVDTP command to input peer_id: get configuration
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetConfiguration"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getCapabilities(self, peer_id):
-        """Sends the AVDTP command to input peer_id: get capabilities
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetCapabilities"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getAllCapabilities(self, peer_id):
-        """Sends the AVDTP command to input peer_id: get all capabilities
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpGetAllCapabilities"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def reconfigureStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: reconfigure stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpReconfigureStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def suspendStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: suspend stream
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpSuspendStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def suspendAndReconfigure(self, peer_id):
-        """Sends the AVDTP command to input peer_id: suspend and reconfigure
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpSuspendAndReconfigure"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def releaseStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: release stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpReleaseStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def establishStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: establish stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpEstablishStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def startStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: start stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpStartStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def abortStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: abort stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpAbortStream"
-        test_args = {"identifier": peer_id}
-        test_id = self.build_id(self.test_counter)
-        self.test_counter += 1
-
-        return self.send_command(test_cmd, test_args)
-
-    def establishStream(self, peer_id):
-        """Sends the AVDTP command to input peer_id: establish stream
-
-        Args:
-            peer_id: The peer id to send the AVDTP command to.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpEstablishStream"
-        test_args = {"identifier": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self):
-        """Removes the AVDTP service from the Fuchsia device
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "avdtp_facade.AvdtpRemoveService"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/ble_lib.py b/src/antlion/controllers/fuchsia_lib/bt/ble_lib.py
deleted file mode 100644
index 1d7e622..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/ble_lib.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-import uuid
-
-
-class FuchsiaBleLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "ble")
-
-    def _convert_human_readable_uuid_to_byte_list(self, readable_uuid):
-        """Converts a readable uuid to a byte list.
-
-        Args:
-            readable_uuid: string, A readable uuid in the format:
-                Input: "00001101-0000-1000-8000-00805f9b34fb"
-                Output: ['fb', '34', '9b', '5f', '80', '00', '00', '80', '00',
-                         '10', '00', '00', '01', '11', '00', '00']
-
-        Returns:
-            A byte list representing the readable uuid.
-        """
-        hex_uuid_str = uuid.UUID(readable_uuid).hex
-        break_n_bytes = 2
-        byte_list = [
-            hex_uuid_str[i:i + break_n_bytes]
-            for i in range(0, len(hex_uuid_str), break_n_bytes)
-        ]
-        byte_list.reverse()
-        return byte_list
-
-    def bleStopBleAdvertising(self):
-        """BleStopAdvertising command
-
-        Returns:
-            Dictionary, None if success, error string if error.
-        """
-        test_cmd = "ble_advertise_facade.BleStopAdvertise"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleStartBleAdvertising(self,
-                               advertising_data,
-                               scan_response,
-                               interval,
-                               connectable=True):
-        """BleStartAdvertising command
-
-        Args:
-            advertising_data: dictionary, advertising data required for ble
-                advertise.
-            scan_response: dictionary, optional scan respones data to send.
-            interval: int, Advertising interval (in ms).
-            connectable: bool, whether the advertisement is connectable or not.
-
-        Returns:
-            Dictionary, None if success, error string if error.
-        """
-        test_cmd = "ble_advertise_facade.BleAdvertise"
-        service_uuid_list = None
-        if type(advertising_data['service_uuids']) == list:
-            service_uuid_list = []
-            for single_uuid in advertising_data['service_uuids']:
-                service_uuid_list.append(
-                    self._convert_human_readable_uuid_to_byte_list(
-                        single_uuid))
-            advertising_data['service_uuids'] = service_uuid_list
-
-        service_uuid_list = None
-        if scan_response and type(scan_response['service_uuids']) == list:
-            service_uuid_list = []
-            for single_uuid in scan_response['service_uuids']:
-                service_uuid_list.append(
-                    self._convert_human_readable_uuid_to_byte_list(
-                        single_uuid))
-            scan_response['service_uuids'] = service_uuid_list
-
-        if scan_response and type(scan_response['service_data']) == list:
-            for service_data in scan_response['service_data']:
-                service_data[
-                    "uuid"] = self._convert_human_readable_uuid_to_byte_list(
-                        service_data["uuid"])
-
-        if type(advertising_data['service_data']) == list:
-            for service_data in advertising_data['service_data']:
-                service_data[
-                    "uuid"] = self._convert_human_readable_uuid_to_byte_list(
-                        service_data["uuid"])
-
-        test_args = {
-            "advertising_data": advertising_data,
-            "scan_response": scan_response,
-            "interval_ms": interval,
-            "connectable": connectable
-        }
-        return self.send_command(test_cmd, test_args)
-
-    def blePublishService(self, primary, type_, service_id):
-        """Publishes services specified by input args
-
-        Args:
-            primary: bool, Flag of service.
-            type: string, Canonical 8-4-4-4-12 uuid of service.
-            service_proxy_key: string, Unique identifier to specify where to publish service
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bluetooth.BlePublishService"
-        test_args = {
-            "primary": primary,
-            "type": type_,
-            "local_service_id": service_id
-        }
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/bts_lib.py b/src/antlion/controllers/fuchsia_lib/bt/bts_lib.py
deleted file mode 100644
index 6a94c6b..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/bts_lib.py
+++ /dev/null
@@ -1,227 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaBtsLib(BaseLib):
-    # Class representing the Bluetooth Access Library.
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "bt_sys")
-
-    def setDiscoverable(self, discoverable):
-        """Sets the device to be discoverable over BR/EDR.
-
-        Args:
-            discoverable: A bool object for setting Bluetooth
-              device discoverable or not.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothSetDiscoverable"
-        test_args = {"discoverable": discoverable}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setName(self, name):
-        """Sets the local Bluetooth name of the device.
-
-        Args:
-            name: A string that represents the name to set.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothSetName"
-        test_args = {"name": name}
-
-        return self.send_command(test_cmd, test_args)
-
-    def inputPairingPin(self, pin):
-        """Inputs the pairing pin to the Fuchsia devices' pairing delegate.
-
-        Args:
-            pin: A string that represents the pin to input.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothInputPairingPin"
-        test_args = {"pin": pin}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getPairingPin(self):
-        """Gets the pairing pin from the Fuchsia devices' pairing delegate.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothGetPairingPin"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initBluetoothSys(self):
-        """Initialises the Bluetooth sys Interface proxy in SL4F.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothInitSys"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def requestDiscovery(self, discovery):
-        """Start or stop Bluetooth Control device discovery.
-
-        Args:
-            discovery: A bool object representing starting or stopping
-              device discovery.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothRequestDiscovery"
-        test_args = {"discovery": discovery}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getKnownRemoteDevices(self):
-        """Get known remote BR/EDR and LE devices.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothGetKnownRemoteDevices"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def forgetDevice(self, identifier):
-        """Forgets a devices pairing.
-
-        Args:
-            identifier: A string representing the device id.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothForgetDevice"
-        test_args = {"identifier": identifier}
-
-        return self.send_command(test_cmd, test_args)
-
-    def disconnectDevice(self, identifier):
-        """Disconnects a devices.
-
-        Args:
-            identifier: A string representing the device id.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothDisconnectDevice"
-        test_args = {"identifier": identifier}
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectDevice(self, identifier):
-        """Connects to a devices.
-
-        Args:
-            identifier: A string representing the device id.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothConnectDevice"
-        test_args = {"identifier": identifier}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getActiveAdapterAddress(self):
-        """Gets the current Active Adapter's address.
-
-        Returns:
-            Dictionary, String address if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothGetActiveAdapterAddress"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def pair(self, identifier, pairing_security_level, non_bondable,
-             transport):
-        """Pairs to a device.
-
-        Args:
-            identifier: A string representing the device id.
-            pairing_security_level: The security level required for this pairing request
-                represented as a u64. (Only for LE pairing)
-                Available Values
-                1 - ENCRYPTED: Encrypted without MITM protection (unauthenticated)
-                2 - AUTHENTICATED: Encrypted with MITM protection (authenticated).
-                None: No pairing security level.
-            non_bondable: A bool representing whether the pairing mode is bondable or not. None is
-                also accepted. False if bondable, True if non-bondable.
-            transport: A u64 representing the transport type.
-                Available Values
-                1 - BREDR: Classic BR/EDR transport
-                2 - LE: Bluetooth Low Energy Transport
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothPairDevice"
-        test_args = {
-            "identifier": identifier,
-            "pairing_security_level": pairing_security_level,
-            "non_bondable": non_bondable,
-            "transport": transport,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def acceptPairing(self,
-                      input_capabilities="NONE",
-                      output_capabilities="NONE"):
-        """Accepts incoming pairing requests.
-
-        Args:
-            input: String - The input I/O capabilities to use
-                Available Values:
-                NONE - Input capability type None
-                CONFIRMATION - Input capability type confirmation
-                KEYBOARD - Input capability type Keyboard
-            output: String - The output I/O Capabilities to use
-                Available Values:
-                NONE - Output capability type None
-                DISPLAY - output capability type Display
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "bt_sys_facade.BluetoothAcceptPairing"
-        test_args = {
-            "input": input_capabilities,
-            "output": output_capabilities,
-        }
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/gattc_lib.py b/src/antlion/controllers/fuchsia_lib/bt/gattc_lib.py
deleted file mode 100644
index b8630d9..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/gattc_lib.py
+++ /dev/null
@@ -1,350 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaGattcLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "gatt_client")
-
-    def bleStartBleScan(self, scan_filter):
-        """Starts a BLE scan
-
-        Args:
-            scan_time_ms: int, Amount of time to scan for.
-            scan_filter: dictionary, Device filter for a scan.
-            scan_count: int, Number of devices to scan for before termination.
-
-        Returns:
-            None if pass, err if fail.
-        """
-        test_cmd = "gatt_client_facade.BleStartScan"
-        test_args = {
-            "filter": scan_filter,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleStopBleScan(self):
-        """Stops a BLE scan
-
-        Returns:
-            Dictionary, List of devices discovered, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleStopScan"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def listServices(self, id):
-        """Lists services of a peripheral specified by id.
-
-        Args:
-            id: string, Peripheral identifier to list services.
-
-        Returns:
-            Dictionary, List of Service Info if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcListServices"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleGetDiscoveredDevices(self):
-        """Stops a BLE scan
-
-        Returns:
-            Dictionary, List of devices discovered, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleGetDiscoveredDevices"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def discoverCharacteristics(self):
-        """Discover the characteristics of a connected service.
-
-        Returns:
-            Dictionary, List of Characteristics and Descriptors if success,
-            error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcDiscoverCharacteristics"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeCharById(self, id, offset, write_value):
-        """Write Characteristic by id..
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset of bytes to write to.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteCharacteristicById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeLongCharById(self, id, offset, write_value, reliable_mode=False):
-        """Write Characteristic by id.
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset of bytes to write to.
-            write_value: byte array, The bytes to write.
-            reliable_mode: bool value representing reliable writes.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteLongCharacteristicById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "write_value": write_value,
-            "reliable_mode": reliable_mode
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeLongDescById(self, id, offset, write_value):
-        """Write Descriptor by id.
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset of bytes to write to.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteLongDescriptorById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeCharByIdWithoutResponse(self, id, write_value):
-        """Write Characteristic by id without response.
-
-        Args:
-            id: string, Characteristic identifier.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteCharacteristicByIdWithoutResponse"
-        test_args = {
-            "identifier": id,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def enableNotifyCharacteristic(self, id):
-        """Enable notifications on a Characteristic.
-
-        Args:
-            id: string, Characteristic identifier.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcEnableNotifyCharacteristic"
-        test_args = {
-            "identifier": id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def disableNotifyCharacteristic(self, id):
-        """Disable notifications on a Characteristic.
-
-        Args:
-            id: string, Characteristic identifier.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcDisableNotifyCharacteristic"
-        test_args = {
-            "identifier": id,
-            "value": False,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readCharacteristicById(self, id):
-        """Read Characteristic value by id..
-
-        Args:
-            id: string, Characteristic identifier.
-
-        Returns:
-            Characteristic value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadCharacteristicById"
-        test_args = {
-            "identifier": id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readCharacteristicByType(self, uuid):
-        """Read Characteristic value by id..
-
-        Args:
-            uuid: string, Characteristic identifier.
-
-        Returns:
-            Characteristic value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadCharacteristicByType"
-        test_args = {
-            "uuid": uuid,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readDescriptorById(self, id):
-        """Read Descriptor value by id..
-
-        Args:
-            id: string, Descriptor identifier.
-
-        Returns:
-            Descriptor value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadDescriptorById"
-        test_args = {
-            "identifier": id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readLongDescriptorById(self, id, offset, max_bytes):
-        """Reads Long Descriptor value by id.
-
-        Args:
-            id: string, Descriptor identifier.
-            offset: int, The offset to start reading from.
-            max_bytes: int, The max bytes to return.
-
-        Returns:
-            Descriptor value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadLongDescriptorById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "max_bytes": max_bytes
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeDescriptorById(self, id, offset, write_value):
-        """Write Descriptor by id.
-
-        Args:
-            id: string, Descriptor identifier.
-            write_value: byte array, The bytes to write.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcWriteDescriptorById"
-        test_args = {
-            "identifier": id,
-            "write_value": write_value,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def readLongCharacteristicById(self, id, offset, max_bytes):
-        """Reads Long Characteristic value by id.
-
-        Args:
-            id: string, Characteristic identifier.
-            offset: int, The offset to start reading from.
-            max_bytes: int, The max bytes to return.
-
-        Returns:
-            Characteristic value if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcReadLongCharacteristicById"
-        test_args = {
-            "identifier": id,
-            "offset": offset,
-            "max_bytes": max_bytes
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectToService(self, id, service_id):
-        """ Connect to a specific Service specified by id.
-
-        Args:
-            id: string, Service id.
-
-        Returns:
-            None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.GattcConnectToService"
-        test_args = {"identifier": id, "service_identifier": service_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleConnectToPeripheral(self, id):
-        """Connects to a peripheral specified by id.
-
-        Args:
-            id: string, Peripheral identifier to connect to.
-
-        Returns:
-            Dictionary, List of Service Info if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleConnectPeripheral"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def bleDisconnectPeripheral(self, id):
-        """Disconnects from a peripheral specified by id.
-
-        Args:
-            id: string, Peripheral identifier to disconnect from.
-
-        Returns:
-            Dictionary, None if success, error string if error.
-        """
-        test_cmd = "gatt_client_facade.BleDisconnectPeripheral"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/gatts_lib.py b/src/antlion/controllers/fuchsia_lib/bt/gatts_lib.py
deleted file mode 100644
index 5f9ecb4..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/gatts_lib.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaGattsLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "gatt_server")
-
-    def publishServer(self, database):
-        """Publishes services specified by input args
-
-        Args:
-            database: A database that follows the conventions of
-                acts_contrib.test_utils.bt.gatt_test_database.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "gatt_server_facade.GattServerPublishServer"
-        test_args = {
-            "database": database,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def closeServer(self):
-        """Closes an active GATT server.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "gatt_server_facade.GattServerCloseServer"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/hfp_lib.py b/src/antlion/controllers/fuchsia_lib/bt/hfp_lib.py
deleted file mode 100644
index e8f68f3..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/hfp_lib.py
+++ /dev/null
@@ -1,420 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaHfpLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "hfp")
-
-    def init(self):
-        """Initializes the HFP service.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.HfpInit"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self):
-        """Removes the HFP service from the Fuchsia device
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.HfpRemoveService"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def listPeers(self):
-        """List all connected HFP peer devices.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ListPeers"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setActivePeer(self, peer_id):
-        """Set the active HFP peer device. All peer specific commands will be
-        directed to this device.
-
-        Args:
-            peer_id: The id of the peer to set as active. Use "listPeers" to
-            find connected peer ids.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetActivePeer"
-        test_args = {"peer_id": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def listCalls(self):
-        """List all calls known to the sl4f component.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ListCalls"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def newCall(self, remote, state, direction):
-        """Opens a new call channel and alerts the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-            state: The state of the call.
-            direction: The direction of the call. Can be "incoming" or "outgoing".
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.NewCall"
-        test_args = {"remote": remote, "state": state, "direction": direction}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initiateIncomingCall(self, remote):
-        """Opens an incoming call channel and alerts the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.IncomingCall"
-        test_args = {"remote": remote}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initiateIncomingWaitingCall(self, remote):
-        """Opens an incoming call when there is an onging call and alerts
-        the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.IncomingWaitingCall"
-        test_args = {"remote": remote}
-
-        return self.send_command(test_cmd, test_args)
-
-    def initiateOutgoingCall(self, remote):
-        """Opens an outgoing call channel and alerts the HFP peer.
-
-        Args:
-            remote: The number of the remote party.
-
-        Returns:
-            Dictionary, call_id if success, error if error.
-        """
-        test_cmd = "hfp_facade.OutgoingCall"
-        test_args = {"remote": remote}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallActive(self, call_id):
-        """Sets the specified call to the "OngoingActive" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallActive"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallHeld(self, call_id):
-        """Sets the specified call to the "OngoingHeld" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallHeld"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallTerminated(self, call_id):
-        """Sets the specified call to the "Terminated" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallTerminated"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setCallTransferredToAg(self, call_id):
-        """Sets the specified call to the "TransferredToAg" state.
-
-        Args:
-            call_id: The unique id of the call.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetCallTransferredToAg"
-        test_args = {"call_id": call_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setSpeakerGain(self, value):
-        """Sets the active peer's speaker gain.
-
-        Args:
-            value: The gain value to set. Must be between 0-15 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetSpeakerGain"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setMicrophoneGain(self, value):
-        """Sets the active peer's microphone gain.
-
-        Args:
-            value: The gain value to set. Must be between 0-15 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetMicrophoneGain"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setServiceAvailable(self, value):
-        """Sets the simulated network service status reported by the call manager.
-
-        Args:
-            value: True to set the network service to available.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetServiceAvailable"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setRoaming(self, value):
-        """Sets the simulated roaming status reported by the call manager.
-
-        Args:
-            value: True to set the network connection to roaming.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetRoaming"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setSignalStrength(self, value):
-        """Sets the simulated signal strength reported by the call manager.
-
-        Args:
-            value: The signal strength value to set. Must be between 0-5 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetSignalStrength"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setSubscriberNumber(self, value):
-        """Sets the subscriber number reported by the call manager.
-
-        Args:
-            value: The subscriber number to set. Maximum length 128 characters.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetSubscriberNumber"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setOperator(self, value):
-        """Sets the operator value reported by the call manager.
-
-        Args:
-            value: The operator value to set. Maximum length 16 characters.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetOperator"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setNrecSupport(self, value):
-        """Sets the noise reduction/echo cancelation support reported by the call manager.
-
-        Args:
-            value: The nrec support bool.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetNrecSupport"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setBatteryLevel(self, value):
-        """Sets the battery level reported by the call manager.
-
-        Args:
-            value: The integer battery level value. Must be 0-5 inclusive.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetBatteryLevel"
-        test_args = {"value": value}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setLastDialed(self, number):
-        """Sets the last dialed number in the call manager.
-
-        Args:
-            number: The number of the remote party.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetLastDialed"
-        test_args = {"number": number}
-
-        return self.send_command(test_cmd, test_args)
-
-    def clearLastDialed(self):
-        """Clears the last dialed number in the call manager.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ClearLastDialed"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setMemoryLocation(self, location, number):
-        """Sets a memory location to point to a remote number.
-
-        Args:
-            location: The memory location at which to store the number.
-            number: The number of the remote party to be stored.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetMemoryLocation"
-        test_args = {"location": location, "number": number}
-
-        return self.send_command(test_cmd, test_args)
-
-    def clearMemoryLocation(self, location):
-        """Clear a memory location so that it no longer points to a remote
-        number.
-
-        Args:
-            localtion: The memory location to clear.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.ClearMemoryLocation"
-        test_args = {"location": location}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setDialResult(self, number, status):
-        """Sets the status result to be returned when the number is dialed.
-
-        Args:
-            number: The number of the remote party.
-            status: The status to be returned when an outgoing call is
-                    initiated to the number.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetDialResult"
-        test_args = {"number": number, "status": status}
-
-        return self.send_command(test_cmd, test_args)
-
-    def getState(self):
-        """Get the call manager's state.
-
-        Returns:
-            Dictionary, State dictionary if success, error if error.
-        """
-        test_cmd = "hfp_facade.GetState"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def setConnectionBehavior(self, autoconnect):
-        """Set the Service Level Connection behavior when a new peer connects.
-
-        Args:
-            autoconnect: Enable/Disable autoconnection of SLC.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "hfp_facade.SetConnectionBehavior"
-        test_args = {"autoconnect": autoconnect}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/rfcomm_lib.py b/src/antlion/controllers/fuchsia_lib/bt/rfcomm_lib.py
deleted file mode 100644
index 10f0736..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/rfcomm_lib.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaRfcommLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "rfcomm")
-
-    def init(self):
-        """Initializes the RFCOMM service.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.RfcommInit"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self):
-        """Removes the RFCOMM service from the Fuchsia device
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.RfcommRemoveService"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def disconnectSession(self, peer_id):
-        """Closes the RFCOMM Session with the remote peer
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.DisconnectSession"
-        test_args = {"peer_id": peer_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectRfcommChannel(self, peer_id, server_channel_number):
-        """Makes an outgoing RFCOMM connection to the remote peer
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.ConnectRfcommChannel"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def disconnectRfcommChannel(self, peer_id, server_channel_number):
-        """Closes the RFCOMM channel with the remote peer
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.DisconnectRfcommChannel"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def sendRemoteLineStatus(self, peer_id, server_channel_number):
-        """Sends a Remote Line Status update to the remote peer for the provided channel number
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.SendRemoteLineStatus"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def writeRfcomm(self, peer_id, server_channel_number, data):
-        """Sends data to the remote peer over the RFCOMM channel
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "rfcomm_facade.RfcommWrite"
-        test_args = {
-            "peer_id": peer_id,
-            "server_channel_number": server_channel_number,
-            "data": data
-        }
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/bt/sdp_lib.py b/src/antlion/controllers/fuchsia_lib/bt/sdp_lib.py
deleted file mode 100644
index 25dcc33..0000000
--- a/src/antlion/controllers/fuchsia_lib/bt/sdp_lib.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaProfileServerLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "profile_server")
-
-    def addService(self, record):
-        """Publishes an SDP service record specified by input args
-
-        Args:
-            record: A database that represents an SDP record to
-                be published.
-
-        Returns:
-            Dictionary, service id if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerAddService"
-        test_args = {
-            "record": record,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def addSearch(self, attribute_list, profile_id):
-        """Publishes services specified by input args
-
-        Args:
-            attribute_list: The list of attributes to set
-            profile_id: The profile ID to set.
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerAddSearch"
-        test_args = {
-            "attribute_list": attribute_list,
-            "profile_id": profile_id
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def removeService(self, service_id):
-        """Removes a service.
-
-        Args:
-            record: A database that represents an SDP record to
-                be published.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerRemoveService"
-        test_args = {
-            "service_id": service_id,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def init(self):
-        """Initializes the ProfileServerFacade's proxy object.
-
-        No operations for SDP can be performed until this is initialized.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerInit"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def cleanUp(self):
-        """Cleans up all objects related to SDP.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerCleanup"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def connectL2cap(self, identifier, psm, mode):
-        """ Sends an outgoing l2cap connection to a connected peer device.
-
-        Args:
-            psm: The psm value to connect over. Available PSMs:
-                SDP 0x0001  See Bluetooth Service Discovery Protocol (SDP)
-                RFCOMM  0x0003  See RFCOMM with TS 07.10
-                TCS-BIN 0x0005  See Bluetooth Telephony Control Specification /
-                    TCS Binary
-                TCS-BIN-CORDLESS    0x0007  See Bluetooth Telephony Control
-                    Specification / TCS Binary
-                BNEP    0x000F  See Bluetooth Network Encapsulation Protocol
-                HID_Control 0x0011  See Human Interface Device
-                HID_Interrupt   0x0013  See Human Interface Device
-                UPnP    0x0015  See [ESDP]
-                AVCTP   0x0017  See Audio/Video Control Transport Protocol
-                AVDTP   0x0019  See Audio/Video Distribution Transport Protocol
-                AVCTP_Browsing  0x001B  See Audio/Video Remote Control Profile
-                UDI_C-Plane 0x001D  See the Unrestricted Digital Information
-                    Profile [UDI]
-                ATT 0x001F  See Bluetooth Core Specification​
-                ​3DSP   0x0021​ ​​See 3D Synchronization Profile.
-                ​LE_PSM_IPSP    ​0x0023 ​See Internet Protocol Support Profile
-                    (IPSP)
-                OTS 0x0025  See Object Transfer Service (OTS)
-                EATT    0x0027  See Bluetooth Core Specification
-            mode: String - The channel mode to connect to. Available values:
-                Basic mode: BASIC
-                Enhanced Retransmission mode: ERTM
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "profile_server_facade.ProfileServerConnectL2cap"
-        test_args = {"identifier": identifier, "psm": psm, "mode": mode}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/ffx.py b/src/antlion/controllers/fuchsia_lib/ffx.py
deleted file mode 100644
index ca05feb..0000000
--- a/src/antlion/controllers/fuchsia_lib/ffx.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import os
-import tempfile
-import subprocess
-import time
-
-from pathlib import Path, PurePath
-from shutil import rmtree
-from typing import Any, MutableMapping, Optional
-
-from antlion import context
-from antlion import logger
-from antlion import signals
-from antlion import utils
-
-FFX_DEFAULT_COMMAND_TIMEOUT: int = 60
-
-
-class FFXError(signals.TestError):
-    """Non-zero error code returned from a ffx command."""
-
-    def __init__(self, command: str,
-                 process: subprocess.CalledProcessError) -> None:
-        self.command = command
-        self.stdout: str = process.stdout.decode('utf-8', errors='replace')
-        self.stderr: str = process.stderr.decode('utf-8', errors='replace')
-        self.exit_status = process.returncode
-
-    def __str__(self) -> str:
-        return f'ffx subcommand "{self.command}" returned {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
-
-
-class FFXTimeout(signals.TestError):
-    """Timed out running a ffx command."""
-
-
-class FFX:
-    """Device-specific controller for the ffx tool.
-
-    Attributes:
-        log: Logger for the device-specific instance of ffx.
-        binary_path: Path to the ffx binary.
-        mdns_name: mDNS nodename of the default Fuchsia target.
-        ip: IP address of the default Fuchsia target.
-        ssh_private_key_path: Path to Fuchsia DUT SSH private key.
-    """
-
-    def __init__(self,
-                 binary_path: str,
-                 mdns_name: str,
-                 ip: str = None,
-                 ssh_private_key_path: str = None):
-        """
-        Args:
-            binary_path: Path to ffx binary.
-            target: Fuchsia mDNS nodename of default target.
-            ssh_private_key_path: Path to SSH private key for talking to the
-                Fuchsia DUT.
-        """
-        self.log = logger.create_tagged_trace_logger(f"ffx | {mdns_name}")
-        self.binary_path = binary_path
-        self.mdns_name = mdns_name
-        self.ip = ip
-        self.ssh_private_key_path = ssh_private_key_path
-
-        self._env_config_path: Optional[str] = None
-        self._sock_dir: Optional[str] = None
-        self._ssh_auth_sock_path: Optional[str] = None
-        self._overnet_socket_path: Optional[str] = None
-        self._has_been_reachable = False
-        self._has_logged_version = False
-
-    def clean_up(self) -> None:
-        if self._env_config_path:
-            self.run("daemon stop", skip_reachability_check=True)
-        if self._ssh_auth_sock_path:
-            Path(self._ssh_auth_sock_path).unlink(missing_ok=True)
-        if self._overnet_socket_path:
-            Path(self._overnet_socket_path).unlink(missing_ok=True)
-        if self._sock_dir:
-            rmtree(self._sock_dir)
-
-        self._env_config_path = None
-        self._sock_dir = None
-        self._ssh_auth_sock_path = None
-        self._overnet_socket_path = None
-        self._has_been_reachable = False
-        self._has_logged_version = False
-
-    def run(self,
-            command: str,
-            timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT,
-            skip_status_code_check: bool = False,
-            skip_reachability_check: bool = False
-            ) -> subprocess.CompletedProcess:
-        """Runs an ffx command.
-
-        Verifies reachability before running, if it hasn't already.
-
-        Args:
-            command: Command to run with ffx.
-            timeout_sec: Seconds to wait for a command to complete.
-            skip_status_code_check: Whether to check for the status code.
-            verify_reachable: Whether to verify reachability before running.
-
-        Raises:
-            FFXTimeout: when the command times out.
-            FFXError: when the command returns non-zero and skip_status_code_check is False.
-
-        Returns:
-            The results of the command. Note subprocess.CompletedProcess returns
-            stdout and stderr as a byte-array, not a string. Treat these members
-            as such or convert to a string using bytes.decode('utf-8').
-        """
-        if not self._env_config_path:
-            self._create_isolated_environment()
-        if not self._has_been_reachable and not skip_reachability_check:
-            self.log.info(f'Verifying reachability before running "{command}"')
-            self.verify_reachable()
-
-        self.log.debug(f'Running "{command}".')
-        full_command = f'{self.binary_path} -e {self._env_config_path} {command}'
-
-        try:
-            result = subprocess.run(full_command.split(),
-                                    capture_output=True,
-                                    timeout=timeout_sec,
-                                    check=not skip_status_code_check)
-        except subprocess.CalledProcessError as e:
-            raise FFXError(command, e) from e
-        except subprocess.TimeoutExpired as e:
-            raise FFXTimeout(f'Timed out running "{full_command}"') from e
-
-        return result
-
-    def _create_isolated_environment(self) -> None:
-        """ Create a new isolated environment for ffx.
-
-        This is needed to avoid overlapping ffx daemons while testing in
-        parallel, causing the ffx invocations to “upgrade” one daemon to
-        another, which appears as a flap/restart to another test.
-        """
-        # Store ffx files in a unique directory. Timestamp is used to prevent
-        # files from being overwritten in the case when a test intentionally
-        # reboots or resets the device such that a new isolated ffx environment
-        # is created.
-        root_dir = context.get_current_context().get_full_output_path()
-        epoch = utils.get_current_epoch_time()
-        time_stamp = logger.normalize_log_line_timestamp(
-            logger.epoch_to_log_line_timestamp(epoch))
-        target_dir = os.path.join(root_dir, f'{self.mdns_name}_{time_stamp}')
-        os.makedirs(target_dir, exist_ok=True)
-
-        # Sockets need to be created in a different directory to be guaranteed
-        # to stay under the maximum socket path length of 104 characters.
-        # See https://unix.stackexchange.com/q/367008
-        self._sock_dir = tempfile.mkdtemp()
-        # On MacOS, the socket paths need to be just paths (not pre-created
-        # Python tempfiles, which are not socket files).
-        self._ssh_auth_sock_path = str(
-            PurePath(self._sock_dir, 'ssh_auth_sock'))
-        self._overnet_socket_path = str(
-            PurePath(self._sock_dir, 'overnet_socket'))
-
-        config: MutableMapping[str, Any] = {
-            "target": {
-                "default": self.mdns_name,
-            },
-            # Use user-specific and device-specific locations for sockets.
-            # Avoids user permission errors in a multi-user test environment.
-            # Avoids daemon upgrades when running tests in parallel in a CI
-            # environment.
-            "ssh": {
-                "auth-sock": self._ssh_auth_sock_path,
-            },
-            "overnet": {
-                "socket": self._overnet_socket_path,
-            },
-            # Configure the ffx daemon to log to a place where we can read it.
-            # Note, ffx client will still output to stdout, not this log
-            # directory.
-            "log": {
-                "enabled": True,
-                "dir": [target_dir],
-            },
-            # Disable analytics to decrease noise on the network.
-            "ffx": {
-                "analytics": {
-                    "disabled": True,
-                },
-            },
-        }
-
-        if self.ip:
-            config["discovery"] = {
-                "mdns": {
-                    "enabled": False,
-                },
-            }
-
-        # ffx looks for the private key in several default locations. For
-        # testbeds which have the private key in another location, set it now.
-        if self.ssh_private_key_path:
-            config["ssh"]["priv"] = self.ssh_private_key_path
-
-        config_path = os.path.join(target_dir, "ffx_config.json")
-        with open(config_path, 'w', encoding="utf-8") as f:
-            json.dump(config, f, ensure_ascii=False, indent=4)
-
-        env = {
-            "user": config_path,
-            "build": None,
-            "global": None,
-        }
-        self._env_config_path = os.path.join(target_dir, "ffx_env.json")
-        with open(self._env_config_path, 'w', encoding="utf-8") as f:
-            json.dump(env, f, ensure_ascii=False, indent=4)
-
-        # The ffx daemon will started automatically when needed. There is no
-        # need to start it manually here.
-
-    def verify_reachable(self,
-                         timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT
-                         ) -> None:
-        """Verify the target is reachable via RCS and various services.
-
-        Blocks until the device allows for an RCS connection. If the device
-        isn't reachable within a short time, logs a warning before waiting
-        longer.
-
-        Verifies the RCS connection by fetching information from the device,
-        which exercises several debug and informational FIDL services.
-
-        When called for the first time, the versions will be checked for
-        compatibility.
-
-        Args:
-            timeout_sec: Seconds to wait for reachability check
-
-        Raises:
-            FFXError: when an unknown error occurs
-            FFXTimeout: when the target is unreachable
-        """
-        cmd = "target wait"
-        if self.ip:
-            # `target add` does what `target wait` does but adds an entry
-            # to ensure connections can happen without mDNS.
-            # TODO(https://fxbug.dev/105530): Update manual target parsing in
-            # ffx.
-            cmd = f"target add {self.ip}"
-
-        timeout = time.perf_counter() + timeout_sec
-        while True:
-            try:
-                self.run(cmd, timeout_sec=5, skip_reachability_check=True)
-                break
-            except FFXError as e:
-                if 'took too long connecting to ascendd socket' in e.stderr:
-                    err = e
-                else:
-                    raise e
-            except FFXTimeout as e:
-                err = e
-
-            if time.perf_counter() > timeout:
-                raise FFXTimeout(
-                    f'Waited over {timeout_sec}s for ffx to become reachable'
-                ) from err
-
-        # Use a shorter timeout than default because device information
-        # gathering can hang for a long time if the device is not actually
-        # connectable.
-        try:
-            result = self.run("target show --json",
-                              timeout_sec=15,
-                              skip_reachability_check=True)
-        except Exception as e:
-            self.log.error(
-                f'Failed to reach target device. Try running "{self.binary_path}'
-                + ' doctor" to diagnose issues.')
-            raise e
-
-        self._has_been_reachable = True
-
-        if not self._has_logged_version:
-            self._has_logged_version = True
-            self.compare_version(result)
-
-    def compare_version(
-            self, target_show_result: subprocess.CompletedProcess) -> None:
-        """Compares the version of Fuchsia with the version of ffx.
-
-        Args:
-            target_show_result: Result of the target show command with JSON
-                output mode enabled
-        """
-        result_json = json.loads(target_show_result.stdout)
-        build_info = next(
-            filter(lambda s: s.get('label') == 'build', result_json))
-        version_info = next(
-            filter(lambda s: s.get('label') == 'version', build_info['child']))
-        device_version = version_info.get('value')
-        ffx_version = self.run("version").stdout.decode('utf-8')
-
-        self.log.info(
-            f"Device version: {device_version}, ffx version: {ffx_version}")
-        if device_version != ffx_version:
-            self.log.warning(
-                "ffx versions that differ from device versions may" +
-                " have compatibility issues. It is recommended to" +
-                " use versions within 6 weeks of each other.")
diff --git a/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py b/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
deleted file mode 100644
index d6e030e..0000000
--- a/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import http
-
-import antlion.controllers.fuchsia_lib.base_lib as base_lib
-
-HW_PWR_STATE_CONTROL_TIMEOUT = 5
-
-
-class FuchsiaHardwarePowerStatecontrolLib(base_lib.BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "hardware_power_statecontrol")
-
-    def send_command(self, test_cmd, test_args, response_timeout=30):
-        """Wrap send_command to allow disconnects after sending the request."""
-        try:
-            response = super().send_command(test_cmd, test_args,
-                                            response_timeout)
-        except (TimeoutError, http.client.RemoteDisconnected,
-                base_lib.DeviceOffline) as e:
-            logging.warn(f'Error while sending power command: {e}')
-            return
-        return response
-
-    def suspendReboot(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT):
-        """Call Suspend Reboot.
-
-        Returns:
-            None if success.
-        """
-        test_cmd = "hardware_power_statecontrol_facade.SuspendReboot"
-        test_args = {}
-        return self.send_command(test_cmd, test_args, response_timeout=timeout)
-
-    def suspendRebootBootloader(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT):
-        """Call Suspend Reboot Bootloader
-
-        Returns:
-            None if success.
-        """
-        test_cmd = "hardware_power_statecontrol_facade.SuspendRebootBootloader"
-        test_args = {}
-        return self.send_command(test_cmd, test_args, response_timeout=timeout)
-
-    def suspendPoweroff(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT):
-        """Call Suspend Poweroff
-
-        Returns:
-            None if success.
-        """
-        test_cmd = "hardware_power_statecontrol_facade.SuspendPoweroff"
-        test_args = {}
-        return self.send_command(test_cmd, test_args, response_timeout=timeout)
-
-    def suspendMexec(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT):
-        """Call Suspend Mexec
-
-        Returns:
-            None if success.
-        """
-        test_cmd = "hardware_power_statecontrol_facade.SuspendMexec"
-        test_args = {}
-        return self.send_command(test_cmd, test_args, response_timeout=timeout)
-
-    def suspendRam(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT):
-        """Call Suspend Ram
-
-        Returns:
-            None if success.
-        """
-        test_cmd = "hardware_power_statecontrol_facade.SuspendRam"
-        test_args = {}
-        return self.send_command(test_cmd, test_args, response_timeout=timeout)
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py
deleted file mode 100644
index 9910be0..0000000
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import logger
-from antlion import signals
-
-
-class NetstackControllerError(signals.ControllerError):
-    pass
-
-
-class NetstackController:
-    """Contains methods related to netstack, to be used in FuchsiaDevice object"""
-
-    def __init__(self, fuchsia_device):
-        self.device = fuchsia_device
-        self.log = logger.create_tagged_trace_logger(
-            'NetstackController for FuchsiaDevice | %s' % self.device.ip)
-
-    def list_interfaces(self):
-        """Retrieve netstack interfaces from netstack facade
-
-        Returns:
-            List of dicts, one for each interface, containing interface
-            information
-        """
-        response = self.device.sl4f.netstack_lib.netstackListInterfaces()
-        if response.get('error'):
-            raise NetstackControllerError(
-                'Failed to get network interfaces list: %s' %
-                response['error'])
-        return response['result']
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
deleted file mode 100644
index 176d54e..0000000
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
+++ /dev/null
@@ -1,182 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion import logger
-from antlion import signals
-from antlion import utils
-
-TIME_TO_SLEEP_BETWEEN_RETRIES = 1
-TIME_TO_WAIT_FOR_COUNTRY_CODE = 10
-
-
-class WlanControllerError(signals.ControllerError):
-    pass
-
-
-class WlanController:
-    """Contains methods related to wlan core, to be used in FuchsiaDevice object"""
-
-    def __init__(self, fuchsia_device):
-        self.device = fuchsia_device
-        self.log = logger.create_tagged_trace_logger(
-            'WlanController for FuchsiaDevice | %s' % self.device.ip)
-
-    # TODO(70501): Wrap wlan_lib functions and setup from FuchsiaDevice here
-    # (similar to how WlanPolicyController does it) to prevent FuchsiaDevice
-    # from growing too large.
-    def _configure_wlan(self):
-        pass
-
-    def _deconfigure_wlan(self):
-        pass
-
-    def update_wlan_interfaces(self):
-        """ Retrieves WLAN interfaces from device and sets the FuchsiaDevice
-        attributes.
-        """
-        wlan_interfaces = self.get_interfaces_by_role()
-        self.device.wlan_client_interfaces = wlan_interfaces['client']
-        self.device.wlan_ap_interfaces = wlan_interfaces['ap']
-
-        # Set test interfaces to value from config, else the first found
-        # interface, else None
-        self.device.wlan_client_test_interface_name = self.device.conf_data.get(
-            'wlan_client_test_interface',
-            next(iter(self.device.wlan_client_interfaces), None))
-
-        self.device.wlan_ap_test_interface_name = self.device.conf_data.get(
-            'wlan_ap_test_interface',
-            next(iter(self.device.wlan_ap_interfaces), None))
-
-    def get_interfaces_by_role(self):
-        """ Retrieves WLAN interface information, supplimented by netstack info.
-
-        Returns:
-            Dict with keys 'client' and 'ap', each of which contain WLAN
-            interfaces.
-        """
-
-        # Retrieve WLAN interface IDs
-        response = self.device.sl4f.wlan_lib.wlanGetIfaceIdList()
-        if response.get('error'):
-            raise WlanControllerError('Failed to get WLAN iface ids: %s' %
-                                      response['error'])
-
-        wlan_iface_ids = response.get('result', [])
-        if len(wlan_iface_ids) < 1:
-            return {'client': {}, 'ap': {}}
-
-        # Use IDs to get WLAN interface info and mac addresses
-        wlan_ifaces_by_mac = {}
-        for id in wlan_iface_ids:
-            response = self.device.sl4f.wlan_lib.wlanQueryInterface(id)
-            if response.get('error'):
-                raise WlanControllerError(
-                    'Failed to query wlan iface id %s: %s' %
-                    (id, response['error']))
-
-            mac = response['result'].get('sta_addr', None)
-            if mac is None:
-                # Fallback to older field name to maintain backwards
-                # compatibility with older versions of SL4F's
-                # QueryIfaceResponse. See https://fxrev.dev/562146.
-                mac = response['result'].get('mac_addr')
-
-            wlan_ifaces_by_mac[utils.mac_address_list_to_str(
-                mac)] = response['result']
-
-        # Use mac addresses to query the interfaces from the netstack view,
-        # which allows us to supplement the interface information with the name,
-        # netstack_id, etc.
-
-        # TODO(fxb/75909): This tedium is necessary to get the interface name
-        # because only netstack has that information. The bug linked here is
-        # to reconcile some of the information between the two perspectives, at
-        # which point we can eliminate step.
-        net_ifaces = self.device.netstack_controller.list_interfaces()
-        wlan_ifaces_by_role = {'client': {}, 'ap': {}}
-        for iface in net_ifaces:
-            try:
-                # Some interfaces might not have a MAC
-                iface_mac = utils.mac_address_list_to_str(iface['mac'])
-            except Exception as e:
-                self.log.debug(f'Error {e} getting MAC for iface {iface}')
-                continue
-            if iface_mac in wlan_ifaces_by_mac:
-                wlan_ifaces_by_mac[iface_mac]['netstack_id'] = iface['id']
-
-                # Add to return dict, mapped by role then name.
-                wlan_ifaces_by_role[
-                    wlan_ifaces_by_mac[iface_mac]['role'].lower()][
-                        iface['name']] = wlan_ifaces_by_mac[iface_mac]
-
-        return wlan_ifaces_by_role
-
-    def set_country_code(self, country_code):
-        """Sets country code through the regulatory region service and waits
-        for the code to be applied to WLAN PHY.
-
-        Args:
-            country_code: string, the 2 character country code to set
-
-        Raises:
-            EnvironmentError - failure to get/set regulatory region
-            ConnectionError - failure to query PHYs
-        """
-        self.log.info('Setting DUT country code to %s' % country_code)
-        country_code_response = self.device.sl4f.regulatory_region_lib.setRegion(
-            country_code)
-        if country_code_response.get('error'):
-            raise EnvironmentError(
-                'Failed to set country code (%s) on DUT. Error: %s' %
-                (country_code, country_code_response['error']))
-
-        self.log.info('Verifying DUT country code was correctly set to %s.' %
-                      country_code)
-        phy_ids_response = self.device.sl4f.wlan_lib.wlanPhyIdList()
-        if phy_ids_response.get('error'):
-            raise ConnectionError('Failed to get phy ids from DUT. Error: %s' %
-                                  (country_code, phy_ids_response['error']))
-
-        end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE
-        while time.time() < end_time:
-            for id in phy_ids_response['result']:
-                get_country_response = self.device.sl4f.wlan_lib.wlanGetCountry(
-                    id)
-                if get_country_response.get('error'):
-                    raise ConnectionError(
-                        'Failed to query PHY ID (%s) for country. Error: %s' %
-                        (id, get_country_response['error']))
-
-                set_code = ''.join([
-                    chr(ascii_char)
-                    for ascii_char in get_country_response['result']
-                ])
-                if set_code != country_code:
-                    self.log.debug(
-                        'PHY (id: %s) has incorrect country code set. '
-                        'Expected: %s, Got: %s' % (id, country_code, set_code))
-                    break
-            else:
-                self.log.info('All PHYs have expected country code (%s)' %
-                              country_code)
-                break
-            time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
-        else:
-            raise EnvironmentError('Failed to set DUT country code to %s.' %
-                                   country_code)
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
deleted file mode 100644
index 25f06b4..0000000
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
+++ /dev/null
@@ -1,607 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import subprocess
-import time
-
-from antlion import logger
-from antlion import signals
-
-from antlion.controllers.fuchsia_lib.ffx import FFX, FFXError, FFXTimeout
-from antlion.controllers.fuchsia_lib.sl4f import SL4F
-
-SAVED_NETWORKS = "saved_networks"
-CLIENT_STATE = "client_connections_state"
-CONNECTIONS_ENABLED = "ConnectionsEnabled"
-CONNECTIONS_DISABLED = "ConnectionsDisabled"
-
-STATE_CONNECTED = 'Connected'
-STATE_CONNECTING = 'Connecting'
-STATE_DISCONNECTED = 'Disconnected'
-STATE_CONNECTION_STOPPED = 'ConnectionStopped'
-
-FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT = 30
-
-
-class WlanPolicyControllerError(signals.ControllerError):
-    pass
-
-
-class WlanPolicyController:
-    """Contains methods related to the wlan policy layer, to be used in the
-    FuchsiaDevice object.
-    """
-
-    def __init__(self, sl4f: SL4F, ffx: FFX):
-        self.client_controller = False
-        self.preserved_networks_and_client_state = None
-        self.policy_configured = False
-        self.sl4f = sl4f
-        self.ffx = ffx
-        self.log = logger.create_tagged_trace_logger(
-            f'WlanPolicyController | {ffx.ip}')
-
-    # TODO(b/231252355): Lower default timeout to 15s once ffx becomes more
-    # performant and/or reliable.
-    def configure_wlan(
-            self,
-            preserve_saved_networks: bool,
-            timeout_sec: int = FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT) -> None:
-        """Sets up wlan policy layer.
-
-        Args:
-            preserve_saved_networks: whether to clear existing saved
-                networks and client state, to be restored at test close.
-            timeout: time to wait for device to configure WLAN.
-        """
-        end_time_sec = time.time() + timeout_sec
-
-        # Kill basemgr (Component v1 version of session manager)
-        while time.time() < end_time_sec:
-            response = self.sl4f.basemgr_lib.killBasemgr()
-            if not response.get('error'):
-                self.log.debug('Basemgr kill call successfully issued.')
-                break
-            self.log.debug(response['error'])
-            time.sleep(1)
-        else:
-            raise WlanPolicyControllerError(
-                'Failed to issue successful basemgr kill call.')
-
-        # Stop the session manager, which also holds the Policy controller.
-        try:
-            result = self.ffx.run(
-                'component destroy /core/session-manager/session:session',
-                skip_status_code_check=True)
-
-            if result.returncode == 0:
-                self.log.debug(f"Stopped session: {result.stdout}.")
-            else:
-                if (b'InstanceNotFound' in result.stderr
-                        or b'instance was not found' in result.stderr
-                        or b'does not exist' in result.stderr):
-                    self.log.debug(f'Instance was not found: {result.stderr}.')
-                else:
-                    raise WlanPolicyControllerError(
-                        f'Failed to stop the session: {result.stderr}.')
-        except FFXTimeout or FFXError as e:
-            raise WlanPolicyControllerError from e
-
-        # Acquire control of policy layer
-        controller_errors = []
-        while time.time() < end_time_sec:
-            # Create a client controller
-            response = self.sl4f.wlan_policy_lib.wlanCreateClientController()
-            if response.get('error'):
-                controller_errors.append(response['error'])
-                self.log.debug(response['error'])
-                time.sleep(1)
-                continue
-            # Attempt to use the client controller (failure indicates a closed
-            # channel, meaning the client controller was rejected.
-            response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks()
-            if response.get('error'):
-                controller_errors.append(response['error'])
-                self.log.debug(response['error'])
-                time.sleep(1)
-                continue
-            break
-        else:
-            self.log.warning(
-                "Failed to create and use a WLAN policy client controller. Errors: ["
-                + "; ".join(controller_errors) + "]")
-            raise WlanPolicyControllerError(
-                'Failed to create and use a WLAN policy client controller.')
-
-        self.log.info('ACTS tests now have control of the WLAN policy layer.')
-
-        if preserve_saved_networks and not self.preserved_networks_and_client_state:
-            self.preserved_networks_and_client_state = self.remove_and_preserve_networks_and_client_state(
-            )
-        if not self.start_client_connections():
-            raise WlanPolicyControllerError(
-                'Failed to start client connections during configuration.')
-
-        self.policy_configured = True
-
-    def _deconfigure_wlan(self):
-        if not self.stop_client_connections():
-            raise WlanPolicyControllerError(
-                'Failed to stop client connections during deconfiguration.')
-        self.policy_configured = False
-
-    def clean_up(self) -> None:
-        if self.preserved_networks_and_client_state:
-            # It is possible for policy to have been configured before, but
-            # deconfigured before test end. In this case, in must be setup
-            # before restoring networks
-            if not self.policy_configured:
-                self.configure_wlan()
-            self.restore_preserved_networks_and_client_state()
-
-    def start_client_connections(self):
-        """Allow device to connect to networks via policy layer (including
-        autoconnecting to saved networks).
-
-        Returns:
-            True, if successful. False otherwise."""
-        start_response = self.sl4f.wlan_policy_lib.wlanStartClientConnections()
-        if start_response.get('error'):
-            self.log.error('Failed to start client connections. Err: %s' %
-                           start_response['error'])
-            return False
-        return True
-
-    def stop_client_connections(self):
-        """Prevent device from connecting and autoconnecting to networks via the
-        policy layer.
-
-        Returns:
-            True, if successful. False otherwise."""
-        stop_response = self.sl4f.wlan_policy_lib.wlanStopClientConnections()
-        if stop_response.get('error'):
-            self.log.error('Failed to stop client connections. Err: %s' %
-                           stop_response['error'])
-            return False
-        return True
-
-    def save_and_connect(self, ssid, security, password=None, timeout=30):
-        """ Saves and connects to the network. This is the policy version of
-        connect and check_connect_response because the policy layer
-        requires a saved network and the policy connect does not return
-        success or failure
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-            timeout: int, time in seconds to wait for connection
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        # Save network and check response
-        if not self.save_network(ssid, security, password=password):
-            return False
-        # Make connect call and check response
-        self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        if not self.send_connect_command(ssid, security):
-            return False
-        return self.wait_for_connect(ssid, security, timeout=timeout)
-
-    def save_and_wait_for_autoconnect(self,
-                                      ssid,
-                                      security,
-                                      password=None,
-                                      timeout=30):
-        """Saves a network and waits, expecting an autoconnection to the newly
-        saved network. This differes from save_and_connect, as it doesn't
-        expressly trigger a connection first. There are cases in which an
-        autoconnect won't occur after a save (like if the device is connected
-        already), so this should be used with caution to test very specific
-        situations.
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-            timeout: int, time in seconds to wait for connection
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        if not self.save_network(ssid, security, password=password):
-            return False
-        return self.wait_for_connect(ssid, security, timeout=timeout)
-
-    def remove_and_wait_for_disconnect(self,
-                                       ssid,
-                                       security_type,
-                                       password=None,
-                                       state=None,
-                                       status=None,
-                                       timeout=30):
-        """Removes a single network and waits for a disconnect. It is not
-        guaranteed the device will stay disconnected, as it may autoconnect
-        to a different saved network.
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-            state: string, The connection state we are expecting, ie "Disconnected" or
-                "Failed"
-            status: string, The disconnect status we expect, it "ConnectionStopped" or
-                "ConnectionFailed"
-            timeout: int, time in seconds to wait for connection
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        if not self.remove_network(ssid, security_type, password=password):
-            return False
-        return self.wait_for_disconnect(ssid,
-                                        security_type,
-                                        state=state,
-                                        status=status,
-                                        timeout=timeout)
-
-    def remove_all_networks_and_wait_for_no_connections(self, timeout=30):
-        """Removes all networks and waits until device is not connected to any
-        networks. This should be used as the policy version of disconnect.
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        if not self.remove_all_networks():
-            self.log.error('Failed to remove all networks. Cannot continue to '
-                           'wait_for_no_connections.')
-            return False
-        return self.wait_for_no_connections(timeout=timeout)
-
-    def save_network(self, ssid, security_type, password=None):
-        """Save a network via the policy layer.
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        save_response = self.sl4f.wlan_policy_lib.wlanSaveNetwork(
-            ssid, security_type, target_pwd=password)
-        if save_response.get('error'):
-            self.log.error('Failed to save network %s with error: %s' %
-                           (ssid, save_response['error']))
-            return False
-        return True
-
-    def remove_network(self, ssid, security_type, password=None):
-        """Remove a saved network via the policy layer.
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        remove_response = self.sl4f.wlan_policy_lib.wlanRemoveNetwork(
-            ssid, security_type, target_pwd=password)
-        if remove_response.get('error'):
-            self.log.error('Failed to remove network %s with error: %s' %
-                           (ssid, remove_response['error']))
-            return False
-        return True
-
-    def remove_all_networks(self):
-        """Removes all saved networks from device.
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        remove_all_response = self.sl4f.wlan_policy_lib.wlanRemoveAllNetworks()
-        if remove_all_response.get('error'):
-            self.log.error('Error occurred removing all networks: %s' %
-                           remove_all_response['error'])
-            return False
-        return True
-
-    def get_saved_networks(self):
-        """Retrieves saved networks from device.
-
-        Returns:
-            list of saved networks
-
-        Raises:
-            WlanPolicyControllerError, if retrieval fails.
-        """
-        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks(
-        )
-        if saved_networks_response.get('error'):
-            raise WlanPolicyControllerError(
-                'Failed to retrieve saved networks: %s' %
-                saved_networks_response['error'])
-        return saved_networks_response['result']
-
-    def send_connect_command(self, ssid, security_type):
-        """Sends a connect command to a network that is already saved. This does
-        not wait to guarantee the connection is successful (for that, use
-        save_and_connect).
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-
-        Returns:
-            True, if command send successfully. False otherwise.
-        """
-        connect_response = self.sl4f.wlan_policy_lib.wlanConnect(
-            ssid, security_type)
-        if connect_response.get('error'):
-            self.log.error(
-                'Error occurred when sending policy connect command: %s' %
-                connect_response['error'])
-            return False
-        return True
-
-    def wait_for_connect(self, ssid, security_type, timeout=30):
-        """ Wait until the device has connected to the specified network.
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            timeout: int, seconds to wait for a update showing connection
-        Returns:
-            True if we see a connect to the network, False otherwise.
-        """
-        security_type = str(security_type)
-        # Wait until we've connected.
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            time_left = max(1, int(end_time - time.time()))
-
-            try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(
-                    timeout=time_left)
-            except TimeoutError:
-                self.log.error('Timed out waiting for response from device '
-                               'while waiting for network with SSID "%s" to '
-                               'connect. Device took too long to connect or '
-                               'the request timed out for another reason.' %
-                               ssid)
-                self.sl4f.wlan_policy_lib.wlanSetNewListener()
-                return False
-            if update.get('error'):
-                # This can occur for many reasons, so it is not necessarily a
-                # failure.
-                self.log.debug('Error occurred getting status update: %s' %
-                               update['error'])
-                continue
-
-            for network in update['result']['networks']:
-                if network['id']['ssid'] == ssid or network['id'][
-                        'type_'].lower() == security_type.lower():
-                    if 'state' not in network:
-                        raise WlanPolicyControllerError(
-                            'WLAN status missing state field.')
-                    elif network['state'].lower() == STATE_CONNECTED.lower():
-                        return True
-            # Wait a bit before requesting another status update
-            time.sleep(1)
-        # Stopped getting updates because out timeout
-        self.log.error('Timed out waiting for network with SSID "%s" to '
-                       "connect" % ssid)
-        return False
-
-    def wait_for_disconnect(self,
-                            ssid,
-                            security_type,
-                            state=None,
-                            status=None,
-                            timeout=30):
-        """ Wait for a disconnect of the specified network on the given device. This
-        will check that the correct connection state and disconnect status are
-        given in update. If we do not see a disconnect after some time,
-        return false.
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            state: string, The connection state we are expecting, ie "Disconnected" or
-                "Failed"
-            status: string, The disconnect status we expect, it "ConnectionStopped" or
-                "ConnectionFailed"
-            timeout: int, seconds to wait before giving up
-
-        Returns: True if we saw a disconnect as specified, or False otherwise.
-        """
-        if not state:
-            state = STATE_DISCONNECTED
-        if not status:
-            status = STATE_CONNECTION_STOPPED
-
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            time_left = max(1, int(end_time - time.time()))
-            try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(
-                    timeout=time_left)
-            except TimeoutError:
-                self.log.error(
-                    'Timed out waiting for response from device '
-                    'while waiting for network with SSID "%s" to '
-                    'disconnect. Device took too long to disconnect '
-                    'or the request timed out for another reason.' % ssid)
-                self.sl4f.wlan_policy_lib.wlanSetNewListener()
-                return False
-
-            if update.get('error'):
-                # This can occur for many reasons, so it is not necessarily a
-                # failure.
-                self.log.debug('Error occurred getting status update: %s' %
-                               update['error'])
-                continue
-            # Update should include network, either connected to or recently disconnected.
-            if len(update['result']['networks']) == 0:
-                raise WlanPolicyControllerError(
-                    'WLAN state update is missing network.')
-
-            for network in update['result']['networks']:
-                if network['id']['ssid'] == ssid or network['id'][
-                        'type_'].lower() == security_type.lower():
-                    if 'state' not in network or 'status' not in network:
-                        raise WlanPolicyControllerError(
-                            'Client state summary\'s network is missing fields'
-                        )
-                    # If still connected, we will wait for another update and check again
-                    elif network['state'].lower() == STATE_CONNECTED.lower():
-                        continue
-                    elif network['state'].lower() == STATE_CONNECTING.lower():
-                        self.log.error(
-                            'Update is "Connecting", but device should already be '
-                            'connected; expected disconnect')
-                        return False
-                    # Check that the network state and disconnect status are expected, ie
-                    # that it isn't ConnectionFailed when we expect ConnectionStopped
-                    elif network['state'].lower() != state.lower(
-                    ) or network['status'].lower() != status.lower():
-                        self.log.error(
-                            'Connection failed: a network failure occurred that is unrelated'
-                            'to remove network or incorrect status update. \nExpected state: '
-                            '%s, Status: %s,\nActual update: %s' %
-                            (state, status, network))
-                        return False
-                    else:
-                        return True
-            # Wait a bit before requesting another status update
-            time.sleep(1)
-        # Stopped getting updates because out timeout
-        self.log.error('Timed out waiting for network with SSID "%s" to '
-                       'connect' % ssid)
-        return False
-
-    def wait_for_no_connections(self, timeout=30):
-        """ Waits to see that there are no existing connections the device. This
-        is the simplest way to watch for disconnections when only a single
-        network is saved/present.
-
-        Args:
-            timeout: int, time in seconds to wait to see no connections
-
-        Returns:
-            True, if successful. False, if still connected after timeout.
-        """
-        # If there are already no existing connections when this function is called,
-        # then an update won't be generated by the device, and we'll time out.
-        # Force an update by getting a new listener.
-        self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            time_left = max(1, int(end_time - time.time()))
-            try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(
-                    timeout=time_left)
-            except TimeoutError:
-                self.log.info(
-                    "Timed out getting status update while waiting for all"
-                    " connections to end.")
-                self.sl4f.wlan_policy_lib.wlanSetNewListener()
-                return False
-
-            if update["error"] != None:
-                self.log.info("Failed to get status update")
-                return False
-            # If any network is connected or being connected to, wait for them
-            # to disconnect.
-            if any(network['state'].lower() in
-                   {STATE_CONNECTED.lower(),
-                    STATE_CONNECTING.lower()}
-                   for network in update['result']['networks']):
-                continue
-            else:
-                return True
-        return False
-
-    def remove_and_preserve_networks_and_client_state(self):
-        """ Preserves networks already saved on devices before removing them to
-        setup up for a clean test environment. Records the state of client
-        connections before tests.
-
-        Raises:
-            WlanPolicyControllerError, if the network removal is unsuccessful
-        """
-        # Save preexisting saved networks
-        preserved_networks_and_state = {}
-        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks(
-        )
-        if saved_networks_response.get('error'):
-            raise WlanPolicyControllerError(
-                'Failed to get preexisting saved networks: %s' %
-                saved_networks_response['error'])
-        if saved_networks_response.get('result') != None:
-            preserved_networks_and_state[
-                SAVED_NETWORKS] = saved_networks_response['result']
-
-        # Remove preexisting saved networks
-        if not self.remove_all_networks():
-            raise WlanPolicyControllerError(
-                'Failed to clear networks and disconnect at FuchsiaDevice creation.'
-            )
-
-        self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        update_response = self.sl4f.wlan_policy_lib.wlanGetUpdate()
-        update_result = update_response.get('result', {})
-        if update_result.get('state'):
-            preserved_networks_and_state[CLIENT_STATE] = update_result['state']
-        else:
-            self.log.warn('Failed to get update; test will not start or '
-                          'stop client connections at the end of the test.')
-
-        self.log.info('Saved networks cleared and preserved.')
-        return preserved_networks_and_state
-
-    def restore_preserved_networks_and_client_state(self):
-        """ Restore saved networks and client state onto device if they have
-        been preserved.
-        """
-        if not self.remove_all_networks():
-            self.log.warn('Failed to remove saved networks before restore.')
-        restore_success = True
-        for network in self.preserved_networks_and_client_state[
-                SAVED_NETWORKS]:
-            if not self.save_network(network["ssid"], network["security_type"],
-                                     network["credential_value"]):
-                self.log.warn('Failed to restore network (%s).' %
-                              network['ssid'])
-                restore_success = False
-        starting_state = self.preserved_networks_and_client_state[CLIENT_STATE]
-        if starting_state == CONNECTIONS_ENABLED:
-            state_restored = self.start_client_connections()
-        else:
-            state_restored = self.stop_client_connections()
-        if not state_restored:
-            self.log.warn('Failed to restore client connections state.')
-            restore_success = False
-        if restore_success:
-            self.log.info('Preserved networks and client state restored.')
-            self.preserved_networks_and_client_state = None
-        return restore_success
diff --git a/src/antlion/controllers/fuchsia_lib/location/__init__.py b/src/antlion/controllers/fuchsia_lib/location/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/fuchsia_lib/location/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py b/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py
deleted file mode 100644
index 7b5c692..0000000
--- a/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaRegulatoryRegionLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "location_regulatory_region")
-
-    # TODO(fxb/46727): Provide an analagous call to check the region
-    # configured into the driver.
-    def setRegion(self, region_code):
-        """Set regulatory region.
-
-        Args:
-            region_code: 2-byte ASCII string.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "location_regulatory_region_facade.set_region"
-        test_args = {"region": region_code}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/logging_lib.py b/src/antlion/controllers/fuchsia_lib/logging_lib.py
deleted file mode 100644
index aba1acf..0000000
--- a/src/antlion/controllers/fuchsia_lib/logging_lib.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaLoggingLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "logging")
-
-    def logE(self, message):
-        """Log a message of level Error directly to the syslog.
-
-        Args:
-            message: The message to log.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "logging_facade.LogErr"
-        test_args = {
-            "message": '[%s] %s' % (datetime.datetime.now(), message),
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def logI(self, message):
-        """Log a message of level Info directly to the syslog.
-
-        Args:
-            message: The message to log.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "logging_facade.LogInfo"
-        test_args = {"message": '[%s] %s' % (datetime.datetime.now(), message)}
-
-        return self.send_command(test_cmd, test_args)
-
-    def logW(self, message):
-        """Log a message of level Warning directly to the syslog.
-
-        Args:
-            message: The message to log.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "logging_facade.LogWarn"
-        test_args = {"message": '[%s] %s' % (datetime.datetime.now(), message)}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py b/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
deleted file mode 100644
index 20893b2..0000000
--- a/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaNetstackLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "netstack")
-
-    def netstackListInterfaces(self):
-        """ListInterfaces command
-
-        Returns:
-            List of interface paths
-        """
-        test_cmd = "netstack_facade.ListInterfaces"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def enableInterface(self, id):
-        """Enable Interface
-
-        Args:
-            id: The interface ID.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "netstack_facade.EnableInterface"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def disableInterface(self, id):
-        """Disable Interface
-
-        Args:
-            id: The interface ID.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "netstack_facade.DisableInterface"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/package_server.py b/src/antlion/controllers/fuchsia_lib/package_server.py
deleted file mode 100644
index b0a45c7..0000000
--- a/src/antlion/controllers/fuchsia_lib/package_server.py
+++ /dev/null
@@ -1,248 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import os
-import shutil
-import socket
-import subprocess
-import tarfile
-import tempfile
-
-from dataclasses import dataclass
-from datetime import datetime
-from typing import TextIO, List, Optional
-
-from antlion import context
-from antlion import logger
-from antlion import signals
-from antlion import utils
-
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHError, SSHProvider
-from antlion.controllers.fuchsia_lib.utils_lib import wait_for_port
-from antlion.tracelogger import TraceLogger
-
-DEFAULT_FUCHSIA_REPO_NAME = "fuchsia.com"
-PM_SERVE_STOP_TIMEOUT_SEC = 5
-
-
-class PackageServerError(signals.TestAbortClass):
-    pass
-
-
-def random_port() -> int:
-    s = socket.socket()
-    s.bind(('', 0))
-    return s.getsockname()[1]
-
-
-@dataclass
-class Route:
-    """Represent a route in the routing table."""
-    preferred_source: Optional[str]
-
-
-def find_routes_to(dest_ip) -> List[Route]:
-    """Find the routes used to reach a destination.
-
-    Look through the routing table for the routes that would be used without
-    sending any packets. This is especially helpful for when the device is
-    currently unreachable.
-
-    Only natively supported on Linux. MacOS has iproute2mac, but it doesn't
-    support JSON formatted output.
-
-    TODO(http://b/238924195): Add support for MacOS.
-
-    Args:
-        dest_ip: IP address of the destination
-
-    Throws:
-        CalledProcessError: if the ip command returns a non-zero exit code
-        JSONDecodeError: if the ip command doesn't return JSON
-
-    Returns:
-        Routes with destination to dest_ip.
-    """
-    resp = subprocess.run(f"ip -json route get {dest_ip}".split(),
-                          capture_output=True,
-                          check=True)
-    routes = json.loads(resp.stdout)
-    return [Route(r.get("prefsrc")) for r in routes]
-
-
-def find_host_ip(device_ip: str) -> str:
-    """Find the host's source IP used to reach a device.
-
-    Not all host interfaces can talk to a given device. This limitation can
-    either be physical through hardware or virtual through routing tables.
-    Look through the routing table without sending any packets then return the
-    preferred source IP address.
-
-    Args:
-        device_ip: IP address of the device
-
-    Raises:
-        PackageServerError: if there are multiple or no routes to device_ip, or
-            if the route doesn't contain "prefsrc"
-
-    Returns:
-        The host IP used to reach device_ip.
-    """
-    routes = find_routes_to(device_ip)
-    if len(routes) != 1:
-        raise PackageServerError(
-            f"Expected only one route to {device_ip}, got {routes}")
-
-    route = routes[0]
-    if not route.preferred_source:
-        raise PackageServerError(f'Route does not contain "prefsrc": {route}')
-    return route.preferred_source
-
-
-class PackageServer:
-    """Package manager for Fuchsia; an interface to the "pm" CLI tool."""
-
-    def __init__(self, packages_archive_path: str) -> None:
-        """
-        Args:
-            packages_archive_path: Path to an archive containing the pm binary
-                and amber-files.
-        """
-        self.log: TraceLogger = logger.create_tagged_trace_logger("pm")
-
-        self._server_log: Optional[TextIO] = None
-        self._server_proc: Optional[subprocess.Popen] = None
-        self._log_path: Optional[str] = None
-
-        self._tmp_dir = tempfile.mkdtemp(prefix="packages-")
-        tar = tarfile.open(packages_archive_path, "r:gz")
-        tar.extractall(self._tmp_dir)
-
-        self._binary_path = os.path.join(self._tmp_dir, "pm")
-        self._packages_path = os.path.join(self._tmp_dir, "amber-files")
-        self._port = random_port()
-
-        self._assert_repo_has_not_expired()
-
-    def clean_up(self) -> None:
-        if self._server_proc:
-            self.stop_server()
-        if self._tmp_dir:
-            shutil.rmtree(self._tmp_dir)
-
-    def _assert_repo_has_not_expired(self) -> None:
-        """Abort if the repository metadata has expired.
-
-        Raises:
-            TestAbortClass: when the timestamp.json file has expired
-        """
-        with open(f'{self._packages_path}/repository/timestamp.json',
-                  'r') as f:
-            data = json.load(f)
-            expiresAtRaw = data["signed"]["expires"]
-            expiresAt = datetime.strptime(expiresAtRaw, '%Y-%m-%dT%H:%M:%SZ')
-            if expiresAt <= datetime.now():
-                raise signals.TestAbortClass(
-                    f'{self._packages_path}/repository/timestamp.json has expired on {expiresAtRaw}'
-                )
-
-    def start(self) -> None:
-        """Start the package server.
-
-        Does not check for errors; view the log file for any errors.
-        """
-        if self._server_proc:
-            self.log.warn(
-                "Skipping to start the server since it has already been started"
-            )
-            return
-
-        pm_command = f'{self._binary_path} serve -c 2 -repo {self._packages_path} -l :{self._port}'
-
-        root_dir = context.get_current_context().get_full_output_path()
-        epoch = utils.get_current_epoch_time()
-        time_stamp = logger.normalize_log_line_timestamp(
-            logger.epoch_to_log_line_timestamp(epoch))
-        self._log_path = os.path.join(root_dir, f'pm_server.{time_stamp}.log')
-
-        self._server_log = open(self._log_path, 'a+')
-        self._server_proc = subprocess.Popen(pm_command.split(),
-                                             preexec_fn=os.setpgrp,
-                                             stdout=self._server_log,
-                                             stderr=subprocess.STDOUT)
-        try:
-            wait_for_port('127.0.0.1', self._port)
-        except TimeoutError as e:
-            if self._server_log:
-                self._server_log.close()
-            if self._log_path:
-                with open(self._log_path, 'r') as f:
-                    logs = f.read()
-            raise TimeoutError(
-                f"pm serve failed to expose port {self._port}. Logs:\n{logs}"
-            ) from e
-
-        self.log.info(f'Serving packages on port {self._port}')
-
-    def configure_device(self,
-                         ssh: SSHProvider,
-                         repo_name=DEFAULT_FUCHSIA_REPO_NAME) -> None:
-        """Configure the device to use this package server.
-
-        Args:
-            ssh: Device SSH transport channel
-            repo_name: Name of the repo to alias this package server
-        """
-        # Remove any existing repositories that may be stale.
-        try:
-            ssh.run(f'pkgctl repo rm fuchsia-pkg://{repo_name}')
-        except FuchsiaSSHError as e:
-            if 'NOT_FOUND' not in e.result.stderr:
-                raise e
-
-        # Configure the device with the new repository.
-        host_ip = find_host_ip(ssh.config.host_name)
-        repo_url = f"http://{host_ip}:{self._port}"
-        ssh.run(
-            f"pkgctl repo add url -f 2 -n {repo_name} {repo_url}/config.json")
-        self.log.info(
-            f'Added repo "{repo_name}" as {repo_url} on device {ssh.config.host_name}'
-        )
-
-    def stop_server(self) -> None:
-        """Stop the package server."""
-        if not self._server_proc:
-            self.log.warn(
-                "Skipping to stop the server since it hasn't been started yet")
-            return
-
-        self._server_proc.terminate()
-        try:
-            self._server_proc.wait(timeout=PM_SERVE_STOP_TIMEOUT_SEC)
-        except subprocess.TimeoutExpired:
-            self.log.warn(
-                f"Taking over {PM_SERVE_STOP_TIMEOUT_SEC}s to stop. Killing the server"
-            )
-            self._server_proc.kill()
-            self._server_proc.wait(timeout=PM_SERVE_STOP_TIMEOUT_SEC)
-        finally:
-            if self._server_log:
-                self._server_log.close()
-
-        self._server_proc = None
-        self._log_path = None
-        self._server_log = None
diff --git a/src/antlion/controllers/fuchsia_lib/sl4f.py b/src/antlion/controllers/fuchsia_lib/sl4f.py
deleted file mode 100644
index 1958772..0000000
--- a/src/antlion/controllers/fuchsia_lib/sl4f.py
+++ /dev/null
@@ -1,161 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import sys
-
-from antlion import logger
-from antlion.controllers.fuchsia_lib import utils_lib
-from antlion.controllers.fuchsia_lib.audio_lib import FuchsiaAudioLib
-from antlion.controllers.fuchsia_lib.basemgr_lib import FuchsiaBasemgrLib
-from antlion.controllers.fuchsia_lib.bt.avdtp_lib import FuchsiaAvdtpLib
-from antlion.controllers.fuchsia_lib.bt.ble_lib import FuchsiaBleLib
-from antlion.controllers.fuchsia_lib.bt.bts_lib import FuchsiaBtsLib
-from antlion.controllers.fuchsia_lib.bt.gattc_lib import FuchsiaGattcLib
-from antlion.controllers.fuchsia_lib.bt.gatts_lib import FuchsiaGattsLib
-from antlion.controllers.fuchsia_lib.bt.hfp_lib import FuchsiaHfpLib
-from antlion.controllers.fuchsia_lib.bt.rfcomm_lib import FuchsiaRfcommLib
-from antlion.controllers.fuchsia_lib.bt.sdp_lib import FuchsiaProfileServerLib
-from antlion.controllers.fuchsia_lib.hardware_power_statecontrol_lib import FuchsiaHardwarePowerStatecontrolLib
-from antlion.controllers.fuchsia_lib.location.regulatory_region_lib import FuchsiaRegulatoryRegionLib
-from antlion.controllers.fuchsia_lib.logging_lib import FuchsiaLoggingLib
-from antlion.controllers.fuchsia_lib.netstack.netstack_lib import FuchsiaNetstackLib
-from antlion.controllers.fuchsia_lib.ssh import SSHProvider, FuchsiaSSHError
-from antlion.controllers.fuchsia_lib.wlan_ap_policy_lib import FuchsiaWlanApPolicyLib
-from antlion.controllers.fuchsia_lib.wlan_deprecated_configuration_lib import FuchsiaWlanDeprecatedConfigurationLib
-from antlion.controllers.fuchsia_lib.wlan_lib import FuchsiaWlanLib
-from antlion.controllers.fuchsia_lib.wlan_policy_lib import FuchsiaWlanPolicyLib
-
-DEFAULT_SL4F_PORT = 80
-START_SL4F_V2_CMD = 'start_sl4f'
-
-
-class SL4F:
-    """Module for Fuchsia devices to interact with the SL4F tool.
-
-    Attributes:
-        ssh: SSHProvider transport to start and stop SL4F.
-        address: http address for SL4F server including SL4F port.
-        log: Logger for the device-specific instance of SL4F.
-    """
-
-    def __init__(self, ssh: SSHProvider,
-                 port: int = DEFAULT_SL4F_PORT) -> None:
-        """
-        Args:
-            ssh: SSHProvider transport to start and stop SL4F.
-            port: Port for the SL4F server to listen on.
-        """
-        if sys.version_info < (3, 9):
-            # TODO(http://b/261746355): Remove this if statement once the
-            # minimum Python version is 3.9 or newer.
-            host = ipaddress.ip_address(ssh.config.host_name.split('%')[0])
-            if host.version == 4:
-                self.address = f'http://{host}:{port}'
-            elif host.version == 6:
-                host = ssh.config.host_name
-                self.address = f'http://[{host}]:{port}'
-        else:
-            host = ipaddress.ip_address(ssh.config.host_name)
-            if host.version == 4:
-                self.address = f'http://{host}:{port}'
-            elif host.version == 6:
-                self.address = f'http://[{host}]:{port}'
-
-        self.log = logger.create_tagged_trace_logger(f"SL4F | {self.address}")
-
-        try:
-            ssh.run(START_SL4F_V2_CMD).stdout
-        except FuchsiaSSHError:
-            # TODO(fxbug.dev/99331) Remove support to run SL4F in CFv1 mode
-            # once ACTS no longer use images that comes with only CFv1 SL4F.
-            self.log.warn(
-                "Running SL4F in CFv1 mode, "
-                "this is deprecated for images built after 5/9/2022, "
-                "see https://fxbug.dev/77056 for more info.")
-            ssh.stop_v1_component("sl4f")
-            ssh.start_v1_component("sl4f")
-
-        utils_lib.wait_for_port(str(host), port)
-        self._init_libraries()
-        self._verify_sl4f_connection()
-
-    def _init_libraries(self) -> None:
-        # Grab commands from FuchsiaAudioLib
-        self.audio_lib = FuchsiaAudioLib(self.address)
-
-        # Grab commands from FuchsiaAvdtpLib
-        self.avdtp_lib = FuchsiaAvdtpLib(self.address)
-
-        # Grab commands from FuchsiaHfpLib
-        self.hfp_lib = FuchsiaHfpLib(self.address)
-
-        # Grab commands from FuchsiaRfcommLib
-        self.rfcomm_lib = FuchsiaRfcommLib(self.address)
-
-        # Grab commands from FuchsiaBasemgrLib
-        self.basemgr_lib = FuchsiaBasemgrLib(self.address)
-
-        # Grab commands from FuchsiaBleLib
-        self.ble_lib = FuchsiaBleLib(self.address)
-
-        # Grab commands from FuchsiaBtsLib
-        self.bts_lib = FuchsiaBtsLib(self.address)
-
-        # Grab commands from FuchsiaGattcLib
-        self.gattc_lib = FuchsiaGattcLib(self.address)
-
-        # Grab commands from FuchsiaGattsLib
-        self.gatts_lib = FuchsiaGattsLib(self.address)
-
-        # Grab commands from FuchsiaHardwarePowerStatecontrolLib
-        self.hardware_power_statecontrol_lib = (
-            FuchsiaHardwarePowerStatecontrolLib(self.address))
-
-        # Grab commands from FuchsiaLoggingLib
-        self.logging_lib = FuchsiaLoggingLib(self.address)
-
-        # Grab commands from FuchsiaNetstackLib
-        self.netstack_lib = FuchsiaNetstackLib(self.address)
-
-        # Grab commands from FuchsiaProfileServerLib
-        self.sdp_lib = FuchsiaProfileServerLib(self.address)
-
-        # Grab commands from FuchsiaRegulatoryRegionLib
-        self.regulatory_region_lib = FuchsiaRegulatoryRegionLib(self.address)
-
-        # Grabs command from FuchsiaWlanDeprecatedConfigurationLib
-        self.wlan_deprecated_configuration_lib = (
-            FuchsiaWlanDeprecatedConfigurationLib(self.address))
-
-        # Grab commands from FuchsiaWlanLib
-        self.wlan_lib = FuchsiaWlanLib(self.address)
-
-        # Grab commands from FuchsiaWlanApPolicyLib
-        self.wlan_ap_policy_lib = FuchsiaWlanApPolicyLib(self.address)
-
-        # Grab commands from FuchsiaWlanPolicyLib
-        self.wlan_policy_lib = FuchsiaWlanPolicyLib(self.address)
-
-    def _verify_sl4f_connection(self) -> None:
-        """Verify SL4F commands can run on server."""
-
-        self.log.info('Verifying SL4F commands can run.')
-        try:
-            self.wlan_lib.wlanGetIfaceIdList()
-        except Exception as err:
-            raise ConnectionError(
-                f'Failed to connect and run command via SL4F. Err: {err}')
diff --git a/src/antlion/controllers/fuchsia_lib/ssh.py b/src/antlion/controllers/fuchsia_lib/ssh.py
deleted file mode 100644
index ec8f762..0000000
--- a/src/antlion/controllers/fuchsia_lib/ssh.py
+++ /dev/null
@@ -1,271 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import subprocess
-import time
-
-from dataclasses import dataclass
-from typing import List, Union
-
-from antlion import logger
-from antlion import signals
-
-DEFAULT_SSH_USER: str = "fuchsia"
-DEFAULT_SSH_PORT: int = 22
-DEFAULT_SSH_PRIVATE_KEY: str = "~/.ssh/fuchsia_ed25519"
-DEFAULT_SSH_TIMEOUT_SEC: int = 60
-DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 30
-DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30
-# The default package repository for all components.
-FUCHSIA_PACKAGE_REPO_NAME = 'fuchsia.com'
-
-
-class SSHResult:
-    """Result of an SSH command."""
-
-    def __init__(
-        self, process: Union[subprocess.CompletedProcess,
-                             subprocess.CalledProcessError]
-    ) -> None:
-        self._raw_stdout = process.stdout
-        self._stdout = process.stdout.decode('utf-8', errors='replace')
-        self._stderr = process.stderr.decode('utf-8', errors='replace')
-        self._exit_status: int = process.returncode
-
-    def __str__(self):
-        if self.exit_status == 0:
-            return self.stdout
-        return f'status {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
-
-    @property
-    def stdout(self) -> str:
-        return self._stdout
-
-    @property
-    def stderr(self) -> str:
-        return self._stderr
-
-    @property
-    def exit_status(self) -> int:
-        return self._exit_status
-
-    @property
-    def raw_stdout(self) -> bytes:
-        return self._raw_stdout
-
-
-class FuchsiaSSHError(signals.TestError):
-    """A SSH command returned with a non-zero status code."""
-
-    def __init__(self, command: str, result: SSHResult):
-        super().__init__(
-            f'SSH command "{command}" unexpectedly returned {result}')
-        self.result = result
-
-
-class SSHTimeout(signals.TestError):
-    """A SSH command timed out."""
-
-    def __init__(self, err: subprocess.TimeoutExpired):
-        super().__init__(
-            f'SSH command "{err.cmd}" timed out after {err.timeout}s, '
-            f'stdout="{err.stdout}", stderr="{err.stderr}"')
-
-
-class FuchsiaSSHTransportError(signals.TestError):
-    """Failure to send an SSH command."""
-
-
-@dataclass
-class SSHConfig:
-    """SSH client config."""
-
-    # SSH flags. See ssh(1) for full details.
-    host_name: str
-    identity_file: str
-
-    ssh_binary: str = 'ssh'
-    config_file: str = '/dev/null'
-    port: int = 22
-    user: str = DEFAULT_SSH_USER
-
-    # SSH options. See ssh_config(5) for full details.
-    connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
-    server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL
-    strict_host_key_checking: bool = False
-    user_known_hosts_file: str = "/dev/null"
-    log_level: str = "ERROR"
-
-    def full_command(self, command: str, force_tty: bool = False) -> List[str]:
-        """Generate the complete command to execute command over SSH.
-
-        Args:
-            command: The command to run over SSH
-            force_tty: Force pseudo-terminal allocation. This can be used to
-                execute arbitrary screen-based programs on a remote machine,
-                which can be very useful, e.g. when implementing menu services.
-
-        Returns:
-            Arguments composing the complete call to SSH.
-        """
-        optional_flags = []
-        if force_tty:
-            # Multiple -t options force tty allocation, even if ssh has no local
-            # tty. This is necessary for launching ssh with subprocess without
-            # shell=True.
-            optional_flags.append('-tt')
-
-        return [
-            self.ssh_binary,
-            # SSH flags
-            '-i',
-            self.identity_file,
-            '-F',
-            self.config_file,
-            '-p',
-            str(self.port),
-            # SSH configuration options
-            '-o',
-            f'ConnectTimeout={self.connect_timeout}',
-            '-o',
-            f'ServerAliveInterval={self.server_alive_interval}',
-            '-o',
-            f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}',
-            '-o',
-            f'UserKnownHostsFile={self.user_known_hosts_file}',
-            '-o',
-            f'LogLevel={self.log_level}',
-        ] + optional_flags + [
-            f'{self.user}@{self.host_name}'
-        ] + command.split()
-
-
-class SSHProvider:
-    """Device-specific provider for SSH clients."""
-
-    def __init__(self, config: SSHConfig) -> None:
-        """
-        Args:
-            config: SSH client config
-        """
-        logger_tag = f"ssh | {config.host_name}"
-        if config.port != DEFAULT_SSH_PORT:
-            logger_tag += f':{config.port}'
-
-        # Check if the private key exists
-
-        self.log = logger.create_tagged_trace_logger(logger_tag)
-        self.config = config
-
-    def run(self,
-            command: str,
-            timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
-            connect_retries: int = 3,
-            force_tty: bool = False) -> SSHResult:
-        """Run a command on the device then exit.
-
-        Args:
-            command: String to send to the device.
-            timeout_sec: Seconds to wait for the command to complete.
-            connect_retries: Amount of times to retry connect on fail.
-            force_tty: Force pseudo-terminal allocation.
-
-        Raises:
-            FuchsiaSSHError: if the SSH command returns a non-zero status code
-            FuchsiaSSHTimeout: if there is no response within timeout_sec
-            FuchsiaSSHTransportError: if SSH fails to run the command
-
-        Returns:
-            SSHResults from the executed command.
-        """
-        err: Exception
-        for i in range(0, connect_retries):
-            try:
-                return self._run(command, timeout_sec, force_tty)
-            except FuchsiaSSHTransportError as e:
-                err = e
-                self.log.warn(f'Connect failed: {e}')
-        raise err
-
-    def _run(self, command: str, timeout_sec: int, force_tty: bool) -> SSHResult:
-        full_command = self.config.full_command(command, force_tty)
-        self.log.debug(f'Running "{" ".join(full_command)}"')
-        try:
-            process = subprocess.run(full_command,
-                                     capture_output=True,
-                                     timeout=timeout_sec,
-                                     check=True)
-        except subprocess.CalledProcessError as e:
-            if e.returncode == 255:
-                stderr = e.stderr.decode('utf-8', errors='replace')
-                if 'Name or service not known' in stderr or 'Host does not exist' in stderr:
-                    raise FuchsiaSSHTransportError(
-                        f'Hostname {self.config.host_name} cannot be resolved to an address'
-                    ) from e
-                if 'Connection timed out' in stderr:
-                    raise FuchsiaSSHTransportError(
-                        f'Failed to establish a connection to {self.config.host_name} within {timeout_sec}s'
-                    ) from e
-                if 'Connection refused' in stderr:
-                    raise FuchsiaSSHTransportError(
-                        f'Connection refused by {self.config.host_name}') from e
-
-            raise FuchsiaSSHError(command, SSHResult(e)) from e
-        except subprocess.TimeoutExpired as e:
-            raise SSHTimeout(e) from e
-
-        return SSHResult(process)
-
-    def start_v1_component(self,
-                           component: str,
-                           timeout_sec: int = 5,
-                           repo: str = FUCHSIA_PACKAGE_REPO_NAME) -> None:
-        """Start a CFv1 component in the background.
-
-        Args:
-            component: Name of the component without ".cmx".
-            timeout_sec: Seconds to wait for the process to show up in 'ps'.
-            repo: Default package repository for all components.
-
-        Raises:
-            TimeoutError: when the component doesn't launch within timeout_sec
-        """
-        # The "run -d" command will hang when executed without a pseudo-tty
-        # allocated.
-        self.run(
-            f'run -d fuchsia-pkg://{repo}/{component}#meta/{component}.cmx', force_tty=True)
-
-        timeout = time.perf_counter() + timeout_sec
-        while True:
-            ps_cmd = self.run("ps")
-            if f'{component}.cmx' in ps_cmd.stdout:
-                return
-            if time.perf_counter() > timeout:
-                raise TimeoutError(
-                    f'Failed to start "{component}.cmx" after {timeout_sec}s')
-
-    def stop_v1_component(self, component: str) -> None:
-        """Stop all instances of a CFv1 component.
-
-        Args:
-            component: Name of the component without ".cmx"
-        """
-        try:
-            self.run(f'killall {component}.cmx')
-        except FuchsiaSSHError as e:
-            if 'no tasks found' in e.result.stderr:
-                return
-            raise e
diff --git a/src/antlion/controllers/fuchsia_lib/utils_lib.py b/src/antlion/controllers/fuchsia_lib/utils_lib.py
deleted file mode 100644
index 897749f..0000000
--- a/src/antlion/controllers/fuchsia_lib/utils_lib.py
+++ /dev/null
@@ -1,248 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import logging
-import socket
-import tarfile
-import tempfile
-import time
-
-from antlion import utils
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHError
-from antlion.libs.proc import job
-from antlion.utils import get_fuchsia_mdns_ipv6_address
-
-MDNS_LOOKUP_RETRY_MAX = 3
-FASTBOOT_TIMEOUT = 30
-AFTER_FLASH_BOOT_TIME = 30
-WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC = 360
-PROCESS_CHECK_WAIT_TIME_SEC = 30
-
-FUCHSIA_SDK_URL = "gs://fuchsia-sdk/development"
-FUCHSIA_RELEASE_TESTING_URL = "gs://fuchsia-release-testing/images"
-
-
-def flash(fuchsia_device, use_ssh=False,
-          fuchsia_reconnect_after_reboot_time=5):
-    """A function to flash, not pave, a fuchsia_device
-
-    Args:
-        fuchsia_device: An ACTS fuchsia_device
-
-    Returns:
-        True if successful.
-    """
-    if not fuchsia_device.authorized_file:
-        raise ValueError('A ssh authorized_file must be present in the '
-                         'ACTS config to flash fuchsia_devices.')
-    # This is the product type from the fx set command.
-    # Do 'fx list-products' to see options in Fuchsia source tree.
-    if not fuchsia_device.product_type:
-        raise ValueError('A product type must be specified to flash '
-                         'fuchsia_devices.')
-    # This is the board type from the fx set command.
-    # Do 'fx list-boards' to see options in Fuchsia source tree.
-    if not fuchsia_device.board_type:
-        raise ValueError('A board type must be specified to flash '
-                         'fuchsia_devices.')
-    if not fuchsia_device.build_number:
-        fuchsia_device.build_number = 'LATEST'
-    if not fuchsia_device.mdns_name:
-        raise ValueError(
-            'Either fuchsia_device mdns_name must be specified or '
-            'ip must be the mDNS name to be able to flash.')
-
-    file_to_download = None
-    image_archive_path = None
-    image_path = None
-
-    if not fuchsia_device.specific_image:
-        product_build = fuchsia_device.product_type
-        if fuchsia_device.build_type:
-            product_build = f'{product_build}_{fuchsia_device.build_type}'
-        if 'LATEST' in fuchsia_device.build_number:
-            sdk_version = 'sdk'
-            if 'LATEST_F' in fuchsia_device.build_number:
-                f_branch = fuchsia_device.build_number.split('LATEST_F', 1)[1]
-                sdk_version = f'f{f_branch}_sdk'
-            file_to_download = (
-                f'{FUCHSIA_RELEASE_TESTING_URL}/'
-                f'{sdk_version}-{product_build}.{fuchsia_device.board_type}-release.tgz'
-            )
-        else:
-            # Must be a fully qualified build number (e.g. 5.20210721.4.1215)
-            file_to_download = (
-                f'{FUCHSIA_SDK_URL}/{fuchsia_device.build_number}/images/'
-                f'{product_build}.{fuchsia_device.board_type}-release.tgz')
-    elif 'gs://' in fuchsia_device.specific_image:
-        file_to_download = fuchsia_device.specific_image
-    elif os.path.isdir(fuchsia_device.specific_image):
-        image_path = fuchsia_device.specific_image
-    elif tarfile.is_tarfile(fuchsia_device.specific_image):
-        image_archive_path = fuchsia_device.specific_image
-    else:
-        raise ValueError(
-            f'Invalid specific_image "{fuchsia_device.specific_image}"')
-
-    if image_path:
-        reboot_to_bootloader(fuchsia_device, use_ssh,
-                             fuchsia_reconnect_after_reboot_time)
-        logging.info(
-            f'Flashing {fuchsia_device.mdns_name} with {image_path} using authorized keys "{fuchsia_device.authorized_file}".'
-        )
-        run_flash_script(fuchsia_device, image_path)
-    else:
-        suffix = fuchsia_device.board_type
-        with tempfile.TemporaryDirectory(suffix=suffix) as image_path:
-            if file_to_download:
-                logging.info(f'Downloading {file_to_download} to {image_path}')
-                job.run(f'gsutil cp {file_to_download} {image_path}')
-                image_archive_path = os.path.join(
-                    image_path, os.path.basename(file_to_download))
-
-            if image_archive_path:
-                # Use tar command instead of tarfile.extractall, as it takes too long.
-                job.run(f'tar xfvz {image_archive_path} -C {image_path}',
-                        timeout=120)
-
-            reboot_to_bootloader(fuchsia_device, use_ssh,
-                                 fuchsia_reconnect_after_reboot_time)
-
-            logging.info(
-                f'Flashing {fuchsia_device.mdns_name} with {image_archive_path} using authorized keys "{fuchsia_device.authorized_file}".'
-            )
-            run_flash_script(fuchsia_device, image_path)
-    return True
-
-
-def reboot_to_bootloader(fuchsia_device,
-                         use_ssh=False,
-                         fuchsia_reconnect_after_reboot_time=5):
-    import psutil
-    import usbinfo
-
-    if use_ssh:
-        logging.info('Sending reboot command via SSH to '
-                     'get into bootloader.')
-        # Sending this command will put the device in fastboot
-        # but it does not guarantee the device will be in fastboot
-        # after this command.  There is no check so if there is an
-        # expectation of the device being in fastboot, then some
-        # other check needs to be done.
-        try:
-            fuchsia_device.ssh.run(
-                'dm rb', timeout_sec=fuchsia_reconnect_after_reboot_time)
-        except FuchsiaSSHError as e:
-            if 'closed by remote host' not in e.result.stderr:
-                raise e
-    else:
-        pass
-        ## Todo: Add elif for SL4F if implemented in SL4F
-
-    time_counter = 0
-    while time_counter < FASTBOOT_TIMEOUT:
-        logging.info('Checking to see if fuchsia_device(%s) SN: %s is in '
-                     'fastboot. (Attempt #%s Timeout: %s)' %
-                     (fuchsia_device.mdns_name, fuchsia_device.serial_number,
-                      str(time_counter + 1), FASTBOOT_TIMEOUT))
-        for usb_device in usbinfo.usbinfo():
-            if (usb_device['iSerialNumber'] == fuchsia_device.serial_number
-                    and usb_device['iProduct'] == 'USB_download_gadget'):
-                logging.info(
-                    'fuchsia_device(%s) SN: %s is in fastboot.' %
-                    (fuchsia_device.mdns_name, fuchsia_device.serial_number))
-                time_counter = FASTBOOT_TIMEOUT
-        time_counter = time_counter + 1
-        if time_counter == FASTBOOT_TIMEOUT:
-            for fail_usb_device in usbinfo.usbinfo():
-                logging.debug(fail_usb_device)
-            raise TimeoutError(
-                'fuchsia_device(%s) SN: %s '
-                'never went into fastboot' %
-                (fuchsia_device.mdns_name, fuchsia_device.serial_number))
-        time.sleep(1)
-
-    end_time = time.time() + WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC
-    # Attempt to wait for existing flashing process to finish
-    while time.time() < end_time:
-        flash_process_found = False
-        for proc in psutil.process_iter():
-            if "bash" in proc.name() and "flash.sh" in proc.cmdline():
-                logging.info(
-                    "Waiting for existing flash.sh process to complete.")
-                time.sleep(PROCESS_CHECK_WAIT_TIME_SEC)
-                flash_process_found = True
-        if not flash_process_found:
-            break
-
-
-def run_flash_script(fuchsia_device, flash_dir):
-    try:
-        flash_output = job.run(
-            f'bash {flash_dir}/flash.sh --ssh-key={fuchsia_device.authorized_file} -s {fuchsia_device.serial_number}',
-            timeout=120)
-        logging.debug(flash_output.stderr)
-    except job.TimeoutError as err:
-        raise TimeoutError(err)
-
-    logging.info('Waiting %s seconds for device'
-                 ' to come back up after flashing.' % AFTER_FLASH_BOOT_TIME)
-    time.sleep(AFTER_FLASH_BOOT_TIME)
-    logging.info('Updating device to new IP addresses.')
-    mdns_ip = None
-    for retry_counter in range(MDNS_LOOKUP_RETRY_MAX):
-        mdns_ip = get_fuchsia_mdns_ipv6_address(fuchsia_device.mdns_name)
-        if mdns_ip:
-            break
-        else:
-            time.sleep(1)
-    if mdns_ip and utils.is_valid_ipv6_address(mdns_ip):
-        logging.info('IP for fuchsia_device(%s) changed from %s to %s' %
-                     (fuchsia_device.mdns_name, fuchsia_device.ip, mdns_ip))
-        fuchsia_device.ip = mdns_ip
-        fuchsia_device.address = "http://[{}]:{}".format(
-            fuchsia_device.ip, fuchsia_device.sl4f_port)
-    else:
-        raise ValueError('Invalid IP: %s after flashing.' %
-                         fuchsia_device.mdns_name)
-
-
-def wait_for_port(host: str, port: int, timeout_sec: int = 5) -> None:
-    """Wait for the host to start accepting connections on the port.
-
-    Some services take some time to start. Call this after launching the service
-    to avoid race conditions.
-
-    Args:
-        host: IP of the running service.
-        port: Port of the running service.
-        timeout_sec: Seconds to wait until raising TimeoutError
-
-    Raises:
-        TimeoutError: when timeout_sec has expired without a successful
-            connection to the service
-    """
-    timeout = time.perf_counter() + timeout_sec
-    while True:
-        try:
-            with socket.create_connection((host, port), timeout=timeout_sec):
-                return
-        except ConnectionRefusedError as e:
-            if time.perf_counter() > timeout:
-                raise TimeoutError(
-                    f'Waited over {timeout_sec}s for the service to start '
-                    f'accepting connections at {host}:{port}') from e
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
deleted file mode 100644
index 07ff2f8..0000000
--- a/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaWlanApPolicyLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "wlan_ap_policy")
-
-    def wlanStartAccessPoint(self, target_ssid, security_type, target_pwd,
-                             connectivity_mode, operating_band):
-        """ Start an Access Point.
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network. Possible inputs:
-                    	"none", "wep", "wpa", "wpa2", "wpa3"
-                    target_pwd: (optional) credential being saved with the network. No password
-                                is equivalent to empty string.
-                    connectivity_mode: the connectivity mode to use. Possible inputs:
-                    	"local_only", "unrestricted"
-                    operating_band: The operating band to use. Possible inputs:
-                    	"any", "only_2_4_ghz", "only_5_ghz"
-
-                Returns:
-                    boolean indicating if the action was successful
-        """
-
-        test_cmd = "wlan_ap_policy.start_access_point"
-
-        test_args = {
-            "target_ssid": target_ssid,
-            "security_type": security_type.lower(),
-            "target_pwd": target_pwd,
-            "connectivity_mode": connectivity_mode,
-            "operating_band": operating_band,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanStopAccessPoint(self, target_ssid, security_type, target_pwd=""):
-        """ Stops an active Access Point.
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network
-                    target_pwd: (optional) credential being saved with the network. No password
-                                is equivalent to empty string.
-
-                Returns:
-                    boolean indicating if the action was successful
-        """
-
-        test_cmd = "wlan_ap_policy.stop_access_point"
-
-        test_args = {
-            "target_ssid": target_ssid,
-            "security_type": security_type.lower(),
-            "target_pwd": target_pwd
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanStopAllAccessPoint(self):
-        """ Stops all Access Points
-
-                Returns:
-                    boolean indicating if the actions were successful
-        """
-
-        test_cmd = "wlan_ap_policy.stop_all_access_points"
-
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanSetNewListener(self):
-        """ Sets the update listener stream of the facade to a new stream so that updates will be
-            reset. Intended to be used between tests so that the behaviour of updates in a test is
-            independent from previous tests.
-        """
-        test_cmd = "wlan_ap_policy.set_new_update_listener"
-
-        return self.send_command(test_cmd, {})
-
-    def wlanGetUpdate(self, timeout=30):
-        """ Gets a list of AP state updates. This call will return with an update immediately the
-            first time the update listener is initialized by setting a new listener or by creating
-            a client controller before setting a new listener. Subsequent calls will hang until
-            there is an update.
-            Returns:
-                A list of AP state updated. If there is no error, the result is a list with a
-                structure that matches the FIDL AccessPointState struct given for updates.
-        """
-        test_cmd = "wlan_ap_policy.get_update"
-
-        return self.send_command(test_cmd, {}, response_timeout=timeout)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
deleted file mode 100644
index d229049..0000000
--- a/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import logger
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaWlanDeprecatedConfigurationLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "wlan_deprecated")
-
-    def wlanSuggestAccessPointMacAddress(self, addr):
-        """ Suggests a mac address to soft AP interface, to support
-        cast legacy behavior.
-
-        Args:
-            addr: string of mac address to suggest (e.g. '12:34:56:78:9a:bc')
-        """
-        test_cmd = 'wlan_deprecated.suggest_ap_mac'
-        test_args = {'mac': addr}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_lib.py
deleted file mode 100644
index c68fa11..0000000
--- a/src/antlion/controllers/fuchsia_lib/wlan_lib.py
+++ /dev/null
@@ -1,177 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion import logger
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-COMMAND_SCAN = "wlan.scan"
-COMMAND_SCAN_FOR_BSS_INFO = "wlan.scan_for_bss_info"
-COMMAND_CONNECT = "wlan.connect"
-COMMAND_DISCONNECT = "wlan.disconnect"
-COMMAND_STATUS = "wlan.status"
-COMMAND_GET_IFACE_ID_LIST = "wlan.get_iface_id_list"
-COMMAND_GET_PHY_ID_LIST = "wlan.get_phy_id_list"
-COMMAND_DESTROY_IFACE = "wlan.destroy_iface"
-COMMAND_GET_COUNTRY = "wlan_phy.get_country"
-COMMAND_GET_DEV_PATH = "wlan_phy.get_dev_path"
-COMMAND_QUERY_IFACE = "wlan.query_iface"
-
-
-class FuchsiaWlanLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "wlan")
-
-    def wlanStartScan(self):
-        """ Starts a wlan scan
-
-        Returns:
-            scan results
-        """
-        test_cmd = COMMAND_SCAN
-
-        return self.send_command(test_cmd, {})
-
-    def wlanScanForBSSInfo(self):
-        """ Scans and returns BSS info
-
-        Returns:
-            A dict mapping each seen SSID to a list of BSS Description IE
-            blocks, one for each BSS observed in the network
-        """
-        test_cmd = COMMAND_SCAN_FOR_BSS_INFO
-
-        return self.send_command(test_cmd, {})
-
-    def wlanConnectToNetwork(self,
-                             target_ssid,
-                             target_bss_desc,
-                             target_pwd=None):
-        """ Triggers a network connection
-        Args:
-            target_ssid: the network to attempt a connection to
-            target_pwd: (optional) password for the target network
-
-        Returns:
-            boolean indicating if the connection was successful
-        """
-        test_cmd = COMMAND_CONNECT
-        test_args = {
-            "target_ssid": target_ssid,
-            "target_pwd": target_pwd,
-            "target_bss_desc": target_bss_desc
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanDisconnect(self):
-        """ Disconnect any current wifi connections"""
-        test_cmd = COMMAND_DISCONNECT
-
-        return self.send_command(test_cmd, {})
-
-    def wlanDestroyIface(self, iface_id):
-        """ Destroy WLAN interface by ID.
-        Args:
-            iface_id: the interface id.
-
-        Returns:
-            Dictionary, service id if success, error if error.
-        """
-        test_cmd = COMMAND_DESTROY_IFACE
-        test_args = {"identifier": iface_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanGetIfaceIdList(self):
-        """ Get a list if wlan interface IDs.
-
-        Returns:
-            Dictionary, service id if success, error if error.
-        """
-        test_cmd = COMMAND_GET_IFACE_ID_LIST
-
-        return self.send_command(test_cmd, {})
-
-    def wlanPhyIdList(self):
-        """ Get a list if wlan phy IDs.
-
-        Returns:
-            List of IDs if success, error if error.
-        """
-        test_cmd = COMMAND_GET_PHY_ID_LIST
-
-        return self.send_command(test_cmd, {})
-
-    def wlanStatus(self, iface_id=None):
-        """ Request connection status
-
-        Args:
-            iface_id: unsigned 16-bit int, the wlan interface id
-                (defaults to None)
-
-        Returns:
-            Client state summary containing WlanClientState and
-            status of various networks connections
-        """
-        test_cmd = COMMAND_STATUS
-        test_args = {}
-        if iface_id:
-            test_args = {'iface_id': iface_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanGetCountry(self, phy_id):
-        """ Reads the currently configured country for `phy_id`.
-
-        Args:
-            phy_id: unsigned 16-bit integer.
-
-        Returns:
-            Dictionary, String if success, error if error.
-        """
-        test_cmd = COMMAND_GET_COUNTRY
-        test_args = {"phy_id": phy_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanGetDevPath(self, phy_id):
-        """ Queries the device path for `phy_id`.
-
-        Args:
-            phy_id: unsigned 16-bit integer.
-
-        Returns:
-            Dictionary, String if success, error if error.
-        """
-        test_cmd = COMMAND_GET_DEV_PATH
-        test_args = {"phy_id": phy_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanQueryInterface(self, iface_id):
-        """ Retrieves interface info for given wlan iface id.
-
-        Args:
-            iface_id: unsigned 16-bit int, the wlan interface id.
-
-        Returns:
-            Dictionary, containing interface id, role, phy_id, phy_assigned_id
-            and mac addr.
-        """
-        test_cmd = COMMAND_QUERY_IFACE
-        test_args = {'iface_id': iface_id}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
deleted file mode 100644
index 37e70fa..0000000
--- a/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
+++ /dev/null
@@ -1,183 +0,0 @@
-# Lint as: python3
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#   This class provides pipeline betweem python tests and WLAN policy facade.
-
-from antlion import logger
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-COMMAND_START_CLIENT_CONNECTIONS = "wlan_policy.start_client_connections"
-COMMAND_STOP_CLIENT_CONNECTIONS = "wlan_policy.stop_client_connections"
-COMMAND_SCAN_FOR_NETWORKS = "wlan_policy.scan_for_networks"
-COMMAND_SAVE_NETWORK = "wlan_policy.save_network"
-COMMAND_REMOVE_NETWORK = "wlan_policy.remove_network"
-COMMAND_REMOVE_ALL_NETWORKS = "wlan_policy.remove_all_networks"
-COMMAND_GET_SAVED_NETWORKS = "wlan_policy.get_saved_networks"
-COMMAND_CONNECT = "wlan_policy.connect"
-COMMAND_CREATE_CLIENT_CONTROLLER = "wlan_policy.create_client_controller"
-COMMAND_SET_NEW_LISTENER = "wlan_policy.set_new_update_listener"
-COMMAND_REMOVE_ALL_NETWORKS = "wlan_policy.remove_all_networks"
-COMMAND_GET_UPDATE = "wlan_policy.get_update"
-
-
-class FuchsiaWlanPolicyLib(BaseLib):
-
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "wlan_policy")
-
-    def wlanStartClientConnections(self):
-        """ Enables device to initiate connections to networks """
-
-        test_cmd = COMMAND_START_CLIENT_CONNECTIONS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanStopClientConnections(self):
-        """ Disables device for initiating connections to networks """
-
-        test_cmd = COMMAND_STOP_CLIENT_CONNECTIONS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanScanForNetworks(self):
-        """ Scans for networks that can be connected to
-                Returns:
-                    A list of network names and security types
-         """
-
-        test_cmd = COMMAND_SCAN_FOR_NETWORKS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanSaveNetwork(self, target_ssid, security_type, target_pwd=None):
-        """ Saveds a network to the device for future connections
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network
-                    target_pwd: (optional) credential being saved with the network. No password
-                                is equivalent to empty string.
-
-                Returns:
-                    boolean indicating if the connection was successful
-        """
-        if not target_pwd:
-            target_pwd = ''
-        test_cmd = COMMAND_SAVE_NETWORK
-        test_args = {
-            "target_ssid": target_ssid,
-            "security_type": str(security_type).lower(),
-            "target_pwd": target_pwd
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanRemoveNetwork(self, target_ssid, security_type, target_pwd=None):
-        """ Removes or "forgets" a network from saved networks
-                Args:
-                    target_ssid: the network to attempt a connection to
-                    security_type: the security protocol of the network
-                    target_pwd: (optional) credential of the network to remove. No password and
-                                empty string are equivalent.
-        """
-        if not target_pwd:
-            target_pwd = ''
-        test_cmd = COMMAND_REMOVE_NETWORK
-        test_args = {
-            "target_ssid": target_ssid,
-            "security_type": str(security_type).lower(),
-            "target_pwd": target_pwd
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanRemoveAllNetworks(self):
-        """ Removes or "forgets" all networks from saved networks
-                Returns:
-                    A boolean indicating if the action was successful
-        """
-
-        test_cmd = COMMAND_REMOVE_ALL_NETWORKS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanGetSavedNetworks(self):
-        """ Gets networks saved on device. Any PSK of a saved network will be
-            lower case regardless of how it was saved.
-                Returns:
-                    A list of saved network names and security protocols
-        """
-
-        test_cmd = COMMAND_GET_SAVED_NETWORKS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanConnect(self, target_ssid, security_type):
-        """ Triggers connection to a network
-                Args:
-                    target_ssid: the network to attempt a connection to. Must have been previously
-                                 saved in order for a successful connection to happen.
-                    security_type: the security protocol of the network
-
-            Returns:
-                    boolean indicating if the connection was successful
-        """
-
-        test_cmd = COMMAND_CONNECT
-        test_args = {
-            "target_ssid": target_ssid,
-            "security_type": str(security_type).lower()
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanCreateClientController(self):
-        """ Initializes the client controller of the facade that is used to make Client Controller
-            API calls
-        """
-        test_cmd = COMMAND_CREATE_CLIENT_CONTROLLER
-
-        return self.send_command(test_cmd, {})
-
-    def wlanSetNewListener(self):
-        """ Sets the update listener stream of the facade to a new stream so that updates will be
-            reset. Intended to be used between tests so that the behaviour of updates in a test is
-            independent from previous tests.
-        """
-        test_cmd = COMMAND_SET_NEW_LISTENER
-
-        return self.send_command(test_cmd, {})
-
-    def wlanRemoveAllNetworks(self):
-        """ Deletes all saved networks on the device. Relies directly on the get_saved_networks and
-            remove_network commands
-        """
-        test_cmd = COMMAND_REMOVE_ALL_NETWORKS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanGetUpdate(self, timeout=30):
-        """ Gets one client listener update. This call will return with an update immediately the
-            first time the update listener is initialized by setting a new listener or by creating
-            a client controller before setting a new listener. Subsequent calls will hang until
-            there is an update.
-            Returns:
-                An update of connection status. If there is no error, the result is a dict with a
-                structure that matches the FIDL ClientStateSummary struct given for updates.
-        """
-        test_cmd = COMMAND_GET_UPDATE
-
-        return self.send_command(test_cmd, {}, response_timeout=timeout)
diff --git a/src/antlion/controllers/gnss_lib/GnssSimulator.py b/src/antlion/controllers/gnss_lib/GnssSimulator.py
deleted file mode 100644
index 69cad04..0000000
--- a/src/antlion/controllers/gnss_lib/GnssSimulator.py
+++ /dev/null
@@ -1,200 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Python module for General abstract GNSS Simulator.
-@author: Clay Liao (jianhsiungliao@)
-"""
-from time import sleep
-from antlion.controllers.spectracom_lib import gsg6
-from antlion.controllers.spirent_lib import gss7000
-from antlion import logger
-from antlion.utils import ping
-from antlion.libs.proc import job
-
-
-class AbstractGnssSimulator:
-    """General abstract GNSS Simulator"""
-
-    def __init__(self, simulator, ip_addr, ip_port, ip_port_ctrl=7717):
-        """Init AbstractGnssSimulator
-
-        Args:
-            simulator: GNSS simulator name,
-                Type, str
-                Option 'gss7000/gsg6'
-            ip_addr: IP Address.
-                Type, str
-            ip_port: TCPIP Port,
-                Type, str
-            ip_port_ctrl: TCPIP port,
-                Type, int
-                Default, 7717
-        """
-        self.simulator_name = str(simulator).lower()
-        self.ip_addr = ip_addr
-        self.ip_port = ip_port
-        self.ip_port_ctrl = ip_port_ctrl
-        self._logger = logger.create_tagged_trace_logger(
-            '%s %s:%s' % (simulator, self.ip_addr, self.ip_port))
-        if self.simulator_name == 'gsg6':
-            self._logger.info('GNSS simulator is GSG6')
-            self.simulator = gsg6.GSG6(self.ip_addr, self.ip_port)
-        elif self.simulator_name == 'gss7000':
-            self._logger.info('GNSS simulator is GSS7000')
-            self.simulator = gss7000.GSS7000(self.ip_addr, self.ip_port,
-                                             self.ip_port_ctrl)
-        else:
-            self._logger.error('No matched GNSS simulator')
-            raise AttributeError(
-                'The GNSS simulator in config file is {} which is not supported.'
-                .format(self.simulator_name))
-
-    def connect(self):
-        """Connect to GNSS Simulator"""
-        self._logger.debug('Connect to GNSS Simulator {}'.format(
-            self.simulator_name.upper()))
-        self.simulator.connect()
-
-    def close(self):
-        """Disconnect from GNSS Simulator"""
-        self._logger.debug('Disconnect from GNSS Simulator {}'.format(
-            self.simulator_name.upper()))
-        self.simulator.close()
-
-    def start_scenario(self, scenario=''):
-        """Start the running scenario.
-
-        Args:
-            scenario: path of scenario,
-                Type, str
-        """
-        self._logger.info('Start GNSS Scenario {}'.format(scenario))
-        self.simulator.start_scenario(scenario)
-
-    def stop_scenario(self):
-        """Stop the running scenario."""
-        self._logger.debug('Stop playing scenario')
-        self.simulator.stop_scenario()
-
-    def set_power(self, power_level=-130):
-        """Set scenario power level.
-        Args:
-            power_level: target power level in dBm for gsg6 or gss7000,
-                gsg6 power_level range is [-160, -65],
-                gss7000 power_level range is [-170, -115]
-                Type, float,
-        """
-        self.simulator.set_power(power_level)
-
-    def set_power_offset(self, gss7000_ant=1, pwr_offset=0):
-        """Set scenario power level offset based on reference level.
-           The default reference level is -130dBm for GPS L1.
-        Args:
-            ant: target gss7000 RF port,
-                Type, int
-            pwr_offset: target power offset in dB,
-                Type, float
-        """
-        if self.simulator_name == 'gsg6':
-            power_level = -130 + pwr_offset
-            self.simulator.set_power(power_level)
-        elif self.simulator_name == 'gss7000':
-            self.simulator.set_power_offset(gss7000_ant, pwr_offset)
-        else:
-            self._logger.error('No GNSS simulator is available')
-
-    def set_scenario_power(self,
-                           power_level,
-                           sat_id='',
-                           sat_system='',
-                           freq_band=''):
-        """Set dynamic power for the running scenario.
-
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-            sat_id: set power level for specific satellite identifiers
-                Type, str.
-                Option
-                    For GSG-6: 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                    where xx is satellite identifiers number
-                    e.g.: G10
-                    For GSS7000: Provide SVID.
-                Default, '', assumed All.
-            sat_system: to set power level for all Satellites
-                Type, str
-                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
-                Default, '', assumed All.
-            freq_band: Frequency band to set the power level
-                Type, str
-                Default, '', assumed to be L1.
-         Raises:
-            RuntimeError: raise when instrument does not support this function.
-        """
-        self.simulator.set_scenario_power(power_level=power_level,
-                                          sat_id=sat_id,
-                                          sat_system=sat_system,
-                                          freq_band=freq_band)
-
-    def toggle_scenario_power(self,
-                              toggle_onoff='ON',
-                              sat_id='',
-                              sat_system=''):
-        """Toggle ON OFF scenario.
-
-        Args:
-            toggle_onoff: turn on or off the satellites
-                Type, str. Option ON/OFF
-                Default, 'ON'
-            sat_id: satellite identifiers
-                Type, str.
-                Option 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                where xx is satellite identifiers no.
-                e.g.: G10
-            sat_system: to toggle On/OFF for all Satellites
-                Type, str
-                Option 'GPS/GLO/GAL'
-        """
-        # TODO: [b/208719212] Currently only support GSG-6. Will implement GSS7000 feature.
-        if self.simulator_name == 'gsg6':
-            self.simulator.toggle_scenario_power(toggle_onoff=toggle_onoff,
-                                                 sat_id=sat_id,
-                                                 sat_system=sat_system)
-        else:
-            raise RuntimeError('{} does not support this function'.format(
-                self.simulator_name))
-
-    def ping_inst(self, retry=3, wait=1):
-        """Ping IP of instrument to check if the connection is stable.
-        Args:
-            retry: Retry times.
-                Type, int.
-                Default, 3.
-            wait: Wait time between each ping command when ping fail is met.
-                Type, int.
-                Default, 1.
-        Return:
-            True/False of ping result.
-        """
-        for i in range(retry):
-            ret = ping(job, self.ip_addr)
-            self._logger.debug(f'Ping return results: {ret}')
-            if ret.get('packet_loss') == '0':
-                return True
-            self._logger.warning(f'Fail to ping GNSS Simulator: {i+1}')
-            sleep(wait)
-        return False
diff --git a/src/antlion/controllers/gnss_lib/__init__.py b/src/antlion/controllers/gnss_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/gnss_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/iperf_client.py b/src/antlion/controllers/iperf_client.py
deleted file mode 100644
index c4d8e1d..0000000
--- a/src/antlion/controllers/iperf_client.py
+++ /dev/null
@@ -1,311 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import subprocess
-import socket
-import threading
-
-from antlion import context
-from antlion import utils
-from antlion.controllers.adb_lib.error import AdbCommandError
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.fuchsia_lib.ssh import SSHProvider
-from antlion.controllers.iperf_server import _AndroidDeviceBridge
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import settings
-from antlion.libs.proc import job
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'IPerfClient'
-ACTS_CONTROLLER_REFERENCE_NAME = 'iperf_clients'
-
-
-class IPerfError(Exception):
-    """Raised on execution errors of iPerf."""
-
-
-def create(configs):
-    """Factory method for iperf clients.
-
-    The function creates iperf clients based on at least one config.
-    If configs contain ssh settings or and AndroidDevice, remote iperf clients
-    will be started on those devices, otherwise, a the client will run on the
-    local machine.
-
-    Args:
-        configs: config parameters for the iperf server
-    """
-    results = []
-    for c in configs:
-        if type(c) is dict and 'AndroidDevice' in c:
-            results.append(
-                IPerfClientOverAdb(c['AndroidDevice'],
-                                   test_interface=c.get('test_interface')))
-        elif type(c) is dict and 'ssh_config' in c:
-            results.append(
-                IPerfClientOverSsh(c['ssh_config'],
-                                   test_interface=c.get('test_interface')))
-        else:
-            results.append(IPerfClient())
-    return results
-
-
-def get_info(iperf_clients):
-    """Placeholder for info about iperf clients
-
-    Returns:
-        None
-    """
-    return None
-
-
-def destroy(_):
-    # No cleanup needed.
-    pass
-
-
-class IPerfClientBase(object):
-    """The Base class for all IPerfClients.
-
-    This base class is responsible for synchronizing the logging to prevent
-    multiple IPerfClients from writing results to the same file, as well
-    as providing the interface for IPerfClient objects.
-    """
-    # Keeps track of the number of IPerfClient logs to prevent file name
-    # collisions.
-    __log_file_counter = 0
-
-    __log_file_lock = threading.Lock()
-
-    @staticmethod
-    def _get_full_file_path(tag=''):
-        """Returns the full file path for the IPerfClient log file.
-
-        Note: If the directory for the file path does not exist, it will be
-        created.
-
-        Args:
-            tag: The tag passed in to the server run.
-        """
-        current_context = context.get_current_context()
-        full_out_dir = os.path.join(current_context.get_full_output_path(),
-                                    'iperf_client_files')
-
-        with IPerfClientBase.__log_file_lock:
-            os.makedirs(full_out_dir, exist_ok=True)
-            tags = ['IPerfClient', tag, IPerfClientBase.__log_file_counter]
-            out_file_name = '%s.log' % (','.join(
-                [str(x) for x in tags if x != '' and x is not None]))
-            IPerfClientBase.__log_file_counter += 1
-
-        return os.path.join(full_out_dir, out_file_name)
-
-    def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None):
-        """Starts iperf client, and waits for completion.
-
-        Args:
-            ip: iperf server ip address.
-            iperf_args: A string representing arguments to start iperf
-                client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
-            tag: A string to further identify iperf results file
-            timeout: the maximum amount of time the iperf client can run.
-            iperf_binary: Location of iperf3 binary. If none, it is assumed the
-                the binary is in the path.
-
-        Returns:
-            full_out_path: iperf result path.
-        """
-        raise NotImplementedError('start() must be implemented.')
-
-
-class IPerfClient(IPerfClientBase):
-    """Class that handles iperf3 client operations."""
-
-    def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None):
-        """Starts iperf client, and waits for completion.
-
-        Args:
-            ip: iperf server ip address.
-            iperf_args: A string representing arguments to start iperf
-            client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
-            tag: tag to further identify iperf results file
-            timeout: unused.
-            iperf_binary: Location of iperf3 binary. If none, it is assumed the
-                the binary is in the path.
-
-        Returns:
-            full_out_path: iperf result path.
-        """
-        if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
-        else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_cmd = [str(iperf_binary), '-c', ip] + iperf_args.split(' ')
-        full_out_path = self._get_full_file_path(tag)
-
-        with open(full_out_path, 'w') as out_file:
-            subprocess.call(iperf_cmd, stdout=out_file)
-
-        return full_out_path
-
-
-class IPerfClientOverSsh(IPerfClientBase):
-    """Class that handles iperf3 client operations on remote machines."""
-
-    def __init__(self,
-                 ssh_config: str,
-                 test_interface: str = None,
-                 ssh_provider: SSHProvider = None):
-        self._ssh_provider = ssh_provider
-        if not self._ssh_provider:
-            self._ssh_settings = settings.from_config(ssh_config)
-            if not (utils.is_valid_ipv4_address(self._ssh_settings.hostname) or
-                    utils.is_valid_ipv6_address(self._ssh_settings.hostname)):
-                mdns_ip = utils.get_fuchsia_mdns_ipv6_address(
-                    self._ssh_settings.hostname)
-                if mdns_ip:
-                    self._ssh_settings.hostname = mdns_ip
-        self._ssh_session = None
-        self.start_ssh()
-
-        self.test_interface = test_interface
-
-    def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None):
-        """Starts iperf client, and waits for completion.
-
-        Args:
-            ip: iperf server ip address.
-            iperf_args: A string representing arguments to start iperf
-            client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
-            tag: tag to further identify iperf results file
-            timeout: the maximum amount of time to allow the iperf client to run
-            iperf_binary: Location of iperf3 binary. If none, it is assumed the
-                the binary is in the path.
-
-        Returns:
-            full_out_path: iperf result path.
-        """
-        if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
-        else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_cmd = '{} -c {} {}'.format(iperf_binary, ip, iperf_args)
-        full_out_path = self._get_full_file_path(tag)
-
-        try:
-            self.start_ssh()
-            if self._ssh_provider:
-                iperf_process = self._ssh_provider.run(iperf_cmd,
-                                                       timeout_sec=timeout)
-            else:
-                iperf_process = self._ssh_session.run(iperf_cmd,
-                                                      timeout=timeout)
-            iperf_output = iperf_process.stdout
-            with open(full_out_path, 'w') as out_file:
-                out_file.write(iperf_output)
-        except socket.timeout:
-            raise TimeoutError('Socket timeout. Timed out waiting for iperf '
-                               'client to finish.')
-        except Exception as err:
-            logging.exception('iperf run failed: {}'.format(err))
-
-        return full_out_path
-
-    def start_ssh(self):
-        """Starts an ssh session to the iperf client."""
-        if self._ssh_provider:
-            # SSH sessions are created by the provider.
-            return
-        if not self._ssh_session:
-            self._ssh_session = connection.SshConnection(self._ssh_settings)
-
-    def close_ssh(self):
-        """Closes the ssh session to the iperf client, if one exists, preventing
-        connection reset errors when rebooting client device.
-        """
-        if self._ssh_session:
-            self._ssh_session.close()
-            self._ssh_session = None
-
-
-class IPerfClientOverAdb(IPerfClientBase):
-    """Class that handles iperf3 operations over ADB devices."""
-
-    def __init__(self, android_device_or_serial, test_interface=None):
-        """Creates a new IPerfClientOverAdb object.
-
-        Args:
-            android_device_or_serial: Either an AndroidDevice object, or the
-                serial that corresponds to the AndroidDevice. Note that the
-                serial must be present in an AndroidDevice entry in the ACTS
-                config.
-            test_interface: The network interface that will be used to send
-                traffic to the iperf server.
-        """
-        self._android_device_or_serial = android_device_or_serial
-        self.test_interface = test_interface
-
-    @property
-    def _android_device(self):
-        if isinstance(self._android_device_or_serial, AndroidDevice):
-            return self._android_device_or_serial
-        else:
-            return _AndroidDeviceBridge.android_devices()[
-                self._android_device_or_serial]
-
-    def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None):
-        """Starts iperf client, and waits for completion.
-
-        Args:
-            ip: iperf server ip address.
-            iperf_args: A string representing arguments to start iperf
-            client. Eg: iperf_args = "-t 10 -p 5001 -w 512k/-u -b 200M -J".
-            tag: tag to further identify iperf results file
-            timeout: the maximum amount of time to allow the iperf client to run
-            iperf_binary: Location of iperf3 binary. If none, it is assumed the
-                the binary is in the path.
-
-        Returns:
-            The iperf result file path.
-        """
-        clean_out = ''
-        try:
-            if not iperf_binary:
-                logging.debug('No iperf3 binary specified.  '
-                              'Assuming iperf3 is in the path.')
-                iperf_binary = 'iperf3'
-            else:
-                logging.debug('Using iperf3 binary located at %s' %
-                              iperf_binary)
-            iperf_cmd = '{} -c {} {}'.format(iperf_binary, ip, iperf_args)
-            out = self._android_device.adb.shell(str(iperf_cmd),
-                                                 timeout=timeout)
-            clean_out = out.split('\n')
-            if 'error' in clean_out[0].lower():
-                raise IPerfError(clean_out)
-        except (job.TimeoutError, AdbCommandError):
-            logging.warning('TimeoutError: Iperf measurement failed.')
-
-        full_out_path = self._get_full_file_path(tag)
-        with open(full_out_path, 'w') as out_file:
-            out_file.write('\n'.join(clean_out))
-
-        return full_out_path
diff --git a/src/antlion/controllers/iperf_server.py b/src/antlion/controllers/iperf_server.py
deleted file mode 100755
index b1311ff..0000000
--- a/src/antlion/controllers/iperf_server.py
+++ /dev/null
@@ -1,725 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-import math
-import os
-import shlex
-import subprocess
-import threading
-import time
-
-from antlion import context
-from antlion import logger as acts_logger
-from antlion import utils
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import settings
-from antlion.event import event_bus
-from antlion.event.decorators import subscribe_static
-from antlion.event.event import TestClassBeginEvent
-from antlion.event.event import TestClassEndEvent
-from antlion.libs.proc import job
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'IPerfServer'
-ACTS_CONTROLLER_REFERENCE_NAME = 'iperf_servers'
-KILOBITS = 1024
-MEGABITS = KILOBITS * 1024
-GIGABITS = MEGABITS * 1024
-BITS_IN_BYTE = 8
-
-
-def create(configs):
-    """ Factory method for iperf servers.
-
-    The function creates iperf servers based on at least one config.
-    If configs only specify a port number, a regular local IPerfServer object
-    will be created. If configs contains ssh settings or and AndroidDevice,
-    remote iperf servers will be started on those devices
-
-    Args:
-        configs: config parameters for the iperf server
-    """
-    results = []
-    for c in configs:
-        if type(c) in (str, int) and str(c).isdigit():
-            results.append(IPerfServer(int(c)))
-        elif type(c) is dict and 'AndroidDevice' in c and 'port' in c:
-            results.append(IPerfServerOverAdb(c['AndroidDevice'], c['port']))
-        elif type(c) is dict and 'ssh_config' in c and 'port' in c:
-            results.append(
-                IPerfServerOverSsh(c['ssh_config'],
-                                   c['port'],
-                                   test_interface=c.get('test_interface'),
-                                   use_killall=c.get('use_killall')))
-        else:
-            raise ValueError(
-                'Config entry %s in %s is not a valid IPerfServer '
-                'config.' % (repr(c), configs))
-    return results
-
-
-def get_info(iperf_servers):
-    """Placeholder for info about iperf servers
-
-    Returns:
-        None
-    """
-    return None
-
-
-def destroy(iperf_server_list):
-    for iperf_server in iperf_server_list:
-        try:
-            iperf_server.stop()
-        except Exception:
-            logging.exception('Unable to properly clean up %s.' % iperf_server)
-
-
-class IPerfResult(object):
-
-    def __init__(self, result_path, reporting_speed_units='Mbytes'):
-        """Loads iperf result from file.
-
-        Loads iperf result from JSON formatted server log. File can be accessed
-        before or after server is stopped. Note that only the first JSON object
-        will be loaded and this funtion is not intended to be used with files
-        containing multiple iperf client runs.
-        """
-        # if result_path isn't a path, treat it as JSON
-        self.reporting_speed_units = reporting_speed_units
-        if not os.path.exists(result_path):
-            self.result = json.loads(result_path)
-        else:
-            try:
-                with open(result_path, 'r') as f:
-                    iperf_output = f.readlines()
-                    if '}\n' in iperf_output:
-                        iperf_output = iperf_output[:iperf_output.index('}\n'
-                                                                        ) + 1]
-                    iperf_string = ''.join(iperf_output)
-                    iperf_string = iperf_string.replace('nan', '0')
-                    self.result = json.loads(iperf_string)
-            except ValueError:
-                with open(result_path, 'r') as f:
-                    # Possibly a result from interrupted iperf run,
-                    # skip first line and try again.
-                    lines = f.readlines()[1:]
-                    self.result = json.loads(''.join(lines))
-
-    def _has_data(self):
-        """Checks if the iperf result has valid throughput data.
-
-        Returns:
-            True if the result contains throughput data. False otherwise.
-        """
-        return ('end' in self.result) and ('sum_received' in self.result['end']
-                                           or 'sum' in self.result['end'])
-
-    def _get_reporting_speed(self, network_speed_in_bits_per_second):
-        """Sets the units for the network speed reporting based on how the
-        object was initiated.  Defaults to Megabytes per second.  Currently
-        supported, bits per second (bits), kilobits per second (kbits), megabits
-        per second (mbits), gigabits per second (gbits), bytes per second
-        (bytes), kilobits per second (kbytes), megabits per second (mbytes),
-        gigabytes per second (gbytes).
-
-        Args:
-            network_speed_in_bits_per_second: The network speed from iperf in
-                bits per second.
-
-        Returns:
-            The value of the throughput in the appropriate units.
-        """
-        speed_divisor = 1
-        if self.reporting_speed_units[1:].lower() == 'bytes':
-            speed_divisor = speed_divisor * BITS_IN_BYTE
-        if self.reporting_speed_units[0:1].lower() == 'k':
-            speed_divisor = speed_divisor * KILOBITS
-        if self.reporting_speed_units[0:1].lower() == 'm':
-            speed_divisor = speed_divisor * MEGABITS
-        if self.reporting_speed_units[0:1].lower() == 'g':
-            speed_divisor = speed_divisor * GIGABITS
-        return network_speed_in_bits_per_second / speed_divisor
-
-    def get_json(self):
-        """Returns the raw json output from iPerf."""
-        return self.result
-
-    @property
-    def error(self):
-        return self.result.get('error', None)
-
-    @property
-    def avg_rate(self):
-        """Average UDP rate in MB/s over the entire run.
-
-        This is the average UDP rate observed at the terminal the iperf result
-        is pulled from. According to iperf3 documentation this is calculated
-        based on bytes sent and thus is not a good representation of the
-        quality of the link. If the result is not from a success run, this
-        property is None.
-        """
-        if not self._has_data() or 'sum' not in self.result['end']:
-            return None
-        bps = self.result['end']['sum']['bits_per_second']
-        return self._get_reporting_speed(bps)
-
-    @property
-    def avg_receive_rate(self):
-        """Average receiving rate in MB/s over the entire run.
-
-        This data may not exist if iperf was interrupted. If the result is not
-        from a success run, this property is None.
-        """
-        if not self._has_data() or 'sum_received' not in self.result['end']:
-            return None
-        bps = self.result['end']['sum_received']['bits_per_second']
-        return self._get_reporting_speed(bps)
-
-    @property
-    def avg_send_rate(self):
-        """Average sending rate in MB/s over the entire run.
-
-        This data may not exist if iperf was interrupted. If the result is not
-        from a success run, this property is None.
-        """
-        if not self._has_data() or 'sum_sent' not in self.result['end']:
-            return None
-        bps = self.result['end']['sum_sent']['bits_per_second']
-        return self._get_reporting_speed(bps)
-
-    @property
-    def instantaneous_rates(self):
-        """Instantaneous received rate in MB/s over entire run.
-
-        This data may not exist if iperf was interrupted. If the result is not
-        from a success run, this property is None.
-        """
-        if not self._has_data():
-            return None
-        intervals = [
-            self._get_reporting_speed(interval['sum']['bits_per_second'])
-            for interval in self.result['intervals']
-        ]
-        return intervals
-
-    @property
-    def std_deviation(self):
-        """Standard deviation of rates in MB/s over entire run.
-
-        This data may not exist if iperf was interrupted. If the result is not
-        from a success run, this property is None.
-        """
-        return self.get_std_deviation(0)
-
-    def get_std_deviation(self, iperf_ignored_interval):
-        """Standard deviation of rates in MB/s over entire run.
-
-        This data may not exist if iperf was interrupted. If the result is not
-        from a success run, this property is None. A configurable number of
-        beginning (and the single last) intervals are ignored in the
-        calculation as they are inaccurate (e.g. the last is from a very small
-        interval)
-
-        Args:
-            iperf_ignored_interval: number of iperf interval to ignored in
-            calculating standard deviation
-
-        Returns:
-            The standard deviation.
-        """
-        if not self._has_data():
-            return None
-        instantaneous_rates = self.instantaneous_rates[
-            iperf_ignored_interval:-1]
-        avg_rate = math.fsum(instantaneous_rates) / len(instantaneous_rates)
-        sqd_deviations = ([(rate - avg_rate)**2
-                           for rate in instantaneous_rates])
-        std_dev = math.sqrt(
-            math.fsum(sqd_deviations) / (len(sqd_deviations) - 1))
-        return std_dev
-
-
-class IPerfServerBase(object):
-    # Keeps track of the number of IPerfServer logs to prevent file name
-    # collisions.
-    __log_file_counter = 0
-
-    __log_file_lock = threading.Lock()
-
-    def __init__(self, port):
-        self._port = port
-        # TODO(markdr): We shouldn't be storing the log files in an array like
-        # this. Nobody should be reading this property either. Instead, the
-        # IPerfResult should be returned in stop() with all the necessary info.
-        # See aosp/1012824 for a WIP implementation.
-        self.log_files = []
-
-    @property
-    def port(self):
-        raise NotImplementedError('port must be specified.')
-
-    @property
-    def started(self):
-        raise NotImplementedError('started must be specified.')
-
-    def start(self, extra_args='', tag=''):
-        """Starts an iperf3 server.
-
-        Args:
-            extra_args: A string representing extra arguments to start iperf
-                server with.
-            tag: Appended to log file name to identify logs from different
-                iperf runs.
-        """
-        raise NotImplementedError('start() must be specified.')
-
-    def stop(self):
-        """Stops the iperf server.
-
-        Returns:
-            The name of the log file generated from the terminated session.
-        """
-        raise NotImplementedError('stop() must be specified.')
-
-    def _get_full_file_path(self, tag=None):
-        """Returns the full file path for the IPerfServer log file.
-
-        Note: If the directory for the file path does not exist, it will be
-        created.
-
-        Args:
-            tag: The tag passed in to the server run.
-        """
-        out_dir = self.log_path
-
-        with IPerfServerBase.__log_file_lock:
-            tags = [tag, IPerfServerBase.__log_file_counter]
-            out_file_name = 'IPerfServer,%s.log' % (','.join(
-                [str(x) for x in tags if x != '' and x is not None]))
-            IPerfServerBase.__log_file_counter += 1
-
-        file_path = os.path.join(out_dir, out_file_name)
-        self.log_files.append(file_path)
-        return file_path
-
-    @property
-    def log_path(self):
-        current_context = context.get_current_context()
-        full_out_dir = os.path.join(current_context.get_full_output_path(),
-                                    'IPerfServer%s' % self.port)
-
-        # Ensure the directory exists.
-        os.makedirs(full_out_dir, exist_ok=True)
-
-        return full_out_dir
-
-
-def _get_port_from_ss_output(ss_output, pid):
-    pid = str(pid)
-    lines = ss_output.split('\n')
-    for line in lines:
-        if pid in line:
-            # Expected format:
-            # tcp LISTEN  0 5 *:<PORT>  *:* users:(("cmd",pid=<PID>,fd=3))
-            return line.split()[4].split(':')[-1]
-    else:
-        raise ProcessLookupError('Could not find started iperf3 process.')
-
-
-class IPerfServer(IPerfServerBase):
-    """Class that handles iperf server commands on localhost."""
-
-    def __init__(self, port=5201):
-        super().__init__(port)
-        self._hinted_port = port
-        self._current_log_file = None
-        self._iperf_process = None
-        self._last_opened_file = None
-
-    @property
-    def port(self):
-        return self._port
-
-    @property
-    def started(self):
-        return self._iperf_process is not None
-
-    def start(self, extra_args='', tag=''):
-        """Starts iperf server on local machine.
-
-        Args:
-            extra_args: A string representing extra arguments to start iperf
-                server with.
-            tag: Appended to log file name to identify logs from different
-                iperf runs.
-        """
-        if self._iperf_process is not None:
-            return
-
-        self._current_log_file = self._get_full_file_path(tag)
-
-        # Run an iperf3 server on the hinted port with JSON output.
-        command = ['iperf3', '-s', '-p', str(self._hinted_port), '-J']
-
-        command.extend(shlex.split(extra_args))
-
-        if self._last_opened_file:
-            self._last_opened_file.close()
-        self._last_opened_file = open(self._current_log_file, 'w')
-        self._iperf_process = subprocess.Popen(command,
-                                               stdout=self._last_opened_file,
-                                               stderr=subprocess.DEVNULL)
-        for attempts_left in reversed(range(3)):
-            try:
-                self._port = int(
-                    _get_port_from_ss_output(
-                        job.run('ss -l -p -n | grep iperf').stdout,
-                        self._iperf_process.pid))
-                break
-            except ProcessLookupError:
-                if attempts_left == 0:
-                    raise
-                logging.debug('iperf3 process not started yet.')
-                time.sleep(.01)
-
-    def stop(self):
-        """Stops the iperf server.
-
-        Returns:
-            The name of the log file generated from the terminated session.
-        """
-        if self._iperf_process is None:
-            return
-
-        if self._last_opened_file:
-            self._last_opened_file.close()
-            self._last_opened_file = None
-
-        self._iperf_process.terminate()
-        self._iperf_process = None
-
-        return self._current_log_file
-
-    def __del__(self):
-        self.stop()
-
-
-class IPerfServerOverSsh(IPerfServerBase):
-    """Class that handles iperf3 operations on remote machines."""
-
-    def __init__(self,
-                 ssh_config,
-                 port,
-                 test_interface=None,
-                 use_killall=False):
-        super().__init__(port)
-        self.ssh_settings = settings.from_config(ssh_config)
-        self.log = acts_logger.create_tagged_trace_logger(
-            f'IPerfServer | {self.ssh_settings.hostname}')
-        self._ssh_session = None
-        self.start_ssh()
-
-        self._iperf_pid = None
-        self._current_tag = None
-        self.hostname = self.ssh_settings.hostname
-        self._use_killall = str(use_killall).lower() == 'true'
-        try:
-            # A test interface can only be found if an ip address is specified.
-            # A fully qualified hostname will return None for the
-            # test_interface.
-            self.test_interface = test_interface if test_interface else utils.get_interface_based_on_ip(
-                self._ssh_session, self.hostname)
-        except Exception as e:
-            self.log.warning(e)
-            self.test_interface = None
-
-    @property
-    def port(self):
-        return self._port
-
-    @property
-    def started(self):
-        return self._iperf_pid is not None
-
-    def _get_remote_log_path(self):
-        return '/tmp/iperf_server_port%s.log' % self.port
-
-    def get_interface_ip_addresses(self, interface):
-        """Gets all of the ip addresses, ipv4 and ipv6, associated with a
-           particular interface name.
-
-        Args:
-            interface: The interface name on the device, ie eth0
-
-        Returns:
-            A list of dictionaries of the various IP addresses. See
-            utils.get_interface_ip_addresses.
-        """
-        if not self._ssh_session:
-            self.start_ssh()
-
-        return utils.get_interface_ip_addresses(self._ssh_session, interface)
-
-    def renew_test_interface_ip_address(self):
-        """Renews the test interface's IPv4 address.
-
-        Necessary for changing DHCP scopes during a test.
-        """
-        if not self._ssh_session:
-            self.start_ssh()
-        utils.renew_linux_ip_address(self._ssh_session, self.test_interface)
-
-    def get_addr(self, addr_type='ipv4_private', timeout_sec=None):
-        """Wait until a type of IP address on the test interface is available
-        then return it.
-        """
-        if not self._ssh_session:
-            self.start_ssh()
-        return utils.get_addr(self._ssh_session, self.test_interface,
-                              addr_type, timeout_sec)
-
-    def _cleanup_iperf_port(self):
-        """Checks and kills zombie iperf servers occupying intended port."""
-        iperf_check_cmd = ('netstat -tulpn | grep LISTEN | grep iperf3'
-                           ' | grep :{}').format(self.port)
-        iperf_check = self._ssh_session.run(iperf_check_cmd,
-                                            ignore_status=True)
-        iperf_check = iperf_check.stdout
-        if iperf_check:
-            logging.debug('Killing zombie server on port {}'.format(self.port))
-            iperf_pid = iperf_check.split(' ')[-1].split('/')[0]
-            self._ssh_session.run('kill -9 {}'.format(str(iperf_pid)))
-
-    def start(self, extra_args='', tag='', iperf_binary=None):
-        """Starts iperf server on specified machine and port.
-
-        Args:
-            extra_args: A string representing extra arguments to start iperf
-                server with.
-            tag: Appended to log file name to identify logs from different
-                iperf runs.
-            iperf_binary: Location of iperf3 binary. If none, it is assumed the
-                the binary is in the path.
-        """
-        if self.started:
-            return
-
-        if not self._ssh_session:
-            self.start_ssh()
-        self._cleanup_iperf_port()
-        if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
-        else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_command = '{} -s -J -p {}'.format(iperf_binary, self.port)
-
-        cmd = '{cmd} {extra_flags} > {log_file}'.format(
-            cmd=iperf_command,
-            extra_flags=extra_args,
-            log_file=self._get_remote_log_path())
-
-        job_result = self._ssh_session.run_async(cmd)
-        self._iperf_pid = job_result.stdout
-        self._current_tag = tag
-
-    def stop(self):
-        """Stops the iperf server.
-
-        Returns:
-            The name of the log file generated from the terminated session.
-        """
-        if not self.started:
-            return
-
-        if self._use_killall:
-            self._ssh_session.run('killall iperf3', ignore_status=True)
-        else:
-            self._ssh_session.run_async('kill -9 {}'.format(
-                str(self._iperf_pid)))
-
-        iperf_result = self._ssh_session.run('cat {}'.format(
-            self._get_remote_log_path()))
-
-        log_file = self._get_full_file_path(self._current_tag)
-        with open(log_file, 'w') as f:
-            f.write(iperf_result.stdout)
-
-        self._ssh_session.run_async('rm {}'.format(
-            self._get_remote_log_path()))
-        self._iperf_pid = None
-        return log_file
-
-    def start_ssh(self):
-        """Starts an ssh session to the iperf server."""
-        if not self._ssh_session:
-            self._ssh_session = connection.SshConnection(self.ssh_settings)
-
-    def close_ssh(self):
-        """Closes the ssh session to the iperf server, if one exists, preventing
-        connection reset errors when rebooting server device.
-        """
-        if self.started:
-            self.stop()
-        if self._ssh_session:
-            self._ssh_session.close()
-            self._ssh_session = None
-
-
-# TODO(markdr): Remove this after automagic controller creation has been
-# removed.
-class _AndroidDeviceBridge(object):
-    """A helper class for connecting serial numbers to AndroidDevices."""
-
-    _test_class = None
-
-    @staticmethod
-    @subscribe_static(TestClassBeginEvent)
-    def on_test_begin(event):
-        _AndroidDeviceBridge._test_class = event.test_class
-
-    @staticmethod
-    @subscribe_static(TestClassEndEvent)
-    def on_test_end(_):
-        _AndroidDeviceBridge._test_class = None
-
-    @staticmethod
-    def android_devices():
-        """A dict of serial -> AndroidDevice, where AndroidDevice is a device
-        found in the current TestClass's controllers.
-        """
-        if not _AndroidDeviceBridge._test_class:
-            return {}
-        return {
-            device.serial: device
-            for device in _AndroidDeviceBridge._test_class.android_devices
-        }
-
-
-event_bus.register_subscription(
-    _AndroidDeviceBridge.on_test_begin.subscription)
-event_bus.register_subscription(_AndroidDeviceBridge.on_test_end.subscription)
-
-
-class IPerfServerOverAdb(IPerfServerBase):
-    """Class that handles iperf3 operations over ADB devices."""
-
-    def __init__(self, android_device_or_serial, port):
-        """Creates a new IPerfServerOverAdb object.
-
-        Args:
-            android_device_or_serial: Either an AndroidDevice object, or the
-                serial that corresponds to the AndroidDevice. Note that the
-                serial must be present in an AndroidDevice entry in the ACTS
-                config.
-            port: The port number to open the iperf server on.
-        """
-        super().__init__(port)
-        self._android_device_or_serial = android_device_or_serial
-
-        self._iperf_process = None
-        self._current_tag = ''
-
-    @property
-    def port(self):
-        return self._port
-
-    @property
-    def started(self):
-        return self._iperf_process is not None
-
-    @property
-    def _android_device(self):
-        if isinstance(self._android_device_or_serial, AndroidDevice):
-            return self._android_device_or_serial
-        else:
-            return _AndroidDeviceBridge.android_devices()[
-                self._android_device_or_serial]
-
-    def _get_device_log_path(self):
-        return '~/data/iperf_server_port%s.log' % self.port
-
-    def start(self, extra_args='', tag='', iperf_binary=None):
-        """Starts iperf server on an ADB device.
-
-        Args:
-            extra_args: A string representing extra arguments to start iperf
-                server with.
-            tag: Appended to log file name to identify logs from different
-                iperf runs.
-            iperf_binary: Location of iperf3 binary. If none, it is assumed the
-                the binary is in the path.
-        """
-        if self._iperf_process is not None:
-            return
-
-        if not iperf_binary:
-            logging.debug('No iperf3 binary specified.  '
-                          'Assuming iperf3 is in the path.')
-            iperf_binary = 'iperf3'
-        else:
-            logging.debug('Using iperf3 binary located at %s' % iperf_binary)
-        iperf_command = '{} -s -J -p {}'.format(iperf_binary, self.port)
-
-        self._iperf_process = self._android_device.adb.shell_nb(
-            '{cmd} {extra_flags} > {log_file} 2>&1'.format(
-                cmd=iperf_command,
-                extra_flags=extra_args,
-                log_file=self._get_device_log_path()))
-
-        self._iperf_process_adb_pid = ''
-        while len(self._iperf_process_adb_pid) == 0:
-            self._iperf_process_adb_pid = self._android_device.adb.shell(
-                'pgrep iperf3 -n')
-
-        self._current_tag = tag
-
-    def stop(self):
-        """Stops the iperf server.
-
-        Returns:
-            The name of the log file generated from the terminated session.
-        """
-        if self._iperf_process is None:
-            return
-
-        job.run('kill -9 {}'.format(self._iperf_process.pid))
-
-        # TODO(markdr): update with definitive kill method
-        while True:
-            iperf_process_list = self._android_device.adb.shell('pgrep iperf3')
-            if iperf_process_list.find(self._iperf_process_adb_pid) == -1:
-                break
-            else:
-                self._android_device.adb.shell("kill -9 {}".format(
-                    self._iperf_process_adb_pid))
-
-        iperf_result = self._android_device.adb.shell('cat {}'.format(
-            self._get_device_log_path()))
-
-        log_file = self._get_full_file_path(self._current_tag)
-        with open(log_file, 'w') as f:
-            f.write(iperf_result)
-
-        self._android_device.adb.shell('rm {}'.format(
-            self._get_device_log_path()))
-
-        self._iperf_process = None
-        return log_file
diff --git a/src/antlion/controllers/monsoon.py b/src/antlion/controllers/monsoon.py
deleted file mode 100644
index a14f4fd..0000000
--- a/src/antlion/controllers/monsoon.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.controllers.monsoon_lib.api.hvpm.monsoon import Monsoon as HvpmMonsoon
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon import \
-    Monsoon as LvpmStockMonsoon
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'Monsoon'
-ACTS_CONTROLLER_REFERENCE_NAME = 'monsoons'
-
-
-def create(configs):
-    """Takes a list of Monsoon configs and returns Monsoon Controllers.
-
-    Args:
-        configs: A list of serial numbers, or dicts in the form:
-            {
-                'type': anyof('LvpmStockMonsoon', 'HvpmMonsoon')
-                'serial': int
-            }
-
-    Returns:
-        a list of Monsoon configs
-
-    Raises:
-        ValueError if the configuration does not provide the required info.
-    """
-    objs = []
-    for config in configs:
-        monsoon_type = None
-        if isinstance(config, dict):
-            if isinstance(config.get('type', None), str):
-                if 'lvpm' in config['type'].lower():
-                    monsoon_type = LvpmStockMonsoon
-                elif 'hvpm' in config['type'].lower():
-                    monsoon_type = HvpmMonsoon
-                else:
-                    raise ValueError('Unknown monsoon type %s in Monsoon '
-                                     'config %s' % (config['type'], config))
-            if 'serial' not in config:
-                raise ValueError('Monsoon config must specify "serial".')
-            serial_number = int(config.get('serial'))
-        else:
-            serial_number = int(config)
-        if monsoon_type is None:
-            if serial_number < 20000:
-                # This code assumes the LVPM has firmware version 20. If
-                # someone has updated the firmware, or somehow found an older
-                # version, the power measurement will fail.
-                monsoon_type = LvpmStockMonsoon
-            else:
-                monsoon_type = HvpmMonsoon
-
-        objs.append(monsoon_type(serial=serial_number))
-    return objs
-
-
-def destroy(monsoons):
-    for monsoon in monsoons:
-        if monsoon.is_allocated():
-            monsoon.release_monsoon_connection()
-
-
-def get_info(monsoons):
-    return [{'serial': monsoon.serial} for monsoon in monsoons]
diff --git a/src/antlion/controllers/monsoon_lib/__init__.py b/src/antlion/controllers/monsoon_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/__init__.py b/src/antlion/controllers/monsoon_lib/api/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/api/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/common.py b/src/antlion/controllers/monsoon_lib/api/common.py
deleted file mode 100644
index 70e0a0a..0000000
--- a/src/antlion/controllers/monsoon_lib/api/common.py
+++ /dev/null
@@ -1,157 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.signals import ControllerError
-
-
-class MonsoonError(ControllerError):
-    """Raised for exceptions encountered when interfacing with a Monsoon device.
-    """
-
-
-class PassthroughStates(object):
-    """An enum containing the values for power monitor's passthrough states."""
-    # "Off" or 0 means USB always off.
-    OFF = 0
-    # "On" or 1 means USB always on.
-    ON = 1
-    # "Auto" or 2 means USB is automatically turned off during sampling, and
-    # turned back on after sampling.
-    AUTO = 2
-
-
-PASSTHROUGH_STATES = {
-    'off': PassthroughStates.OFF,
-    'on': PassthroughStates.ON,
-    'auto': PassthroughStates.AUTO
-}
-
-
-class MonsoonDataRecord(object):
-    """A data class for Monsoon data points."""
-    def __init__(self, sample_time, relative_time, current):
-        """Creates a new MonsoonDataRecord.
-
-        Args:
-            sample_time: the unix timestamp of the sample.
-            relative_time: the time since the start of the measurement.
-            current: The current in Amperes as a string.
-        """
-        self._sample_time = sample_time
-        self._relative_time = relative_time
-        self._current = current
-
-    @property
-    def time(self):
-        """The time the record was fetched."""
-        return self._sample_time
-
-    @property
-    def relative_time(self):
-        """The time the record was fetched, relative to collection start."""
-        return self._relative_time
-
-    @property
-    def current(self):
-        """The amount of current in Amperes measured for the given record."""
-        return self._current
-
-
-class MonsoonResult(object):
-    """An object that contains aggregated data collected during sampling.
-
-    Attributes:
-        _num_samples: The number of samples gathered.
-        _sum_currents: The total sum of all current values gathered, in amperes.
-        _hz: The frequency sampling is being done at.
-        _voltage: The voltage output during sampling.
-    """
-
-    # The number of decimal places to round a value to.
-    ROUND_TO = 6
-
-    def __init__(self, num_samples, sum_currents, hz, voltage, datafile_path):
-        """Creates a new MonsoonResult.
-
-        Args:
-            num_samples: the number of samples collected.
-            sum_currents: the total summation of every current measurement.
-            hz: the number of samples per second.
-            voltage: the voltage used during the test.
-            datafile_path: the path to the monsoon data file.
-        """
-        self._num_samples = num_samples
-        self._sum_currents = sum_currents
-        self._hz = hz
-        self._voltage = voltage
-        self.tag = datafile_path
-
-    def get_data_points(self):
-        """Returns an iterator of MonsoonDataRecords."""
-        class MonsoonDataIterator:
-            def __init__(self, file):
-                self.file = file
-
-            def __iter__(self):
-                with open(self.file, 'r') as f:
-                    start_time = None
-                    for line in f:
-                        # Remove the newline character.
-                        line.strip()
-                        sample_time, current = map(float, line.split(' '))
-                        if start_time is None:
-                            start_time = sample_time
-                        yield MonsoonDataRecord(sample_time,
-                                                sample_time - start_time,
-                                                current)
-
-        return MonsoonDataIterator(self.tag)
-
-    @property
-    def num_samples(self):
-        """The number of samples recorded during the test."""
-        return self._num_samples
-
-    @property
-    def average_current(self):
-        """Average current in mA."""
-        if self.num_samples == 0:
-            return 0
-        return round(self._sum_currents * 1000 / self.num_samples,
-                     self.ROUND_TO)
-
-    @property
-    def total_charge(self):
-        """Total charged used in the unit of mAh."""
-        return round((self._sum_currents / self._hz) * 1000 / 3600,
-                     self.ROUND_TO)
-
-    @property
-    def total_power(self):
-        """Total power used."""
-        return round(self.average_current * self._voltage, self.ROUND_TO)
-
-    @property
-    def voltage(self):
-        """The voltage during the measurement (in Volts)."""
-        return self._voltage
-
-    def __str__(self):
-        return ('avg current: %s\n'
-                'total charge: %s\n'
-                'total power: %s\n'
-                'total samples: %s' % (self.average_current, self.total_charge,
-                                      self.total_power, self._num_samples))
diff --git a/src/antlion/controllers/monsoon_lib/api/hvpm/__init__.py b/src/antlion/controllers/monsoon_lib/api/hvpm/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/api/hvpm/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/hvpm/monsoon.py b/src/antlion/controllers/monsoon_lib/api/hvpm/monsoon.py
deleted file mode 100644
index 44afba2..0000000
--- a/src/antlion/controllers/monsoon_lib/api/hvpm/monsoon.py
+++ /dev/null
@@ -1,166 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import multiprocessing
-import time
-
-from Monsoon import HVPM
-from Monsoon import Operations as op
-
-from antlion.controllers.monsoon_lib.api.common import MonsoonResult
-from antlion.controllers.monsoon_lib.api.monsoon import BaseMonsoon
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import AssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLine
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import DownSampler
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import SampleAggregator
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import Tee
-from antlion.controllers.monsoon_lib.sampling.hvpm.transformers import HvpmTransformer
-
-
-class Monsoon(BaseMonsoon):
-    """The controller class for interacting with the HVPM Monsoon."""
-
-    # The device doesn't officially support voltages lower than this. Note that
-    # 0 is a valid voltage.
-    MIN_VOLTAGE = 0.8
-
-    # The Monsoon doesn't support setting higher voltages than this directly
-    # without tripping overvoltage.
-    # Note that it is possible to increase the voltage above this value by
-    # increasing the voltage by small increments over a period of time.
-    # The communication protocol supports up to 16V.
-    MAX_VOLTAGE = 13.5
-
-    def __init__(self, serial):
-        super().__init__()
-        self.serial = serial
-        self._mon = HVPM.Monsoon()
-        self._mon.setup_usb(serial)
-        self._allocated = True
-        if self._mon.Protocol.DEVICE is None:
-            raise ValueError('HVPM Monsoon %s could not be found.' % serial)
-
-    def set_voltage(self, voltage):
-        """Sets the output voltage of monsoon.
-
-        Args:
-            voltage: The voltage to set the output to.
-        """
-        self._log.debug('Setting voltage to %sV.' % voltage)
-        self._mon.setVout(voltage)
-
-    def set_max_current(self, amperes):
-        """Sets monsoon's max output current.
-
-        Args:
-            amperes: The max current in A.
-        """
-        self._mon.setRunTimeCurrentLimit(amperes)
-
-    def set_max_initial_current(self, amperes):
-        """Sets the max power-up/initial current.
-
-        Args:
-            amperes: The max initial current allowed in amperes.
-        """
-        self._mon.setPowerUpCurrentLimit(amperes)
-
-    @property
-    def status(self):
-        """Gets the status params of monsoon.
-
-        Returns:
-            A dictionary of {status param, value} key-value pairs.
-        """
-        self._mon.fillStatusPacket()
-        return self._mon.statusPacket
-
-    def _set_usb_passthrough_mode(self, mode):
-        """Sends the call to set usb passthrough mode.
-
-        Args:
-            mode: The state to set the USB passthrough to. Can either be the
-                string name of the state or the integer value.
-
-                "Off" or 0 means USB always off.
-                "On" or 1 means USB always on.
-                "Auto" or 2 means USB is automatically turned off during
-                    sampling, and turned back on after sampling.
-        """
-        self._mon.setUSBPassthroughMode(mode)
-
-    def _get_main_voltage(self):
-        """Returns the value of the voltage on the main channel."""
-        # Any getValue call on a setX function will return the value set for X.
-        # Using this, we can pull the last setMainVoltage (or its default).
-        return (self._mon.Protocol.getValue(op.OpCodes.setMainVoltage, 4) /
-                op.Conversion.FLOAT_TO_INT)
-
-    def measure_power(self,
-                      duration,
-                      measure_after_seconds=0,
-                      hz=5000,
-                      output_path=None,
-                      transformers=None):
-        """See parent docstring for details."""
-        voltage = self._get_main_voltage()
-
-        aggregator = SampleAggregator(measure_after_seconds)
-        manager = multiprocessing.Manager()
-
-        assembly_line_builder = AssemblyLineBuilder(manager.Queue,
-                                                    ThreadAssemblyLine)
-        assembly_line_builder.source(
-            HvpmTransformer(self.serial, duration + measure_after_seconds))
-        if hz != 5000:
-            assembly_line_builder.into(DownSampler(int(5000 / hz)))
-        if output_path:
-            assembly_line_builder.into(Tee(output_path, measure_after_seconds))
-        assembly_line_builder.into(aggregator)
-        if transformers:
-            for transformer in transformers:
-                assembly_line_builder.into(transformer)
-
-        self.take_samples(assembly_line_builder.build())
-
-        manager.shutdown()
-
-        self._mon.setup_usb(self.serial)
-        self._allocated = True
-        monsoon_data = MonsoonResult(aggregator.num_samples,
-                                     aggregator.sum_currents, hz, voltage,
-                                     output_path)
-        self._log.info('Measurement summary:\n%s', str(monsoon_data))
-        return monsoon_data
-
-    def reconnect_monsoon(self):
-        """Reconnect Monsoon to serial port."""
-        self.release_monsoon_connection()
-        self._log.info('Closed monsoon connection.')
-        time.sleep(5)
-        self.establish_monsoon_connection()
-
-    def release_monsoon_connection(self):
-        self._mon.closeDevice()
-        self._allocated = False
-
-    def is_allocated(self):
-        return self._allocated
-
-    def establish_monsoon_connection(self):
-        self._mon.setup_usb(self.serial)
-        # Makes sure the Monsoon is in the command-receiving state.
-        self._mon.stopSampling()
diff --git a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/__init__.py b/src/antlion/controllers/monsoon_lib/api/lvpm_stock/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon.py b/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon.py
deleted file mode 100644
index 063972f..0000000
--- a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import multiprocessing
-import time
-
-from antlion.controllers.monsoon_lib.api.common import MonsoonResult
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon_proxy import MonsoonProxy
-from antlion.controllers.monsoon_lib.api.monsoon import BaseMonsoon
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import AssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLine
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import DownSampler
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import SampleAggregator
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import Tee
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.stock_transformers import StockLvpmSampler
-
-
-class Monsoon(BaseMonsoon):
-    """The controller class for interacting with the LVPM Monsoon."""
-
-    # The device protocol has a floor value for positive voltages. Note that 0
-    # is still a valid voltage.
-    MIN_VOLTAGE = 2.01
-
-    # The device protocol does not support values above this.
-    MAX_VOLTAGE = 4.55
-
-    def __init__(self, serial, device=None):
-        super().__init__()
-        self._mon = MonsoonProxy(serialno=serial, device=device)
-        self._allocated = True
-        self.serial = serial
-
-    def set_voltage(self, voltage):
-        """Sets the output voltage of monsoon.
-
-        Args:
-            voltage: Voltage to set the output to.
-        """
-        self._log.debug('Setting voltage to %sV.' % voltage)
-        self._mon.set_voltage(voltage)
-
-    def set_max_current(self, amperes):
-        """Sets monsoon's max output current.
-
-        Args:
-            amperes: The max current in A.
-        """
-        self._mon.set_max_current(amperes)
-
-    def set_max_initial_current(self, amperes):
-        """Sets the max power-up/initial current.
-
-        Args:
-            amperes: The max initial current allowed in amperes.
-        """
-        self._mon.set_max_initial_current(amperes)
-
-    @property
-    def status(self):
-        """Gets the status params of monsoon.
-
-        Returns:
-            A dictionary of {status param, value} key-value pairs.
-        """
-        return self._mon.get_status()
-
-    def _set_usb_passthrough_mode(self, mode):
-        """Sends the call to set usb passthrough mode.
-
-        Args:
-            mode: The state to set the USB passthrough to. Can either be the
-                string name of the state or the integer value.
-
-                "Off" or 0 means USB always off.
-                "On" or 1 means USB always on.
-                "Auto" or 2 means USB is automatically turned off during
-                    sampling, and turned back on after sampling.
-        """
-        self._mon.set_usb_passthrough(mode)
-
-    def measure_power(self,
-                      duration,
-                      measure_after_seconds=0,
-                      hz=5000,
-                      output_path=None,
-                      transformers=None):
-        """See parent docstring for details."""
-        voltage = self._mon.get_voltage()
-
-        aggregator = SampleAggregator(measure_after_seconds)
-        manager = multiprocessing.Manager()
-
-        assembly_line_builder = AssemblyLineBuilder(manager.Queue,
-                                                    ThreadAssemblyLine)
-        assembly_line_builder.source(
-            StockLvpmSampler(self.serial, duration + measure_after_seconds))
-        if hz != 5000:
-            assembly_line_builder.into(DownSampler(int(round(5000 / hz))))
-        if output_path is not None:
-            assembly_line_builder.into(Tee(output_path, measure_after_seconds))
-        assembly_line_builder.into(aggregator)
-        if transformers:
-            for transformer in transformers:
-                assembly_line_builder.into(transformer)
-
-        self.take_samples(assembly_line_builder.build())
-
-        manager.shutdown()
-
-        monsoon_data = MonsoonResult(aggregator.num_samples,
-                                     aggregator.sum_currents, hz, voltage,
-                                     output_path)
-        self._log.info('Measurement summary:\n%s', str(monsoon_data))
-        return monsoon_data
-
-    def reconnect_monsoon(self):
-        """Reconnect Monsoon to serial port."""
-        self._log.debug('Close serial connection')
-        self._mon.ser.close()
-        self._log.debug('Reset serial port')
-        time.sleep(5)
-        self._log.debug('Open serial connection')
-        self._mon.ser.open()
-        self._mon.ser.reset_input_buffer()
-        self._mon.ser.reset_output_buffer()
-
-    def release_monsoon_connection(self):
-        self._mon.release_dev_port()
-        self._allocated = False
-
-    def is_allocated(self):
-        return self._allocated
-
-    def establish_monsoon_connection(self):
-        self._mon.obtain_dev_port()
-        self._allocated = True
-        # Makes sure the Monsoon is in the command-receiving state.
-        self._mon.stop_data_collection()
diff --git a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon_proxy.py b/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon_proxy.py
deleted file mode 100644
index 909bff4..0000000
--- a/src/antlion/controllers/monsoon_lib/api/lvpm_stock/monsoon_proxy.py
+++ /dev/null
@@ -1,402 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The interface for a USB-connected Monsoon power meter.
-
-Details on the protocol can be found at
-(http://msoon.com/LabEquipment/PowerMonitor/)
-
-Based on the original py2 script of kens@google.com.
-"""
-import collections
-import fcntl
-import logging
-import os
-import select
-import struct
-import sys
-import time
-
-import errno
-import serial
-
-from antlion.controllers.monsoon_lib.api.common import MonsoonError
-
-
-class LvpmStatusPacket(object):
-    """The data received from asking an LVPM Monsoon for its status.
-
-    Attributes names with the same values as HVPM match those defined in
-    Monsoon.Operations.statusPacket.
-    """
-
-    def __init__(self, values):
-        iter_value = iter(values)
-        self.packetType = next(iter_value)
-        self.firmwareVersion = next(iter_value)
-        self.protocolVersion = next(iter_value)
-        self.mainFineCurrent = next(iter_value)
-        self.usbFineCurrent = next(iter_value)
-        self.auxFineCurrent = next(iter_value)
-        self.voltage1 = next(iter_value)
-        self.mainCoarseCurrent = next(iter_value)
-        self.usbCoarseCurrent = next(iter_value)
-        self.auxCoarseCurrent = next(iter_value)
-        self.voltage2 = next(iter_value)
-        self.outputVoltageSetting = next(iter_value)
-        self.temperature = next(iter_value)
-        self.status = next(iter_value)
-        self.leds = next(iter_value)
-        self.mainFineResistor = next(iter_value)
-        self.serialNumber = next(iter_value)
-        self.sampleRate = next(iter_value)
-        self.dacCalLow = next(iter_value)
-        self.dacCalHigh = next(iter_value)
-        self.powerupCurrentLimit = next(iter_value)
-        self.runtimeCurrentLimit = next(iter_value)
-        self.powerupTime = next(iter_value)
-        self.usbFineResistor = next(iter_value)
-        self.auxFineResistor = next(iter_value)
-        self.initialUsbVoltage = next(iter_value)
-        self.initialAuxVoltage = next(iter_value)
-        self.hardwareRevision = next(iter_value)
-        self.temperatureLimit = next(iter_value)
-        self.usbPassthroughMode = next(iter_value)
-        self.mainCoarseResistor = next(iter_value)
-        self.usbCoarseResistor = next(iter_value)
-        self.auxCoarseResistor = next(iter_value)
-        self.defMainFineResistor = next(iter_value)
-        self.defUsbFineResistor = next(iter_value)
-        self.defAuxFineResistor = next(iter_value)
-        self.defMainCoarseResistor = next(iter_value)
-        self.defUsbCoarseResistor = next(iter_value)
-        self.defAuxCoarseResistor = next(iter_value)
-        self.eventCode = next(iter_value)
-        self.eventData = next(iter_value)
-
-
-class MonsoonProxy(object):
-    """Class that directly talks to monsoon over serial.
-
-    Provides a simple class to use the power meter.
-    See http://wiki/Main/MonsoonProtocol for information on the protocol.
-    """
-
-    # The format of the status packet.
-    STATUS_FORMAT = '>BBBhhhHhhhHBBBxBbHBHHHHBbbHHBBBbbbbbbbbbBH'
-
-    # The list of fields that appear in the Monsoon status packet.
-    STATUS_FIELDS = [
-        'packetType',
-        'firmwareVersion',
-        'protocolVersion',
-        'mainFineCurrent',
-        'usbFineCurrent',
-        'auxFineCurrent',
-        'voltage1',
-        'mainCoarseCurrent',
-        'usbCoarseCurrent',
-        'auxCoarseCurrent',
-        'voltage2',
-        'outputVoltageSetting',
-        'temperature',
-        'status',
-        'leds',
-        'mainFineResistorOffset',
-        'serialNumber',
-        'sampleRate',
-        'dacCalLow',
-        'dacCalHigh',
-        'powerupCurrentLimit',
-        'runtimeCurrentLimit',
-        'powerupTime',
-        'usbFineResistorOffset',
-        'auxFineResistorOffset',
-        'initialUsbVoltage',
-        'initialAuxVoltage',
-        'hardwareRevision',
-        'temperatureLimit',
-        'usbPassthroughMode',
-        'mainCoarseResistorOffset',
-        'usbCoarseResistorOffset',
-        'auxCoarseResistorOffset',
-        'defMainFineResistor',
-        'defUsbFineResistor',
-        'defAuxFineResistor',
-        'defMainCoarseResistor',
-        'defUsbCoarseResistor',
-        'defAuxCoarseResistor',
-        'eventCode',
-        'eventData',
-    ]
-
-    def __init__(self, device=None, serialno=None, connection_timeout=600):
-        """Establish a connection to a Monsoon.
-
-        By default, opens the first available port, waiting if none are ready.
-
-        Args:
-            device: The particular device port to be used.
-            serialno: The Monsoon's serial number.
-            connection_timeout: The number of seconds to wait for the device to
-                connect.
-
-        Raises:
-            TimeoutError if unable to connect to the device.
-        """
-        self.start_voltage = 0
-        self.serial = serialno
-
-        if device:
-            self.ser = serial.Serial(device, timeout=1)
-            return
-        # Try all devices connected through USB virtual serial ports until we
-        # find one we can use.
-        self._tempfile = None
-        self.obtain_dev_port(connection_timeout)
-        self.log = logging.getLogger()
-
-    def obtain_dev_port(self, timeout=600):
-        """Obtains the device port for this Monsoon.
-
-        Args:
-            timeout: The time in seconds to wait for the device to connect.
-
-        Raises:
-            TimeoutError if the device was unable to be found, or was not
-            available.
-        """
-        start_time = time.time()
-
-        while start_time + timeout > time.time():
-            for dev in os.listdir('/dev'):
-                prefix = 'ttyACM'
-                # Prefix is different on Mac OS X.
-                if sys.platform == 'darwin':
-                    prefix = 'tty.usbmodem'
-                if not dev.startswith(prefix):
-                    continue
-                tmpname = '/tmp/monsoon.%s.%s' % (os.uname()[0], dev)
-                self._tempfile = open(tmpname, 'w')
-                if not os.access(tmpname, os.R_OK | os.W_OK):
-                    try:
-                        os.chmod(tmpname, 0o666)
-                    except OSError as e:
-                        if e.errno == errno.EACCES:
-                            raise ValueError(
-                                'Unable to set permissions to read/write to '
-                                '%s. This file is owned by another user; '
-                                'please grant o+wr access to this file, or '
-                                'run as that user.')
-                        raise
-
-                try:  # Use a lock file to ensure exclusive access.
-                    fcntl.flock(self._tempfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
-                except IOError:
-                    logging.error('Device %s is in use.', repr(dev))
-                    continue
-
-                try:  # try to open the device
-                    self.ser = serial.Serial('/dev/%s' % dev, timeout=1)
-                    self.stop_data_collection()  # just in case
-                    self._flush_input()  # discard stale input
-                    status = self.get_status()
-                except Exception as e:
-                    logging.warning('Error opening device %s: %s', dev, e,
-                                    exc_info=True)
-                    continue
-
-                if not status:
-                    logging.error('No response from device %s.', dev)
-                elif self.serial and status.serialNumber != self.serial:
-                    logging.error('Another device serial #%d seen on %s',
-                                  status.serialNumber, dev)
-                else:
-                    self.start_voltage = status.voltage1
-                    return
-
-            self._tempfile = None
-            logging.info('Waiting for device...')
-            time.sleep(1)
-        raise TimeoutError(
-            'Unable to connect to Monsoon device with '
-            'serial "%s" within %s seconds.' % (self.serial, timeout))
-
-    def release_dev_port(self):
-        """Releases the dev port used to communicate with the Monsoon device."""
-        fcntl.flock(self._tempfile, fcntl.LOCK_UN)
-        self._tempfile.close()
-        self.ser.close()
-
-    def get_status(self):
-        """Requests and waits for status.
-
-        Returns:
-            status dictionary.
-        """
-        self._send_struct('BBB', 0x01, 0x00, 0x00)
-        read_bytes = self._read_packet()
-
-        if not read_bytes:
-            raise MonsoonError('Failed to read Monsoon status')
-        expected_size = struct.calcsize(self.STATUS_FORMAT)
-        if len(read_bytes) != expected_size or read_bytes[0] != 0x10:
-            raise MonsoonError('Wanted status, dropped type=0x%02x, len=%d',
-                               read_bytes[0], len(read_bytes))
-
-        status = collections.OrderedDict(
-            zip(self.STATUS_FIELDS,
-                struct.unpack(self.STATUS_FORMAT, read_bytes)))
-        p_type = status['packetType']
-        if p_type != 0x10:
-            raise MonsoonError('Packet type %s is not 0x10.' % p_type)
-
-        for k in status.keys():
-            if k.endswith('VoltageSetting'):
-                status[k] = 2.0 + status[k] * 0.01
-            elif k.endswith('FineCurrent'):
-                pass  # needs calibration data
-            elif k.endswith('CoarseCurrent'):
-                pass  # needs calibration data
-            elif k.startswith('voltage') or k.endswith('Voltage'):
-                status[k] = status[k] * 0.000125
-            elif k.endswith('Resistor'):
-                status[k] = 0.05 + status[k] * 0.0001
-                if k.startswith('aux') or k.startswith('defAux'):
-                    status[k] += 0.05
-            elif k.endswith('CurrentLimit'):
-                status[k] = 8 * (1023 - status[k]) / 1023.0
-        return LvpmStatusPacket(status.values())
-
-    def set_voltage(self, voltage):
-        """Sets the voltage on the device to the specified value.
-
-        Args:
-            voltage: Either 0 or a value between 2.01 and 4.55 inclusive.
-
-        Raises:
-            struct.error if voltage is an invalid value.
-        """
-        # The device has a range of 255 voltage values:
-        #
-        #     0   is "off". Note this value not set outputVoltageSetting to
-        #             zero. The previous outputVoltageSetting value is
-        #             maintained.
-        #     1   is 2.01V.
-        #     255 is 4.55V.
-        voltage_byte = max(0, round((voltage - 2.0) * 100))
-        self._send_struct('BBB', 0x01, 0x01, voltage_byte)
-
-    def get_voltage(self):
-        """Get the output voltage.
-
-        Returns:
-            Current Output Voltage (in unit of V).
-        """
-        return self.get_status().outputVoltageSetting
-
-    def set_max_current(self, i):
-        """Set the max output current."""
-        if i < 0 or i > 8:
-            raise MonsoonError(('Target max current %sA, is out of acceptable '
-                                'range [0, 8].') % i)
-        val = 1023 - int((i / 8) * 1023)
-        self._send_struct('BBB', 0x01, 0x0a, val & 0xff)
-        self._send_struct('BBB', 0x01, 0x0b, val >> 8)
-
-    def set_max_initial_current(self, current):
-        """Sets the maximum initial current, in mA."""
-        if current < 0 or current > 8:
-            raise MonsoonError(('Target max current %sA, is out of acceptable '
-                                'range [0, 8].') % current)
-        val = 1023 - int((current / 8) * 1023)
-        self._send_struct('BBB', 0x01, 0x08, val & 0xff)
-        self._send_struct('BBB', 0x01, 0x09, val >> 8)
-
-    def set_usb_passthrough(self, passthrough_mode):
-        """Set the USB passthrough mode.
-
-        Args:
-            passthrough_mode: The mode used for passthrough. Must be the integer
-                value. See common.PassthroughModes for a list of values and
-                their meanings.
-        """
-        self._send_struct('BBB', 0x01, 0x10, passthrough_mode)
-
-    def get_usb_passthrough(self):
-        """Get the USB passthrough mode: 0 = off, 1 = on,  2 = auto.
-
-        Returns:
-            The mode used for passthrough, as an integer. See
-                common.PassthroughModes for a list of values and their meanings.
-        """
-        return self.get_status().usbPassthroughMode
-
-    def start_data_collection(self):
-        """Tell the device to start collecting and sending measurement data."""
-        self._send_struct('BBB', 0x01, 0x1b, 0x01)  # Mystery command
-        self._send_struct('BBBBBBB', 0x02, 0xff, 0xff, 0xff, 0xff, 0x03, 0xe8)
-
-    def stop_data_collection(self):
-        """Tell the device to stop collecting measurement data."""
-        self._send_struct('BB', 0x03, 0x00)  # stop
-
-    def _send_struct(self, fmt, *args):
-        """Pack a struct (without length or checksum) and send it."""
-        # Flush out the input buffer before sending data
-        self._flush_input()
-        data = struct.pack(fmt, *args)
-        data_len = len(data) + 1
-        checksum = (data_len + sum(bytearray(data))) % 256
-        out = struct.pack('B', data_len) + data + struct.pack('B', checksum)
-        self.ser.write(out)
-
-    def _read_packet(self):
-        """Returns a single packet as a string (without length or checksum)."""
-        len_char = self.ser.read(1)
-        if not len_char:
-            raise MonsoonError('Reading from serial port timed out')
-
-        data_len = ord(len_char)
-        if not data_len:
-            return ''
-        result = self.ser.read(int(data_len))
-        result = bytearray(result)
-        if len(result) != data_len:
-            raise MonsoonError(
-                'Length mismatch, expected %d bytes, got %d bytes.', data_len,
-                len(result))
-        body = result[:-1]
-        checksum = (sum(struct.unpack('B' * len(body), body)) + data_len) % 256
-        if result[-1] != checksum:
-            raise MonsoonError(
-                'Invalid checksum from serial port! Expected %s, got %s',
-                hex(checksum), hex(result[-1]))
-        return result[:-1]
-
-    def _flush_input(self):
-        """Flushes all read data until the input is empty."""
-        self.ser.reset_input_buffer()
-        while True:
-            ready_r, ready_w, ready_x = select.select([self.ser], [],
-                                                      [self.ser], 0)
-            if len(ready_x) > 0:
-                raise MonsoonError('Exception from serial port.')
-            elif len(ready_r) > 0:
-                self.ser.read(1)  # This may cause underlying buffering.
-                # Flush the underlying buffer too.
-                self.ser.reset_input_buffer()
-            else:
-                break
diff --git a/src/antlion/controllers/monsoon_lib/api/monsoon.py b/src/antlion/controllers/monsoon_lib/api/monsoon.py
deleted file mode 100644
index 68ab81c..0000000
--- a/src/antlion/controllers/monsoon_lib/api/monsoon.py
+++ /dev/null
@@ -1,300 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import time
-
-from antlion.controllers.monsoon_lib.api import common
-from antlion.controllers.monsoon_lib.api.common import MonsoonError
-from antlion.controllers.monsoon_lib.api.common import PassthroughStates
-
-
-class BaseMonsoon(object):
-    """The base class for all Monsoon interface devices.
-
-    Attributes:
-        on_reconnect: The function to call when Monsoon has reconnected USB.
-            Raises TimeoutError if the device cannot be found.
-        on_disconnect: The function to call when Monsoon has disconnected USB.
-    """
-
-    # The minimum non-zero supported voltage for the given Monsoon device.
-    MIN_VOLTAGE = NotImplemented
-
-    # The maximum practical voltage for the given Monsoon device.
-    MAX_VOLTAGE = NotImplemented
-
-    # When ramping voltage, the rate in volts/second to increase the voltage.
-    VOLTAGE_RAMP_RATE = 3
-
-    # The time step between voltage increments. This value does not need to be
-    # modified.
-    VOLTAGE_RAMP_TIME_STEP = .1
-
-    def __init__(self):
-        self._log = logging.getLogger()
-        self.on_disconnect = lambda: None
-        self.on_reconnect = lambda: None
-
-    @classmethod
-    def get_closest_valid_voltage(cls, voltage):
-        """Returns the nearest valid voltage value."""
-        if voltage < cls.MIN_VOLTAGE / 2:
-            return 0
-        else:
-            return max(cls.MIN_VOLTAGE, min(voltage, cls.MAX_VOLTAGE))
-
-    @classmethod
-    def is_voltage_valid(cls, voltage):
-        """Returns True iff the given voltage can be set on the device.
-
-        Valid voltage values are {x | x ∈ {0} ∪ [MIN_VOLTAGE, MAX_VOLTAGE]}.
-        """
-        return cls.get_closest_valid_voltage(voltage) == voltage
-
-    @classmethod
-    def validate_voltage(cls, voltage):
-        """Raises a MonsoonError if the given voltage cannot be set."""
-        if not cls.is_voltage_valid(voltage):
-            raise MonsoonError('Invalid voltage %s. Voltage must be zero or '
-                               'within range [%s, %s].' %
-                               (voltage, cls.MIN_VOLTAGE, cls.MAX_VOLTAGE))
-
-    def set_voltage_safe(self, voltage):
-        """Sets the output voltage of monsoon to a safe value.
-
-        This function is effectively:
-            self.set_voltage(self.get_closest_valid_voltage(voltage)).
-
-        Args:
-            voltage: The voltage to set the output to.
-        """
-        normalized_voltage = self.get_closest_valid_voltage(voltage)
-        if voltage != normalized_voltage:
-            self._log.debug(
-                'Requested voltage %sV is invalid.' % voltage)
-        self.set_voltage(normalized_voltage)
-
-    def ramp_voltage(self, start, end):
-        """Ramps up the voltage to the specified end voltage.
-
-        Increments the voltage by fixed intervals of .1 Volts every .1 seconds.
-
-        Args:
-            start: The starting voltage
-            end: the end voltage. Must be higher than the starting voltage.
-        """
-        voltage = start
-
-        while voltage < end:
-            self.set_voltage(self.get_closest_valid_voltage(voltage))
-            voltage += self.VOLTAGE_RAMP_RATE * self.VOLTAGE_RAMP_TIME_STEP
-            time.sleep(self.VOLTAGE_RAMP_TIME_STEP)
-        self.set_voltage(end)
-
-    def usb(self, state):
-        """Sets the monsoon's USB passthrough mode.
-
-        This is specific to the USB port in front of the monsoon box which
-        connects to the powered device, NOT the USB that is used to talk to the
-        monsoon itself.
-
-        Args:
-            state: The state to set the USB passthrough to. Can either be the
-                string name of the state or the integer value.
-
-                "Off" or 0 means USB always off.
-                "On" or 1 means USB always on.
-                "Auto" or 2 means USB is automatically turned off during
-                    sampling, and turned back on after sampling.
-
-        Raises:
-            ValueError if the state given is invalid.
-            TimeoutError if unable to set the passthrough mode within a minute,
-                or if the device was not found after setting the state to ON.
-        """
-        expected_state = None
-        states_dict = common.PASSTHROUGH_STATES
-        if isinstance(state, str):
-            normalized_state = state.lower()
-            expected_state = states_dict.get(normalized_state, None)
-        elif state in states_dict.values():
-            expected_state = state
-
-        if expected_state is None:
-            raise ValueError(
-                'USB passthrough state %s is not a valid state. '
-                'Expected any of %s.' % (repr(state), states_dict))
-        if self.status.usbPassthroughMode == expected_state:
-            return
-
-        if expected_state in [PassthroughStates.OFF, PassthroughStates.AUTO]:
-            self.on_disconnect()
-
-        start_time = time.time()
-        time_limit_seconds = 60
-        while self.status.usbPassthroughMode != expected_state:
-            current_time = time.time()
-            if current_time >= start_time + time_limit_seconds:
-                raise TimeoutError('Setting USB mode timed out after %s '
-                                   'seconds.' % time_limit_seconds)
-            self._set_usb_passthrough_mode(expected_state)
-            time.sleep(1)
-        self._log.info('Monsoon usbPassthroughMode is now "%s"',
-                       state)
-
-        if expected_state in [PassthroughStates.ON]:
-            self._on_reconnect()
-
-    def attach_device(self, android_device):
-        """Deprecated. Use the connection callbacks instead."""
-
-        def on_reconnect():
-            # Make sure the device is connected and available for commands.
-            android_device.wait_for_boot_completion()
-            android_device.start_services()
-            # Release wake lock to put device into sleep.
-            android_device.droid.goToSleepNow()
-            self._log.info('Dut reconnected.')
-
-        def on_disconnect():
-            android_device.stop_services()
-            time.sleep(1)
-
-        self.on_reconnect = on_reconnect
-        self.on_disconnect = on_disconnect
-
-    def set_on_disconnect(self, callback):
-        """Sets the callback to be called when Monsoon disconnects USB."""
-        self.on_disconnect = callback
-
-    def set_on_reconnect(self, callback):
-        """Sets the callback to be called when Monsoon reconnects USB."""
-        self.on_reconnect = callback
-
-    def take_samples(self, assembly_line):
-        """Runs the sampling procedure based on the given assembly line."""
-        # Sampling is always done in a separate process. Release the Monsoon
-        # so the child process can sample from the Monsoon.
-        self.release_monsoon_connection()
-
-        try:
-            assembly_line.run()
-        finally:
-            self.establish_monsoon_connection()
-
-    def measure_power(self,
-                      duration,
-                      measure_after_seconds=0,
-                      hz=5000,
-                      output_path=None,
-                      transformers=None):
-        """Measure power consumption of the attached device.
-
-        This function is a default implementation of measuring power consumption
-        during gathering measurements. For offline methods, use take_samples()
-        with a custom AssemblyLine.
-
-        Args:
-            duration: Amount of time to measure power for. Note:
-                total_duration = duration + measure_after_seconds
-            measure_after_seconds: Number of seconds to wait before beginning
-                reading measurement.
-            hz: The number of samples to collect per second. Must be a factor
-                of 5000.
-            output_path: The location to write the gathered data to.
-            transformers: A list of Transformer objects that receive passed-in
-                          samples. Runs in order sent.
-
-        Returns:
-            A MonsoonData object with the measured power data.
-        """
-        raise NotImplementedError()
-
-    def set_voltage(self, voltage):
-        """Sets the output voltage of monsoon.
-
-        Args:
-            voltage: The voltage to set the output to.
-        """
-        raise NotImplementedError()
-
-    def set_max_current(self, amperes):
-        """Sets monsoon's max output current.
-
-        Args:
-            amperes: The max current in A.
-        """
-        raise NotImplementedError()
-
-    def set_max_initial_current(self, amperes):
-        """Sets the max power-up/initial current.
-
-        Args:
-            amperes: The max initial current allowed in amperes.
-        """
-        raise NotImplementedError()
-
-    @property
-    def status(self):
-        """Gets the status params of monsoon.
-
-        Returns:
-            A dictionary of {status param, value} key-value pairs.
-        """
-        raise NotImplementedError()
-
-    def _on_reconnect(self):
-        """Reconnects the DUT over USB.
-
-        Raises:
-            TimeoutError upon failure to reconnect over USB.
-        """
-        self._log.info('Reconnecting dut.')
-        # Wait for two seconds to ensure that the device is ready, then
-        # attempt to reconnect. If reconnect times out, reset the passthrough
-        # state and try again.
-        time.sleep(2)
-        try:
-            self.on_reconnect()
-        except TimeoutError as err:
-            self._log.info('Toggling USB and trying again. %s' % err)
-            self.usb(PassthroughStates.OFF)
-            time.sleep(1)
-            self.usb(PassthroughStates.ON)
-            self.on_reconnect()
-
-    def _set_usb_passthrough_mode(self, mode):
-        """Makes the underlying Monsoon call to set passthrough mode."""
-        raise NotImplementedError()
-
-    def reconnect_monsoon(self):
-        """Reconnects the Monsoon Serial/USB connection."""
-        raise NotImplementedError()
-
-    def is_allocated(self):
-        """Whether the resource is locked."""
-        raise NotImplementedError()
-
-    def release_monsoon_connection(self):
-        """Releases the underlying monsoon Serial or USB connection.
-
-        Useful for allowing other processes access to the device.
-        """
-        raise NotImplementedError()
-
-    def establish_monsoon_connection(self):
-        """Establishes the underlying monsoon Serial or USB connection."""
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/monsoon_lib/sampling/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/common.py b/src/antlion/controllers/monsoon_lib/sampling/common.py
deleted file mode 100644
index 7db8baf..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/common.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class UncalibratedSampleChunk(object):
-    """An uncalibrated sample collection stored with its calibration data.
-
-    These objects are created by the SampleChunker Transformer and read by
-    the CalibrationApplier Transformer.
-
-    Attributes:
-        samples: the uncalibrated samples list
-        calibration_data: the data used to calibrate the samples.
-    """
-
-    def __init__(self, samples, calibration_data):
-        self.samples = samples
-        self.calibration_data = calibration_data
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/engine/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/assembly_line.py b/src/antlion/controllers/monsoon_lib/sampling/engine/assembly_line.py
deleted file mode 100644
index 88cc733..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/assembly_line.py
+++ /dev/null
@@ -1,328 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import queue
-from concurrent.futures import ThreadPoolExecutor
-import multiprocessing
-
-
-class AssemblyLine(object):
-    """A class for passing data through a chain of threads or processes,
-    assembly-line style.
-
-    Attributes:
-        nodes: A list of AssemblyLine.Nodes that pass data from one node to the
-            next.
-    """
-
-    class Node(object):
-        """A Node in an AssemblyLine.
-
-        Each node is composed of the following:
-
-         input_stream                    output_stream
-        ==============> [ transformer ] ===============>
-
-        Attributes:
-            transformer: The Transformer that takes input from the input
-                stream, transforms the data, and sends it to the output stream.
-            input_stream: The stream of data to be taken in as input to this
-                transformer. This stream is the stream to be registered as the
-                previous node's output stream.
-
-        Properties:
-            output_stream: The stream of data to be passed to the next node.
-        """
-
-        def __init__(self, transformer=None, input_stream=None):
-            self.transformer = transformer
-            self.input_stream = input_stream
-
-        @property
-        def output_stream(self):
-            return self.transformer.output_stream
-
-        @output_stream.setter
-        def output_stream(self, value):
-            self.transformer.output_stream = value
-
-    def __init__(self, nodes):
-        """Initializes an AssemblyLine class.
-
-        nodes:
-            A list of AssemblyLine.Node objects.
-        """
-        self.nodes = nodes
-
-    def run(self):
-        """Runs the AssemblyLine, passing the data between each work node."""
-        raise NotImplementedError()
-
-
-class ProcessAssemblyLine(AssemblyLine):
-    """An AssemblyLine that uses processes to schedule work on nodes."""
-
-    def run(self):
-        """Runs the AssemblyLine within a process pool."""
-        if not self.nodes:
-            # If self.nodes is empty, it will create a multiprocessing.Pool of
-            # 0 nodes, which raises a ValueError.
-            return
-
-        process_pool = multiprocessing.Pool(processes=len(self.nodes))
-        for node in self.nodes:
-            process_pool.apply_async(node.transformer.transform,
-                                     [node.input_stream])
-        process_pool.close()
-        process_pool.join()
-
-
-class ThreadAssemblyLine(AssemblyLine):
-    """An AssemblyLine that uses threading to schedule work on nodes."""
-
-    def run(self):
-        """Runs the AssemblyLine within a thread pool."""
-        with ThreadPoolExecutor(max_workers=len(self.nodes)) as thread_pool:
-            for node in self.nodes:
-                thread_pool.submit(node.transformer.transform,
-                                   node.input_stream)
-
-
-class AssemblyLineBuilder(object):
-    """An abstract class that builds an AssemblyLine object.
-
-    Attributes:
-    _assembly_line_generator: The callable that creates the AssemblyLine.
-        Should be in the form of:
-
-            Args:
-                A list of AssemblyLine.Node objects.
-
-            Returns:
-                An AssemblyLine object.
-
-    _queue_generator: The callable that creates new queues to be used for
-        BufferStreams. Should be in the form of:
-
-            Args:
-                None.
-
-            Returns:
-                A Queue object.
-    """
-
-    def __init__(self, queue_generator, assembly_line_generator):
-        """Creates an AssemblyLineBuilder.
-
-        Args:
-            queue_generator: A callable of type lambda: Queue().
-            assembly_line_generator: A callable of type
-                lambda list<AssemblyLine.Node>: AssemblyLine.
-        """
-        super().__init__()
-        self._assembly_line_generator = assembly_line_generator
-        self._queue_generator = queue_generator
-
-        self.nodes = []
-        self._built = False
-
-    @property
-    def built(self):
-        return self._built
-
-    def __generate_queue(self):
-        """Returns a new Queue object for passing information between nodes."""
-        return self._queue_generator()
-
-    @property
-    def queue_generator(self):
-        """Returns the callable used for generating queues."""
-        return self._queue_generator
-
-    def source(self, transformer, input_stream=None):
-        """Adds a SourceTransformer to the AssemblyLine.
-
-        Must be the first function call on the AssemblyLineBuilder.
-
-        Args:
-            transformer: The SourceTransformer that generates data for the
-                AssemblyLine to process.
-            input_stream: The input stream to use, if necessary.
-
-        Raises:
-            ValueError if source is not the first transformer to be added to
-                the AssemblyLine, or the AssemblyLine has been built.
-        """
-        if self.nodes:
-            raise ValueError('AssemblyLines can only have a single source.')
-        if input_stream is None:
-            input_stream = DevNullBufferStream()
-        self.nodes.append(AssemblyLine.Node(transformer, input_stream))
-        return self
-
-    def into(self, transformer):
-        """Adds the given transformer next in the AssemblyLine.
-
-        Args:
-            transformer: The transformer next in the AssemblyLine.
-
-        Raises:
-            ValueError if no source node is set, or the AssemblyLine has been
-                built.
-        """
-        if not self.nodes:
-            raise ValueError('The source transformer must be set first.')
-        if self.built:
-            raise ValueError('Cannot add additional nodes after the '
-                             'AssemblyLine has been built.')
-        stream = BufferStream(self.__generate_queue())
-        self.nodes[-1].transformer.set_output_stream(stream)
-        self.nodes.append(AssemblyLine.Node(transformer, stream))
-        return self
-
-    def build(self, output_stream=None):
-        """Builds the AssemblyLine object.
-
-        Note that after this function is called this AssemblyLineBuilder cannot
-        be used again, as it is already marked as built.
-        """
-        if self.built:
-            raise ValueError('The AssemblyLine is already built.')
-        if not self.nodes:
-            raise ValueError('Cannot create an empty assembly line.')
-        self._built = True
-        if output_stream is None:
-            output_stream = DevNullBufferStream()
-        self.nodes[-1].output_stream = output_stream
-        return self._assembly_line_generator(self.nodes)
-
-
-class ThreadAssemblyLineBuilder(AssemblyLineBuilder):
-    """An AssemblyLineBuilder for generating ThreadAssemblyLines."""
-
-    def __init__(self, queue_generator=queue.Queue):
-        super().__init__(queue_generator, ThreadAssemblyLine)
-
-
-class ProcessAssemblyLineBuilder(AssemblyLineBuilder):
-    """An AssemblyLineBuilder for ProcessAssemblyLines.
-
-    Attributes:
-        manager: The multiprocessing.Manager used for having queues communicate
-            with one another over multiple processes.
-    """
-
-    def __init__(self):
-        self.manager = multiprocessing.Manager()
-        super().__init__(self.manager.Queue, ProcessAssemblyLine)
-
-
-class IndexedBuffer(object):
-    """A buffer indexed with the order it was generated in."""
-
-    def __init__(self, index, size_or_buffer):
-        """Creates an IndexedBuffer.
-
-        Args:
-            index: The integer index associated with the buffer.
-            size_or_buffer:
-                either:
-                    An integer specifying the number of slots in the buffer OR
-                    A list to be used as a buffer.
-        """
-        self.index = index
-        if isinstance(size_or_buffer, int):
-            self.buffer = [None] * size_or_buffer
-        else:
-            self.buffer = size_or_buffer
-
-
-class BufferList(list):
-    """A list of Buffers.
-
-    This type is useful for differentiating when a buffer has been returned
-    from a transformer, vs when a list of buffers has been returned from a
-    transformer.
-    """
-
-
-class BufferStream(object):
-    """An object that acts as a stream between two transformers."""
-
-    # The object passed to the buffer queue to signal the end-of-stream.
-    END = None
-
-    def __init__(self, buffer_queue):
-        """Creates a new BufferStream.
-
-        Args:
-            buffer_queue: A Queue object used to pass data along the
-                BufferStream.
-        """
-        self._buffer_queue = buffer_queue
-
-    def initialize(self):
-        """Initializes the stream.
-
-        When running BufferStreams through multiprocessing, initialize must
-        only be called on the process using the BufferStream.
-        """
-        # Here we need to make any call to the stream to initialize it. This
-        # makes read and write times for the first buffer faster, preventing
-        # the data at the beginning from being dropped.
-        self._buffer_queue.qsize()
-
-    def end_stream(self):
-        """Closes the stream.
-
-        By convention, a None object is used, mirroring file reads returning
-        an empty string when the end of file is reached.
-        """
-        self._buffer_queue.put(None, block=False)
-
-    def add_indexed_buffer(self, buffer):
-        """Adds the given buffer to the buffer stream."""
-        self._buffer_queue.put(buffer, block=False)
-
-    def remove_indexed_buffer(self):
-        """Removes an indexed buffer from the array.
-
-        This operation blocks until data is received.
-
-        Returns:
-            an IndexedBuffer.
-        """
-        return self._buffer_queue.get()
-
-
-class DevNullBufferStream(BufferStream):
-    """A BufferStream that is always empty."""
-
-    def __init__(self, *_):
-        super().__init__(None)
-
-    def initialize(self):
-        """Does nothing. Nothing to initialize."""
-
-    def end_stream(self):
-        """Does nothing. The stream always returns end-of-stream when read."""
-
-    def add_indexed_buffer(self, buffer):
-        """Imitating /dev/null, nothing will be written to the stream."""
-
-    def remove_indexed_buffer(self):
-        """Always returns the end-of-stream marker."""
-        return None
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/calibration.py b/src/antlion/controllers/monsoon_lib/sampling/engine/calibration.py
deleted file mode 100644
index 1e531ed..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/calibration.py
+++ /dev/null
@@ -1,181 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class CalibrationError(Exception):
-    """Raised when a value is requested before it is properly calibrated."""
-
-
-class CalibrationCollection(object):
-    """The interface for keeping track of calibration values.
-
-    This class is an abstract representation of a collection of Calibration
-    values. Some CalibrationCollections may simply be a dictionary that returns
-    values given to it (see CalibrationScalars). Others may accept multiple
-    values and return the average for a set rolling window (see
-    CalibrationWindow).
-
-    Whichever the implementation, this interface gives end-users a way of
-    setting and querying a collection of calibration data that comes from a
-    Monsoon device.
-    """
-
-    def add(self, channel, origin, granularity, value):
-        """Adds a value to the calibration storage.
-
-        The passed in channel, origin, and granularity arguments will be used
-        as a key to handle and store the value passed in.
-
-        Args:
-            channel: The channel this value comes from. See
-                MonsoonConstants.Channel.
-            origin: The origin type for this value. See MonsoonConstants.Origin.
-            granularity: The granularity type for this value. See
-                MonsoonConstants.Granularity.
-            value: The value to set within the collection.
-        """
-        raise NotImplementedError()
-
-    def get_keys(self):
-        """Returns the list of possible keys for obtaining calibration data.
-
-        Not all possible (Channel, Origin, Granularity) combinations may be
-        available for all CalibrationCollections. It is also not guaranteed the
-        CalibrationCollection's key set is static.
-        """
-        raise NotImplementedError()
-
-    def get(self, channel, origin, granularity):
-        """Returns the calibration value for a given key."""
-        raise NotImplementedError()
-
-
-class CalibrationWindows(CalibrationCollection):
-    """A class that holds calibration data in sliding windows.
-
-    After the window size has been filled, a calibration value is removed every
-    time a new calibration value is added.
-    """
-
-    def __init__(self, calibration_window_size=5):
-        """Creates a collection of CalibrationWindows.
-
-        calibration_window_size: The number of entries in the rolling window to
-            consider for calibration.
-        """
-        super().__init__()
-        self._calibrations = dict()
-        self._calibration_window_size = calibration_window_size
-
-    def add(self, channel, origin, granularity, value):
-        """Adds the given value to the given calibration window.
-
-        Args:
-            channel: The channel being calibrated.
-            origin: The origin value being calibrated.
-            granularity: The granularity level being calibrated.
-            value: The calibration value.
-        """
-        window = self._calibrations[(channel, origin, granularity)]
-        if len(window) == self._calibration_window_size:
-            window.popleft()
-        window.append(value)
-
-    def get_keys(self):
-        return self._calibrations.keys()
-
-    def get(self, channel, origin, granularity):
-        window = self._calibrations[(channel, origin, granularity)]
-        if len(window) < self._calibration_window_size:
-            raise CalibrationError('%s is not calibrated yet.' % repr(
-                (channel, origin, granularity)))
-        return sum(window) / self._calibration_window_size
-
-
-class CalibrationScalars(CalibrationCollection):
-    """A collection of calibrations where scalar values are used.
-
-    Reading scalar calibration values are faster than calculating the
-    calibration value from rolling windows.
-    """
-
-    def __init__(self):
-        self._calibrations = dict()
-
-    def get_keys(self):
-        return self._calibrations.keys()
-
-    def add(self, channel, origin, granularity, value):
-        """Adds a value to the calibration storage.
-
-        Note that if a value is already within the collection, it will be
-        overwritten, since CalibrationScalars can only hold a single value.
-
-        Args:
-            channel: The channel being calibrated.
-            origin: The origin value being calibrated.
-            granularity: The granularity level being calibrated.
-            value: The calibration value.
-        """
-        self._calibrations[(channel, origin, granularity)] = value
-
-    def get(self, channel, origin, granularity):
-        return self._calibrations[(channel, origin, granularity)]
-
-
-class CalibrationSnapshot(CalibrationScalars):
-    """A collection of calibrations taken from another CalibrationCollection.
-
-    CalibrationSnapshot calculates all of the calibration values of another
-    CalibrationCollection and creates a snapshot of those values. This allows
-    the CalibrationWindows to continue getting new values while another thread
-    processes the calibration on previously gathered values.
-    """
-
-    def __init__(self, calibration_collection):
-        """Generates a CalibrationSnapshot from another CalibrationCollection.
-
-        Args:
-            calibration_collection: The CalibrationCollection to create a
-                snapshot of.
-        """
-        super().__init__()
-
-        if not isinstance(calibration_collection, CalibrationCollection):
-            raise ValueError('Argument must inherit from '
-                             'CalibrationCollection.')
-
-        for key in calibration_collection.get_keys():
-            try:
-                # key's type is tuple(Channel, Origin, Granularity)
-                value = calibration_collection.get(*key)
-            except CalibrationError as calibration_error:
-                # If uncalibrated, store the CalibrationError and raise when a
-                # user has asked for the value.
-                value = calibration_error
-            self._calibrations[key] = value
-
-    def get(self, channel, origin, granularity):
-        """Returns the calibration value for the given key.
-
-        Raises:
-            CalibrationError if the requested key is not calibrated.
-        """
-        value = self._calibrations[(channel, origin, granularity)]
-        if isinstance(value, CalibrationError):
-            # The user requested an uncalibrated value. Raise that error.
-            raise value
-        return value
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/transformer.py b/src/antlion/controllers/monsoon_lib/sampling/engine/transformer.py
deleted file mode 100644
index 080a69e..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/transformer.py
+++ /dev/null
@@ -1,221 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferStream
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import DevNullBufferStream
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import IndexedBuffer
-
-
-class Transformer(object):
-    """An object that represents how to transform a given buffer into a result.
-
-    Attributes:
-        output_stream: The stream to output data to upon transformation.
-            Defaults to a DevNullBufferStream.
-    """
-
-    def __init__(self):
-        self.output_stream = DevNullBufferStream(None)
-
-    def set_output_stream(self, output_stream):
-        """Sets the Transformer's output stream to the given output stream."""
-        self.output_stream = output_stream
-
-    def transform(self, input_stream):
-        """Transforms input_stream data and passes it to self.output_stream.
-
-        Args:
-            input_stream: The BufferStream of input data this transformer should
-                transform. Note that the type of data stored within BufferStream
-                is not guaranteed to be in the format expected, much like STDIN
-                is not guaranteed to be the format a process expects. However,
-                for performance, users should expect the data to be properly
-                formatted anyway.
-        """
-        input_stream.initialize()
-        self.output_stream.initialize()
-        class_name = self.__class__.__qualname__
-        try:
-            logging.debug('%s transformer beginning.', class_name)
-            self.on_begin()
-            logging.debug('%s transformation started.', class_name)
-            self._transform(input_stream)
-        except Exception:
-            # TODO(markdr): Get multi-process error reporting to play nicer.
-            logging.exception('%s ran into an exception.', class_name)
-            raise
-        finally:
-            logging.debug('%s transformation ended.', class_name)
-            self.on_end()
-            logging.debug('%s finished.', class_name)
-
-    def _transform_buffer(self, buffer):
-        """Transforms a given buffer.
-
-        The implementation can either:
-
-        1) Return the transformed buffer. Can be either in-place or a new
-           buffer.
-
-        2) Return a BufferList: a list of transformed buffers. This is useful
-           for grouping data together for faster operations.
-
-        Args:
-            buffer: The buffer to transform
-
-        Returns:
-            either a buffer or a BufferList. See detailed documentation.
-        """
-        raise NotImplementedError()
-
-    def _on_end_of_stream(self, input_stream):
-        """To be called when the input stream has sent the end of stream signal.
-
-        This is particularly useful for flushing any stored memory into the
-        output stream.
-
-        Args:
-            input_stream: the stream that was closed.
-        """
-        # By default, this function closes the output stream.
-        self.output_stream.end_stream()
-
-    def _transform(self, input_stream):
-        """Should call _transform_buffer within this function."""
-        raise NotImplementedError()
-
-    def on_begin(self):
-        """A function called before the transform loop begins."""
-
-    def on_end(self):
-        """A function called after the transform loop has ended."""
-
-
-class SourceTransformer(Transformer):
-    """The base class for generating data in an AssemblyLine.
-
-    Note that any Transformer will be able to generate data, but this class is
-    a generic way to send data.
-
-    Attributes:
-        _buffer_size: The buffer size for each IndexedBuffer sent over the
-            output stream.
-    """
-
-    def __init__(self):
-        super().__init__()
-        # Defaulted to 64, which is small enough to be passed within the .6ms
-        # window, but large enough so that it does not spam the queue.
-        self._buffer_size = 64
-
-    def _transform(self, _):
-        """Generates data and sends it to the output stream."""
-        buffer_index = 0
-        while True:
-            indexed_buffer = IndexedBuffer(buffer_index, self._buffer_size)
-            buffer = self._transform_buffer(indexed_buffer.buffer)
-            if buffer is BufferStream.END:
-                break
-            indexed_buffer.buffer = buffer
-            self.output_stream.add_indexed_buffer(indexed_buffer)
-            buffer_index += 1
-
-        self.output_stream.end_stream()
-
-    def _transform_buffer(self, buffer):
-        """Fills the passed-in buffer with data."""
-        raise NotImplementedError()
-
-
-class SequentialTransformer(Transformer):
-    """A transformer that receives input in sequential order.
-
-    Attributes:
-        _next_index: The index of the next IndexedBuffer that should be read.
-    """
-
-    def __init__(self):
-        super().__init__()
-        self._next_index = 0
-
-    def _transform(self, input_stream):
-        while True:
-            indexed_buffer = input_stream.remove_indexed_buffer()
-            if indexed_buffer is BufferStream.END:
-                break
-            buffer_or_buffers = self._transform_buffer(indexed_buffer.buffer)
-            if buffer_or_buffers is not None:
-                self._send_buffers(buffer_or_buffers)
-
-        self._on_end_of_stream(input_stream)
-
-    def _send_buffers(self, buffer_or_buffer_list):
-        """Sends buffers over to the output_stream.
-
-        Args:
-            buffer_or_buffer_list: A BufferList or buffer object. Note that if
-                buffer is None, it is effectively an end-of-stream signal.
-        """
-        if not isinstance(buffer_or_buffer_list, BufferList):
-            # Assume a single buffer was returned
-            buffer_or_buffer_list = BufferList([buffer_or_buffer_list])
-
-        buffer_list = buffer_or_buffer_list
-        for buffer in buffer_list:
-            new_buffer = IndexedBuffer(self._next_index, buffer)
-            self.output_stream.add_indexed_buffer(new_buffer)
-            self._next_index += 1
-
-    def _transform_buffer(self, buffer):
-        raise NotImplementedError()
-
-
-class ParallelTransformer(Transformer):
-    """A Transformer that is capable of running in parallel.
-
-    Buffers received may be unordered. For ordered input, use
-    SequentialTransformer.
-    """
-
-    def _transform(self, input_stream):
-        while True:
-            indexed_buffer = input_stream.remove_indexed_buffer()
-            if indexed_buffer is None:
-                break
-            buffer = self._transform_buffer(indexed_buffer.buffer)
-            indexed_buffer.buffer = buffer
-            self.output_stream.add_indexed_buffer(indexed_buffer)
-
-        self._on_end_of_stream(input_stream)
-
-    def _transform_buffer(self, buffer):
-        """Transforms a given buffer.
-
-        Note that ParallelTransformers can NOT return a BufferList. This is a
-        limitation with the current indexing system. If the input buffer is
-        replaced with multiple buffers, later transformers will not know what
-        the proper order of buffers is.
-
-        Args:
-            buffer: The buffer to transform
-
-        Returns:
-            either None or a buffer. See detailed documentation.
-        """
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/monsoon_lib/sampling/engine/transformers.py b/src/antlion/controllers/monsoon_lib/sampling/engine/transformers.py
deleted file mode 100644
index a9bdee1..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/engine/transformers.py
+++ /dev/null
@@ -1,246 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import numpy as np
-
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import ParallelTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SequentialTransformer
-
-
-class Tee(SequentialTransformer):
-    """Outputs main_current values to the specified file.
-
-    Attributes:
-        _filename: the name of the file to open.
-        _fd: the filestream written to.
-    """
-
-    def __init__(self, filename, measure_after_seconds=0):
-        """Creates an OutputStream.
-
-        Args:
-            filename: the path to the file to write the collected data to.
-            measure_after_seconds: the number of seconds to skip before
-                logging data as part of the measurement.
-        """
-        super().__init__()
-        self._filename = filename
-        self._fd = None
-        self.measure_after_seconds = measure_after_seconds
-        # The time of the first sample gathered.
-        self._start_time = None
-
-    def on_begin(self):
-        self._fd = open(self._filename, 'w+')
-
-    def on_end(self):
-        self._fd.close()
-
-    def _transform_buffer(self, buffer):
-        """Writes the reading values to a file.
-
-        Args:
-            buffer: A list of HvpmReadings.
-        """
-        for sample in buffer:
-            if self._start_time is None:
-                self._start_time = sample.sample_time
-            if (sample.sample_time - self._start_time <
-                    self.measure_after_seconds):
-                continue
-            self._fd.write('%0.9f %.12f\n' %
-                           (sample.sample_time, sample.main_current))
-        self._fd.flush()
-        return BufferList([buffer])
-
-
-class PerfgateTee(SequentialTransformer):
-    """Outputs records of nanoseconds,current,voltage to the specified file.
-
-    Similar to Tee, but this version includes voltage, which may help with
-    accuracy in the power calculations.
-
-    This output type can be enabled by passing this transformer to the
-    transformers kwarg in Monsoon.measure_power():
-
-    # Uses the default Tee
-    > monsoon.measure_power(..., output_path=filename])
-
-    # Uses PerfgateTee
-    > monsoon.measure_power(..., transformers=[PerfgateTee(filename)])
-
-    Attributes:
-        _filename: the name of the file to open.
-        _fd: the filestream written to.
-    """
-
-    def __init__(self, filename, measure_after_seconds=0):
-        """Creates an OutputStream.
-
-        Args:
-            filename: the path to the file to write the collected data to.
-            measure_after_seconds: the number of seconds to skip before logging
-              data as part of the measurement.
-        """
-        super().__init__()
-        self._filename = filename
-        self._fd = None
-        self.measure_after_seconds = measure_after_seconds
-        # The time of the first sample gathered.
-        self._start_time = None
-
-    def on_begin(self):
-        self._fd = open(self._filename, 'w+')
-
-    def on_end(self):
-        self._fd.close()
-
-    def _transform_buffer(self, buffer):
-        """Writes the reading values to a file.
-
-            Args:
-                buffer: A list of HvpmReadings.
-        """
-        for sample in buffer:
-            if self._start_time is None:
-                self._start_time = sample.sample_time
-            if (sample.sample_time - self._start_time <
-                    self.measure_after_seconds):
-                continue
-            self._fd.write(
-                '%i,%.6f,%.6f\n' %
-                (sample.sample_time * 1e9, sample.main_current,
-                 sample.main_voltage))
-        self._fd.flush()
-        return BufferList([buffer])
-
-
-class SampleAggregator(ParallelTransformer):
-    """Aggregates the main current value and the number of samples gathered."""
-
-    def __init__(self, start_after_seconds=0):
-        """Creates a new SampleAggregator.
-
-        Args:
-            start_after_seconds: The number of seconds to wait before gathering
-                data. Useful for allowing the device to settle after USB
-                disconnect.
-        """
-        super().__init__()
-        self._num_samples = 0
-        self._sum_currents = 0
-        self.start_after_seconds = start_after_seconds
-        # The time of the first sample gathered.
-        self._start_time = None
-
-    def _transform_buffer(self, buffer):
-        """Aggregates the sample data.
-
-        Args:
-            buffer: A buffer of H/LvpmReadings.
-        """
-        for sample in buffer:
-            if self._start_time is None:
-                self._start_time = sample.sample_time
-            if sample.sample_time - self._start_time < self.start_after_seconds:
-                continue
-            self._num_samples += 1
-            self._sum_currents += sample.main_current
-        return buffer
-
-    @property
-    def num_samples(self):
-        """The number of samples read from the device."""
-        return self._num_samples
-
-    @property
-    def sum_currents(self):
-        """The total sum of current values gathered so far."""
-        return self._sum_currents
-
-
-class DownSampler(SequentialTransformer):
-    """Takes in sample outputs and returns a downsampled version of that data.
-
-    Note for speed, the downsampling must occur at a perfect integer divisor of
-    the Monsoon's sample rate (5000 hz).
-    """
-    _MONSOON_SAMPLE_RATE = 5000
-
-    def __init__(self, downsample_factor):
-        """Creates a DownSampler Transformer.
-
-        Args:
-            downsample_factor: The number of samples averaged together for a
-                single output sample.
-        """
-        super().__init__()
-
-        self._mean_width = int(downsample_factor)
-        self._leftovers = []
-
-    def _transform_buffer(self, buffer):
-        """Returns the buffer downsampled by an integer factor.
-
-        The algorithm splits data points into three categories:
-
-            tail: The remaining samples where not enough were collected to
-                  reach the integer factor for downsampling. The tail is stored
-                  in self._leftovers between _transform_buffer calls.
-            tailless_buffer: The samples excluding the tail that can be
-                             downsampled directly.
-
-        Below is a diagram explaining the buffer math:
-
-        input:          input buffer n              input buffer n + 1
-                 ╔══════════════════════════╗  ╔══════════════════════════╗
-             ... ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗ ║  ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗ ║ ...
-                 ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝ ║  ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝ ║
-                 ╚══════════════════════════╝  ╚══════════════════════════╝
-                               ▼                             ▼
-        alg:     ╔═════════════════════╦════╗  ╔═════════════════════╦════╗
-                 ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗║╔╗╔╗║  ║ ╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗╔╗║╔╗╔╗║
-                 ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝║╚╝╚╝║  ║ ╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝╚╝║╚╝╚╝║
-             ... ║   tailless_buffer   ║tail║  ║   tailless_buffer   ║tail║ ...
-                 ╚═════════════════════╩════╝  ╚═════════════════════╩════╝
-               ──┬───┘ └─┬─┘ ...  └─┬─┘ └────┬─────┘ └─┬─┘ ...  └─┬─┘ └──┬───
-                 ╔╗      ╔╗ ╔╗  ╔╗ ╔╗        ╔╗        ╔╗ ╔╗  ╔╗ ╔╗      ╔╗
-                 ╚╝      ╚╝ ╚╝  ╚╝ ╚╝        ╚╝        ╚╝ ╚╝  ╚╝ ╚╝      ╚╝
-                 └─────────┬────────┘        └──────────┬─────────┘
-                           ▼                            ▼
-        output:   ╔════════════════╗           ╔════════════════╗
-                  ║ ╔╗ ╔╗ ╔╗ ╔╗ ╔╗ ║           ║ ╔╗ ╔╗ ╔╗ ╔╗ ╔╗ ║
-                  ║ ╚╝ ╚╝ ╚╝ ╚╝ ╚╝ ║           ║ ╚╝ ╚╝ ╚╝ ╚╝ ╚╝ ║
-                  ╚════════════════╝           ╚════════════════╝
-                   output buffer n             output buffer n + 1
-        """
-        tail_length = int(
-            (len(buffer) + len(self._leftovers)) % self._mean_width)
-
-        tailless_buffer = np.array(buffer[:len(buffer) - tail_length])
-
-        sample_count = len(tailless_buffer) + len(self._leftovers)
-
-        downsampled_values = np.mean(
-            np.resize(
-                np.append(self._leftovers, tailless_buffer),
-                (sample_count // self._mean_width, self._mean_width)),
-            axis=1)
-
-        self._leftovers = buffer[len(buffer) - tail_length:]
-
-        return downsampled_values
diff --git a/src/antlion/controllers/monsoon_lib/sampling/enums.py b/src/antlion/controllers/monsoon_lib/sampling/enums.py
deleted file mode 100644
index 5fc30c9..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/enums.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class Origin:
-    """The origin types of a given measurement or calibration.
-
-    The Monsoon returns calibration packets for three types of origin:
-
-        ZERO: The calibrated zeroing point.
-        REFERENCE: The reference point used for the returned samples.
-        SCALE: The factor at which to scale the returned samples to get power
-               consumption data.
-    """
-    ZERO = 0
-    REFERENCE = 1
-    SCALE = 2
-
-    values = [ZERO, REFERENCE, SCALE]
-
-
-class Granularity:
-    """The granularity types.
-
-    Monsoon leverages two different granularities when returning power
-    measurements. If the power usage exceeds the threshold of the fine
-    measurement region, a coarse measurement will be used instead.
-
-    This also means that there need to be two calibration values: one for coarse
-    and one for fine.
-    """
-    COARSE = 0
-    FINE = 1
-
-    values = [COARSE, FINE]
-
-
-class Reading:
-    """The extraneous possible reading types.
-
-    Aside from coarse and fine readings (see Granularity), some Monsoons can
-    gather readings on the voltage and gain control.
-    """
-    VOLTAGE = 0x4
-    GAIN = 0x6
-
-    values = [VOLTAGE, GAIN]
-
-
-class Channel:
-    """The possible channel types.
-
-    Monsoons can read power measurements from the following three inputs.
-    Calibration and reading values may also be available on these channels.
-    """
-    MAIN = 0
-    USB = 1
-    AUX = 2
-
-    values = [MAIN, USB, AUX]
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/calibrations.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/calibrations.py
deleted file mode 100644
index d07d404..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/calibrations.py
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-from collections import deque
-
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationScalars
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationWindows
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import SampleType
-
-
-class HvpmCalibrationData(CalibrationWindows):
-    """An object that holds the Dynamic Calibration values for HVPM Sampling."""
-
-    def __init__(self, calibration_window_size=5):
-        super().__init__(calibration_window_size)
-
-        all_variable_sets = [
-            Channel.values,
-            (Origin.REFERENCE, Origin.ZERO),
-            Granularity.values
-        ]  # yapf: disable
-
-        for key in itertools.product(*all_variable_sets):
-            self._calibrations[key] = deque()
-
-    def add_calibration_sample(self, sample):
-        """Adds calibration values from a calibration sample.
-
-        The packet is formatted the following way:
-            [0]: MAIN, COARSE
-            [1]: MAIN, FINE
-            [2]: USB,  COARSE
-            [3]: USB,  FINE
-            [4]: AUX,  COARSE
-            [5]: AUX,  FINE
-            [...]: ?
-            [8]: 0x10 == Origin.ZERO
-                 0x30 == Origin.REFERENCE
-        """
-        sample_type = sample.get_sample_type()
-        if sample_type == SampleType.ZERO_CAL:
-            origin = Origin.ZERO
-        elif sample_type == SampleType.REF_CAL:
-            origin = Origin.REFERENCE
-        else:
-            raise ValueError(
-                'Packet of type %s is not a calibration packet.' % sample_type)
-
-        for i in range(6):
-            # Reads the last bit to get the Granularity value.
-            granularity = i & 0x01
-            # Divides by 2 to get the Channel value.
-            channel = i >> 1
-            self.add(channel, origin, granularity,
-                     sample[channel, granularity])
-
-
-class HvpmCalibrationConstants(CalibrationScalars):
-    """Tracks the calibration values gathered from the Monsoon status packet."""
-
-    def __init__(self, monsoon_status_packet):
-        """Initializes the calibration constants."""
-        super().__init__()
-
-        # Invalid combinations:
-        #   *,   REFERENCE, *
-        #   AUX, ZERO,      *
-        all_variable_sets = [
-            Channel.values,
-            (Origin.SCALE, Origin.ZERO),
-            Granularity.values
-        ]  # yapf: disable
-
-        for key in itertools.product(*all_variable_sets):
-            if key[0] == Channel.AUX and key[1] == Origin.ZERO:
-                # Monsoon status packets do not contain AUX, ZERO readings.
-                # Monsoon defaults these values to 0:
-                self._calibrations[key] = 0
-            else:
-                self._calibrations[key] = getattr(
-                    monsoon_status_packet,
-                    build_status_packet_attribute_name(*key))
-
-
-# TODO(markdr): Potentially find a better home for this function.
-def build_status_packet_attribute_name(channel, origin, granularity):
-    """Creates the status packet attribute name from the given keys.
-
-    The HVPM Monsoon status packet returns values in the following format:
-
-        <channel><Granularity><Origin>
-
-    Note that the following combinations are invalid:
-        <channel><Granularity>Reference
-        aux<Granularity>ZeroOffset
-
-    Args:
-        channel: the Channel value of the attribute
-        origin: the Origin value of the attribute
-        granularity: the Granularity value of the attribute
-
-    Returns:
-        A string that corresponds to the attribute of the Monsoon status packet.
-    """
-    if channel == Channel.MAIN:
-        channel = 'main'
-    elif channel == Channel.USB:
-        channel = 'usb'
-    elif channel == Channel.AUX:
-        channel = 'aux'
-    else:
-        raise ValueError('Unknown channel "%s".' % channel)
-
-    if granularity == Granularity.COARSE:
-        granularity = 'Coarse'
-    elif granularity == Granularity.FINE:
-        granularity = 'Fine'
-    else:
-        raise ValueError('Invalid granularity "%s"' % granularity)
-
-    if origin == Origin.SCALE:
-        origin = 'Scale'
-    elif origin == Origin.ZERO:
-        origin = 'ZeroOffset'
-    else:
-        # Note: Origin.REFERENCE is not valid for monsoon_status_packet
-        # attribute names.
-        raise ValueError('Invalid origin "%s"' % origin)
-
-    return '%s%s%s' % (channel, granularity, origin)
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/packet.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/packet.py
deleted file mode 100644
index 8951400..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/packet.py
+++ /dev/null
@@ -1,210 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import struct
-
-from antlion.controllers.monsoon_lib.sampling.enums import Reading
-
-
-class SampleType:
-    """An enum-like class that defines the SampleTypes for LVPM data.
-
-    Note that these values differ from the LVPM values.
-    """
-
-    # A measurement sample.
-    MEASUREMENT = 0x00
-
-    # A zero calibration sample.
-    ZERO_CAL = 0x10
-
-    # A reference calibration sample.
-    REF_CAL = 0x30
-
-    @staticmethod
-    def is_calibration(value):
-        """Returns true iff the SampleType is a type of calibration."""
-        return bool(value & 0x10)
-
-
-class HvpmMeasurement(object):
-    """An object that represents a single measurement from the HVPM device.
-
-    Attributes:
-        _sample_time: The time the sample was taken.
-        values: From the Monsoon API doc, the values are as follows:
-
-    Val │  Byte  │  Type  | Monsoon │ Reading │
-    Pos │ Offset │ Format │ Channel │  Type   │ Description
-    ────┼────────┼────────┼─────────┼─────────┼──────────────────────────────
-     0  │    0   │ uint16 │  Main   │ Coarse  │ Calibration/Measurement value
-     1  │    2   │ uint16 │  Main   │ Fine    │ Calibration/Measurement value
-     2  │    4   │ uint16 │  USB    │ Coarse  │ Calibration/Measurement value
-     3  │    6   │ uint16 │  USB    │ Fine    │ Calibration/Measurement value
-     4  │    8   │ uint16 │  Aux    │ Coarse  │ Calibration/Measurement value
-     5  │   10   │ uint16 │  Aux    │ Fine    │ Calibration/Measurement value
-     6  │   12   │ uint16 │  Main   │ Voltage │ Main V measurement, or Aux V
-        │        │        │         │         │    if setVoltageChannel == 1
-     7  │   14   │ uint16 │  USB    │ Voltage │ USB Voltage
-    ╔══════════════════════════════════════════════════════════════════════╗
-    ║ Note: The Monsoon API Doc puts the below values in the wrong order.  ║
-    ║       The values in this docstring are in the correct order.         ║
-    ╚══════════════════════════════════════════════════════════════════════╝
-     8  │   16   │ uint8? │  USB    │ Gain    │ Measurement gain control.
-        │        │        │         │         │  * Structure Unknown. May be
-        │        │        │         │         │    similar to Main Gain.
-     9  │   17   │ uint8  │  Main   │ Gain    │ Measurement gain control.
-        │        │        │         │         │  * b0-3: Believed to be gain.
-        │        │        │         │         │  * b4-5: SampleType.
-        │        │        │         │         │  * b6-7: Unknown.
-
-    """
-
-    # The total number of bytes in a measurement. See the table above.
-    SIZE = 18
-
-    def __init__(self, raw_data, sample_time):
-        self.values = struct.unpack('>8H2B', raw_data)
-        self._sample_time = sample_time
-
-    def __getitem__(self, channel_and_reading_granularity):
-        """Returns the requested reading for the given channel.
-
-        See HvpmMeasurement.__doc__ for a reference table.
-
-        Args:
-            channel_and_reading_granularity: A tuple of (channel,
-                reading_or_granularity).
-        """
-        channel = channel_and_reading_granularity[0]
-        reading_or_granularity = channel_and_reading_granularity[1]
-
-        data_index = self.get_index(channel, reading_or_granularity)
-
-        if reading_or_granularity == Reading.GAIN:
-            # The format of this value is undocumented by Monsoon Inc.
-            # Assume an unsigned 4-bit integer is used.
-            return self.values[data_index] & 0x0F
-        return self.values[data_index]
-
-    @staticmethod
-    def get_index(channel, reading_or_granularity):
-        """Returns the values array index that corresponds with the given query.
-
-        See HvpmMeasurement.__doc__ for details on how this is determined.
-
-        Args:
-            channel: The channel to read data from.
-            reading_or_granularity: The reading or granularity desired.
-
-        Returns:
-            An index corresponding to the data's location in self.values
-        """
-        if reading_or_granularity == Reading.VOLTAGE:
-            return 6 + channel
-        if reading_or_granularity == Reading.GAIN:
-            return 9 - channel
-        # reading_or_granularity is a granularity value.
-        return channel * 2 + reading_or_granularity
-
-    def get_sample_time(self):
-        """Returns the calculated time for the given sample."""
-        return self._sample_time
-
-    def get_sample_type(self):
-        """Returns a value contained in SampleType."""
-        return self.values[9] & 0x30
-
-
-class Packet(object):
-    """A packet collected directly from serial.read() during sample collection.
-
-    Large amounts of documentation here are pulled directly from
-    http://msoon.github.io/powermonitor/Python_Implementation/docs/API.pdf
-
-    For convenience, here is the table of values stored:
-
-    Offset │ Format │ Field            │ Description
-    ───────┼────────┼──────────────────┼────────────────────────────────────────
-       0   │ uint16 │ dropped_count    │ Number of dropped packets
-       2   │  bits  │ flags            │ Flag values. see self.flags property
-       3   │ uint8  │ num_measurements │ Number of measurements in this packet
-       4   │ byte[] │ measurement[0]   │ Measurement. See HvpmMeasurement class
-      22   │ byte[] │ measurement[1]   │ Optional Measurement. See above
-      44   │ byte[] │ measurement[2]   │ Optional Measurement. See above
-
-    Note that all of values except dropped_count are stored in big-endian
-    format.
-
-    Attributes:
-        _packet_data: The raw data received from the packet.
-        time_of_read: The unix timestamp this packet was collected at.
-        time_since_last_sample: The differential between this packet's
-            time_of_read and the previous packet's.
-    """
-
-    FIRST_MEASUREMENT_OFFSET = 8
-
-    # The maximum size of a packet read from USB.
-    # Note: each HVPM Packet can hold a maximum of 3 measurements.
-    MAX_PACKET_SIZE = FIRST_MEASUREMENT_OFFSET + HvpmMeasurement.SIZE * 3
-
-    def __init__(self, sampled_bytes):
-        self._packet_data = sampled_bytes
-
-        num_data_bytes = (len(sampled_bytes) - Packet.FIRST_MEASUREMENT_OFFSET)
-        self.num_measurements = num_data_bytes // HvpmMeasurement.SIZE
-
-        struct_string = (
-            '<2dhBx' +
-            (str(HvpmMeasurement.SIZE) + 's') * self.num_measurements)
-
-        # yapf: disable. Yapf forces these to try to fit one after the other.
-        (self.time_of_read,
-         self.time_since_last_sample,
-         self.dropped_count,
-         self.flags,
-         *samples) = struct.unpack(struct_string, sampled_bytes)
-        # yapf: enable
-
-        self.measurements = [None] * self.num_measurements
-
-        for i, raw_data in enumerate(samples):
-            self.measurements[i] = HvpmMeasurement(raw_data,
-                                                   self._get_sample_time(i))
-
-    def _get_sample_time(self, index):
-        """Returns the time the sample at the given index was received.
-
-        If multiple samples were captured within the same reading, the samples
-        are assumed to be uniformly distributed during the time it took to
-        sample the values.
-        """
-        time_per_sample = self.time_since_last_sample / self.num_measurements
-        return time_per_sample * (index + 1) + self.time_of_read
-
-    @property
-    def packet_counter(self):
-        """The 4-bit packet index."""
-        return self.flags & 0x0F
-
-    def get_bytes(self):
-        return list(self._packet_data)
-
-    def __getitem__(self, index):
-        return self.measurements[index]
-
-    def __len__(self):
-        return self.num_measurements
diff --git a/src/antlion/controllers/monsoon_lib/sampling/hvpm/transformers.py b/src/antlion/controllers/monsoon_lib/sampling/hvpm/transformers.py
deleted file mode 100644
index 775c309..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/hvpm/transformers.py
+++ /dev/null
@@ -1,476 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import array
-import logging
-import struct
-import time
-
-import numpy as np
-from Monsoon import HVPM
-
-from antlion.controllers.monsoon_lib.sampling.common import UncalibratedSampleChunk
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ProcessAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationError
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationSnapshot
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import ParallelTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SequentialTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SourceTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import Transformer
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.enums import Reading
-from antlion.controllers.monsoon_lib.sampling.hvpm.calibrations import HvpmCalibrationConstants
-from antlion.controllers.monsoon_lib.sampling.hvpm.calibrations import HvpmCalibrationData
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import HvpmMeasurement
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import Packet
-from antlion.controllers.monsoon_lib.sampling.hvpm.packet import SampleType
-
-
-class HvpmTransformer(Transformer):
-    """Gathers samples from the Monsoon and brings them back to the caller."""
-
-    def __init__(self, monsoon_serial, duration):
-        super().__init__()
-        self.monsoon_serial = monsoon_serial
-        self.duration = duration
-
-    def _transform(self, input_stream):
-        # We need to gather the status packet before sampling so we can use the
-        # static calibration during sample normalization.
-        monsoon = HVPM.Monsoon()
-        monsoon.setup_usb(self.monsoon_serial)
-        monsoon.fillStatusPacket()
-        monsoon_status_packet = monsoon.statusPacket()
-        monsoon.closeDevice()
-
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ProcessAssemblyLineBuilder()
-         .source(PacketCollector(self.monsoon_serial, self.duration))
-         .into(SampleNormalizer(monsoon_status_packet=monsoon_status_packet))
-         .build(output_stream=self.output_stream).run())
-        # yapf: enable
-
-
-class PacketCollector(SourceTransformer):
-    """Collects Monsoon packets into a buffer to be sent to another transformer.
-
-    Ideally, the other transformer will be in a separate process to prevent the
-    GIL from slowing down packet collection.
-
-    Attributes:
-        _monsoon_id: The id of the monsoon.
-        _monsoon: The monsoon instance. This is left unset until
-                  _initialize_monsoon() is called.
-    """
-
-    def __init__(self, monsoon_id, sampling_duration=None):
-        super().__init__()
-        self._monsoon_id = monsoon_id
-        self._monsoon = None
-        self.start_time = None
-        self.array = array.array('B', b'\x00' * Packet.MAX_PACKET_SIZE)
-        self.sampling_duration = sampling_duration
-
-    def _initialize_monsoon(self):
-        """Initializes the monsoon object.
-
-        Note that this must be done after the Transformer has started.
-        Otherwise, this transformer will have c-like objects, preventing
-        the transformer from being used with the multiprocess libraries.
-        """
-        self._monsoon = HVPM.Monsoon()
-        self._monsoon.setup_usb(self._monsoon_id)
-        self._monsoon.stopSampling()
-        self._monsoon.fillStatusPacket()
-        self._monsoon.StartSampling()
-
-    def on_begin(self):
-        if __debug__:
-            logging.warning(
-                'Debug mode is enabled. Expect a higher frequency of dropped '
-                'packets. To reduce packet drop, disable your python debugger.'
-            )
-
-        self.start_time = time.time()
-        self._initialize_monsoon()
-
-    def __del__(self):
-        if self._monsoon:
-            self.on_end()
-
-    def on_end(self):
-        self._monsoon.stopSampling()
-        self._monsoon.closeDevice()
-
-    def _transform_buffer(self, buffer):
-        """Fills the buffer with packets until time has been reached.
-
-        Returns:
-            A BufferList of a single buffer if collection is not yet finished.
-            None if sampling is complete.
-        """
-        if (self.sampling_duration
-                and self.sampling_duration < time.time() - self.start_time):
-            return None
-
-        for index in range(len(buffer)):
-            time_before_read = time.time()
-            try:
-                data = self._monsoon.Protocol.DEVICE.read(
-                    # Magic value for USB bulk reads.
-                    0x81,
-                    Packet.MAX_PACKET_SIZE,
-                    # In milliseconds.
-                    timeout=1000)
-            except Exception as e:
-                logging.warning(e)
-                continue
-            time_after_read = time.time()
-            time_data = struct.pack('dd', time_after_read,
-                                    time_after_read - time_before_read)
-            buffer[index] = time_data + data.tobytes()
-
-        return buffer
-
-
-class SampleNormalizer(Transformer):
-    """A Transformer that applies calibration to the input's packets."""
-
-    def __init__(self, monsoon_status_packet):
-        """Creates a SampleNormalizer.
-
-        Args:
-            monsoon_status_packet: The status of the monsoon. Used for gathering
-                the constant calibration data from the device.
-        """
-        super().__init__()
-        self.monsoon_status_packet = monsoon_status_packet
-
-    def _transform(self, input_stream):
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ThreadAssemblyLineBuilder()
-         .source(PacketReader(), input_stream=input_stream)
-         .into(SampleChunker())
-         .into(CalibrationApplier(self.monsoon_status_packet))
-         .build(output_stream=self.output_stream).run())
-        # yapf: enable
-
-
-class PacketReader(ParallelTransformer):
-    """Reads raw HVPM Monsoon data and converts it into Packet objects.
-
-    Attributes:
-        rollover_count: The number of times the dropped_count value has rolled
-            over it's maximum value (2^16-1).
-        previous_dropped_count: The dropped count read from the last packet.
-            Used for determining the true number of dropped samples.
-        start_time: The time of the first packet ever read.
-    """
-    """The number of seconds before considering dropped_count to be meaningful.
-
-    Monsoon devices will often report 2^16-1 as the dropped count when first
-    starting the monsoon. This usually goes away within a few milliseconds.
-    """
-    DROP_COUNT_TIMER_THRESHOLD = 1
-
-    def __init__(self):
-        super().__init__()
-        self.rollover_count = 0
-        self.previous_dropped_count = 0
-        self.start_time = 0
-
-    def _transform_buffer(self, buffer):
-        """Reads raw sample data and converts it into packet objects."""
-
-        for i in range(len(buffer)):
-            buffer[i] = Packet(buffer[i])
-
-            if buffer and not self.start_time and i == 0:
-                self.start_time = buffer[0].time_of_read
-
-            if (buffer[i].time_of_read - self.start_time >
-                    PacketReader.DROP_COUNT_TIMER_THRESHOLD):
-                self._process_dropped_count(buffer[i])
-
-        return buffer
-
-    def _process_dropped_count(self, packet):
-        """Processes the dropped count value, updating the internal counters."""
-        if packet.dropped_count == self.previous_dropped_count:
-            return
-
-        if packet.dropped_count < self.previous_dropped_count:
-            self.rollover_count += 1
-
-        self.previous_dropped_count = packet.dropped_count
-        log_function = logging.info if __debug__ else logging.warning
-        log_function('At %9f, total dropped count: %s' %
-                     (packet.time_of_read, self.total_dropped_count))
-
-    @property
-    def total_dropped_count(self):
-        """Returns the total dropped count, accounting for rollovers."""
-        return self.rollover_count * 2**16 + self.previous_dropped_count
-
-    def on_begin(self):
-        if __debug__:
-            logging.info(
-                'The python debugger is enabled. Expect results to '
-                'take longer to process after collection is complete.')
-
-    def on_end(self):
-        if self.previous_dropped_count > 0:
-            if __debug__:
-                logging.info(
-                    'During collection, a total of %d packets were '
-                    'dropped. To reduce this amount, run your test '
-                    'without debug mode enabled.' % self.total_dropped_count)
-            else:
-                logging.warning(
-                    'During collection, a total of %d packets were '
-                    'dropped.' % self.total_dropped_count)
-
-
-class SampleChunker(SequentialTransformer):
-    """Chunks input packets into lists of samples with identical calibration.
-
-    This step helps to quickly apply calibration across many samples at once.
-
-    Attributes:
-        _stored_raw_samples: The queue of raw samples that have yet to be
-            split into a new calibration group.
-        calibration_data: The calibration window information.
-    """
-
-    def __init__(self):
-        super().__init__()
-        self._stored_raw_samples = []
-        self.calibration_data = HvpmCalibrationData()
-
-    def _on_end_of_stream(self, input_stream):
-        self._send_buffers(BufferList([self._cut_new_buffer()]))
-        super()._on_end_of_stream(input_stream)
-
-    def _transform_buffer(self, buffer):
-        """Takes in data from the buffer and splits it based on calibration.
-
-        This transformer is meant to after the PacketReader.
-
-        Args:
-            buffer: A list of Packet objects.
-
-        Returns:
-            A BufferList containing 0 or more UncalibratedSampleChunk objects.
-        """
-        buffer_list = BufferList()
-        for packet in buffer:
-            for sample in packet:
-                sample_type = sample.get_sample_type()
-
-                if sample_type == SampleType.MEASUREMENT:
-                    self._stored_raw_samples.append(sample)
-                elif SampleType.is_calibration(sample_type):
-                    if len(self._stored_raw_samples) > 0:
-                        buffer_list.append(self._cut_new_buffer())
-                    self.calibration_data.add_calibration_sample(sample)
-                else:
-                    # There's no information on what this packet means within
-                    # the documentation or code Monsoon Inc. provides.
-                    logging.warning('Received unidentifiable packet with '
-                                    'SampleType %s: %s' % (sample_type,
-                                                           packet.get_bytes()))
-        return buffer_list
-
-    def _cut_new_buffer(self):
-        """Cuts a new buffer from the input stream data.
-
-        Returns:
-            The newly generated UncalibratedSampleChunk.
-        """
-        calibration_snapshot = CalibrationSnapshot(self.calibration_data)
-        new_chunk = UncalibratedSampleChunk(self._stored_raw_samples,
-                                            calibration_snapshot)
-        # Do not clear the list. Instead, create a new one so the old list can
-        # be owned solely by the UncalibratedSampleChunk.
-        self._stored_raw_samples = []
-        return new_chunk
-
-
-class HvpmReading(object):
-    """The result of fully calibrating a sample. Contains all Monsoon readings.
-
-    Attributes:
-        _reading_list: The list of values obtained from the Monsoon.
-        _time_of_reading: The time since sampling began that the reading was
-            collected at.
-    """
-
-    def __init__(self, reading_list, time_of_reading):
-        """
-        Args:
-            reading_list: A list of reading values in the order of:
-                [0] Main Current
-                [1] USB Current
-                [2] Aux Current
-                [3] Main Voltage
-                [4] USB Voltage
-            time_of_reading: The time the reading was received.
-        """
-        self._reading_list = reading_list
-        self._time_of_reading = time_of_reading
-
-    @property
-    def main_current(self):
-        return self._reading_list[0]
-
-    @property
-    def usb_current(self):
-        return self._reading_list[1]
-
-    @property
-    def aux_current(self):
-        return self._reading_list[2]
-
-    @property
-    def main_voltage(self):
-        return self._reading_list[3]
-
-    @property
-    def usb_voltage(self):
-        return self._reading_list[4]
-
-    @property
-    def sample_time(self):
-        return self._time_of_reading
-
-    def __add__(self, other):
-        return HvpmReading([
-            self.main_current + other.main_current,
-            self.usb_current + other.usb_current,
-            self.aux_current + other.aux_current,
-            self.main_voltage + other.main_voltage,
-            self.usb_voltage + other.usb_voltage,
-        ], self.sample_time + other.sample_time)
-
-    def __truediv__(self, other):
-        return HvpmReading([
-            self.main_current / other,
-            self.usb_current / other,
-            self.aux_current / other,
-            self.main_voltage / other,
-            self.usb_voltage / other,
-        ], self.sample_time / other)
-
-
-class CalibrationApplier(ParallelTransformer):
-    """Applies the calibration formula to the all given samples."""
-
-    def __init__(self, monsoon_status_packet):
-        super().__init__()
-        self.cal_constants = HvpmCalibrationConstants(monsoon_status_packet)
-        monsoon = HVPM.Monsoon()
-        self.fine_threshold = monsoon.fineThreshold
-        self._main_voltage_scale = monsoon.mainvoltageScale
-        self._usb_voltage_scale = monsoon.usbVoltageScale
-        # According to Monsoon.sampleEngine.__ADCRatio, each tick of the ADC
-        # represents this much voltage
-        self._adc_ratio = 6.25e-5
-
-    @staticmethod
-    def _is_device_calibrated(data):
-        """Checks to see if the Monsoon has completed calibration.
-
-        Args:
-            data: the calibration data.
-
-        Returns:
-            True if the data is calibrated. False otherwise.
-        """
-        try:
-            # If the data is calibrated for any Origin.REFERENCE value, it is
-            # calibrated for all Origin.REFERENCE values. The same is true for
-            # Origin.ZERO.
-            data.get(Channel.MAIN, Origin.REFERENCE, Granularity.COARSE)
-            data.get(Channel.MAIN, Origin.ZERO, Granularity.COARSE)
-        except CalibrationError:
-            return False
-        return True
-
-    def _transform_buffer(self, buffer):
-        """Transforms the buffer's information into HvpmReadings.
-
-        Args:
-            buffer: An UncalibratedSampleChunk. This buffer is in-place
-                transformed into a buffer of HvpmReadings.
-        """
-        calibration_data = buffer.calibration_data
-
-        if not self._is_device_calibrated(calibration_data):
-            buffer.samples.clear()
-            return buffer.samples
-
-        readings = np.zeros((len(buffer.samples), 5))
-
-        measurements = np.array([sample.values for sample in buffer.samples])
-        calibrated_value = np.zeros((len(buffer.samples), 2))
-
-        for channel in Channel.values:
-            for granularity in Granularity.values:
-                scale = self.cal_constants.get(channel, Origin.SCALE,
-                                               granularity)
-                zero_offset = self.cal_constants.get(channel, Origin.ZERO,
-                                                     granularity)
-                cal_ref = calibration_data.get(channel, Origin.REFERENCE,
-                                               granularity)
-                cal_zero = calibration_data.get(channel, Origin.ZERO,
-                                                granularity)
-                zero_offset += cal_zero
-                if cal_ref - zero_offset != 0:
-                    slope = scale / (cal_ref - zero_offset)
-                else:
-                    slope = 0
-                if granularity == Granularity.FINE:
-                    slope /= 1000
-
-                index = HvpmMeasurement.get_index(channel, granularity)
-                calibrated_value[:, granularity] = slope * (
-                    measurements[:, index] - zero_offset)
-
-            fine_data_position = HvpmMeasurement.get_index(
-                channel, Granularity.FINE)
-            readings[:, channel] = np.where(
-                measurements[:, fine_data_position] < self.fine_threshold,
-                calibrated_value[:, Granularity.FINE],
-                calibrated_value[:, Granularity.COARSE]) / 1000.0  # to mA
-
-        main_voltage_index = HvpmMeasurement.get_index(Channel.MAIN,
-                                                       Reading.VOLTAGE)
-        usb_voltage_index = HvpmMeasurement.get_index(Channel.USB,
-                                                      Reading.VOLTAGE)
-        readings[:, 3] = (measurements[:, main_voltage_index] * self._adc_ratio
-                          * self._main_voltage_scale)
-        readings[:, 4] = (measurements[:, usb_voltage_index] * self._adc_ratio
-                          * self._usb_voltage_scale)
-
-        for i in range(len(buffer.samples)):
-            buffer.samples[i] = HvpmReading(
-                list(readings[i]), buffer.samples[i].get_sample_time())
-
-        return buffer.samples
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/__init__.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/calibrations.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/calibrations.py
deleted file mode 100644
index d9f5fdb..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/calibrations.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Note: These calibration classes are based on the original reverse-engineered
-algorithm for handling calibration values. As a result, LvpmCalibrationConstants
-does not exist for the LVPM stock sampling algorithm."""
-
-import itertools
-from collections import deque
-
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationWindows
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationSnapshot
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.packet import SampleType
-
-# The numerator used for FINE granularity calibration.
-_FINE_NUMERATOR = .0332
-
-# The numerator used for COARSE granularity calibration
-_COARSE_NUMERATOR = 2.88
-
-
-class LvpmCalibrationData(CalibrationWindows):
-    """An object that holds the Dynamic Calibration values for HVPM Sampling."""
-
-    def __init__(self, calibration_window_size=5):
-        super().__init__(calibration_window_size)
-
-        all_variable_sets = [
-            Channel.values,
-            (Origin.REFERENCE, Origin.ZERO),
-            Granularity.values
-        ]  # yapf: disable
-
-        for key in itertools.product(*all_variable_sets):
-            self._calibrations[key] = deque()
-
-    def add_calibration_sample(self, sample):
-        """Adds calibration values from a calibration sample.
-
-        LVPM Calibration Data is stored as:
-            [0]: Main Current calibration
-            [1]: USB Current calibration
-            [2]: Aux Current calibration
-            [3]: Main Voltage (unknown if this is actually calibration or a
-                               measurement!)
-
-        Note that coarse vs fine is determined by the position within the
-        packet. Even indexes are fine values, odd indexes are coarse values.
-        """
-        sample_type = sample.get_sample_type()
-        if sample_type == SampleType.ZERO_CAL:
-            origin = Origin.ZERO
-        elif sample_type == SampleType.REF_CAL:
-            origin = Origin.REFERENCE
-        else:
-            raise ValueError(
-                'Packet of type %s is not a calibration packet.' % sample_type)
-        granularity = sample.get_calibration_granularity()
-        for channel in Channel.values:
-            self.add(channel, origin, granularity, sample[channel])
-
-
-class LvpmCalibrationSnapshot(CalibrationSnapshot):
-    """A class that holds a snapshot of LVPM Calibration Data.
-
-    According to the original reverse-engineered algorithm for obtaining
-    samples, the LVPM determines scale from the reference and zero calibration
-    values. Here, we calculate those when taking a snapshot."""
-
-    def __init__(self, lvpm_calibration_base):
-        super().__init__(lvpm_calibration_base)
-        pairs = itertools.product(Channel.values, Granularity.values)
-
-        for channel, granularity in pairs:
-            if granularity == Granularity.COARSE:
-                numerator = _COARSE_NUMERATOR
-            else:
-                numerator = _FINE_NUMERATOR
-
-            divisor = (
-                self._calibrations[(channel, Origin.REFERENCE, granularity)] -
-                self._calibrations[(channel, Origin.ZERO, granularity)])
-            # Prevent division by zero.
-            if divisor == 0:
-                divisor = .0001
-
-            self._calibrations[(channel, Origin.SCALE,
-                                granularity)] = (numerator / divisor)
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/packet.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/packet.py
deleted file mode 100644
index 75d7af3..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/packet.py
+++ /dev/null
@@ -1,222 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import struct
-
-from antlion.controllers.monsoon_lib.sampling.enums import Reading
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-
-
-class SampleType:
-    """An enum-like class that defines the SampleTypes for LVPM data.
-
-    Note that these values differ from the HVPM values.
-    """
-
-    # A measurement sample.
-    MEASUREMENT = 0x00
-
-    # A zero calibration sample.
-    ZERO_CAL = 0x01
-
-    # A reference calibration sample.
-    REF_CAL = 0x02
-
-    @staticmethod
-    def is_calibration(value):
-        """Returns true iff the SampleType is a type of calibration."""
-        return value == SampleType.ZERO_CAL or value == SampleType.REF_CAL
-
-
-class LvpmMeasurement(object):
-    """An object that tracks an individual measurement within the LvpmPacket.
-
-    Attributes:
-        _sample_time: The time the sample was taken.
-        _sample_type: The type of sample stored.
-        values: From reverse engineering, the values are as follows:
-
-
-    If the measurement is a calibration measurement:
-
-    Val │  Byte  │  Type  │ Monsoon │ Reading │
-    Pos │ Offset │ Format │ Channel │  Type   │ Description
-    ────┼────────┼────────┼─────────┼─────────┼──────────────────────────────
-     0  │   0    │  int16 │  Main   │ Current │ Calibration value.
-     1  │   2    │  int16 │  USB    │ Current │ Calibration value.
-     2  │   4    │  int16 │  Aux    │ Current │ Calibration value.
-     3  │   6    │ uint16 │  Main   │ Voltage │ Calibration value.
-
-    If the measurement is a power reading:
-
-    Val │  Byte  │  Type  │ Monsoon │ Reading │
-    Pos │ Offset │ Format │ Channel │  Type   │ Description
-    ────┼────────┼────────┼─────────┼─────────┼──────────────────────────────
-     0  │   0    │  int16 │  Main   │ Current │ b0: if 1, Coarse, else Fine
-        │        │        │         │         │ b1-7: Measurement value.
-     1  │   2    │  int16 │  USB    │ Current │ b0: if 1, Coarse, else Fine
-        │        │        │         │         │ b1-7: Measurement value.
-     2  │   4    │  int16 │  Aux    │ Current │ b0: if 1, Coarse, else Fine
-        │        │        │         │         │ b1-7: Measurement value.
-     3  │   6    │ uint16 │  Main   │ Voltage │ Measurement value.
-
-    """
-
-    # The total number of bytes in a measurement. See the table above.
-    SIZE = 8
-
-    def __init__(self, raw_data, sample_time, sample_type, entry_index):
-        """Creates a new LVPM Measurement.
-
-        Args:
-            raw_data: The raw data format of the LvpmMeasurement.
-            sample_time: The time the sample was recorded.
-            sample_type: The type of sample that was recorded.
-            entry_index: The index of the measurement within the packet.
-        """
-        self.values = struct.unpack('>3hH', raw_data)
-        self._sample_time = sample_time
-        self._sample_type = sample_type
-
-        if SampleType.is_calibration(self._sample_type):
-            # Calibration packets have granularity values determined by whether
-            # or not the entry was odd or even within the returned packet.
-            if entry_index % 2 == 0:
-                self._granularity = Granularity.FINE
-            else:
-                self._granularity = Granularity.COARSE
-        else:
-            # If it is not a calibration packet, each individual reading (main
-            # current, usb current, etc) determines granularity value by
-            # checking the LSB of the measurement value.
-            self._granularity = None
-
-    def __getitem__(self, channel_or_reading):
-        """Returns the requested reading for the given channel.
-
-        Args:
-            channel_or_reading: either a Channel or Reading.Voltage.
-        """
-        if channel_or_reading == Reading.VOLTAGE:
-            return self.values[3]
-        else:
-            # Must be a channel. If it is not, this line will throw an
-            # IndexError, which is what we will want for invalid values.
-            return self.values[channel_or_reading]
-
-    def get_sample_time(self):
-        """Returns the time (since the start time) this sample was collected."""
-        return self._sample_time
-
-    def get_sample_type(self):
-        """Returns a value contained in SampleType."""
-        return self._sample_type
-
-    def get_calibration_granularity(self):
-        """Returns the granularity associated with this packet.
-
-        If the packet is not a calibration packet, None is returned.
-        """
-        return self._granularity
-
-
-class Packet(object):
-    """A packet collected directly from serial.read() during sample collection.
-
-    Note that the true documentation for this has been lost to time. This class
-    and documentation uses knowledge that comes from several reverse-engineering
-    projects. Most of this knowledge comes from
-    http://wiki/Main/MonsoonProtocol.
-
-    The data table looks approximately like this:
-
-    Offset │ Format  │ Field   │ Description
-    ───────┼─────────┼─────────┼────────────────────────────────────────────
-       0   │  uint8  │  flags  │ Bits:
-           │         │    &    │  * b0-3: Sequence number (0-15). Increments
-           │         │   seq   │          each packet
-           │         │         │  * b4: 1 means over-current or thermal kill
-           │         │         │  * b5: Main Output, 1 == unit is at voltage,
-           │         │         │                     0 == output disabled.
-           │         │         │  * b6-7: reserved.
-       1   │  uint8  │ packet  │ The type of the packet:
-           │         │  type   │   * 0: A data packet
-           │         │         │   * 1: A zero calibration packet
-           │         │         │   * 2: A reference calibration packet
-       2   │  uint8  │ unknown │ Always seems to be 0x00
-       3   │  uint8  │ unknown │ Always seems to be 0x00 or 0xC4.
-       4   │ byte[8] │   data  │ See LvpmMeasurement.
-      ...  │ byte[8] │   data  │ Additional LvpmMeasurements.
-      -1   │  uint8  │ unknown │ Last byte, unknown values. Has been seen to
-           │         │         │ usually be \x00, or \x84.
-
-    Attributes:
-        _packet_data: The raw data received from the packet.
-        time_of_read: The unix timestamp this packet was collected at.
-        time_since_last_sample: The difference between this packet's
-            time_of_read and the previous packet's.
-    """
-
-    # The number of bytes before the first packet.
-    FIRST_MEASUREMENT_OFFSET = 4
-
-    def __init__(self, sampled_bytes, time_of_read,
-                 time_since_last_sample):
-        self._packet_data = sampled_bytes
-        self.time_of_read = time_of_read
-        self.time_since_last_sample = time_since_last_sample
-
-        num_data_bytes = len(sampled_bytes) - Packet.FIRST_MEASUREMENT_OFFSET
-        num_packets = num_data_bytes // LvpmMeasurement.SIZE
-
-        sample_struct_format = (str(LvpmMeasurement.SIZE) + 's') * num_packets
-        struct_string = '>2B2x%sx' % sample_struct_format
-
-        self._flag_data, self.packet_type, *samples = struct.unpack(
-            struct_string, sampled_bytes)
-
-        self.measurements = [None] * len(samples)
-
-        for index, raw_measurement in enumerate(samples):
-            self.measurements[index] = LvpmMeasurement(
-                raw_measurement, self._get_sample_time(index),
-                self.packet_type, index)
-
-    def _get_sample_time(self, index):
-        """Returns the time the sample at the given index was received.
-
-        If multiple samples were captured within the same reading, the samples
-        are assumed to be uniformly distributed during the time it took to
-        sample the values.
-
-        Args:
-            index: the index of the individual reading from within the sample.
-        """
-        time_per_sample = self.time_since_last_sample / len(self.measurements)
-        return time_per_sample * (index + 1) + self.time_of_read
-
-    @property
-    def packet_counter(self):
-        return self._flag_data & 0x0F
-
-    def get_bytes(self, start, end_exclusive):
-        """Returns a bytearray spanning from start to the end (exclusive)."""
-        return self._packet_data[start:end_exclusive]
-
-    def __getitem__(self, index):
-        return self.measurements[index]
-
-    def __len__(self):
-        return len(self.measurements)
diff --git a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/stock_transformers.py b/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/stock_transformers.py
deleted file mode 100644
index 39658dd..0000000
--- a/src/antlion/controllers/monsoon_lib/sampling/lvpm_stock/stock_transformers.py
+++ /dev/null
@@ -1,389 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import struct
-import time
-
-import numpy as np
-
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon_proxy import MonsoonProxy
-from antlion.controllers.monsoon_lib.sampling.common import UncalibratedSampleChunk
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ProcessAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationError
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import ParallelTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SequentialTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SourceTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import Transformer
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.calibrations import LvpmCalibrationData
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.calibrations import LvpmCalibrationSnapshot
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.packet import Packet
-from antlion.controllers.monsoon_lib.sampling.lvpm_stock.packet import SampleType
-
-
-class StockLvpmSampler(Transformer):
-    """Gathers samples from the Monsoon and brings them back to the caller."""
-
-    def __init__(self, monsoon_serial, duration):
-        super().__init__()
-        self.monsoon_serial = monsoon_serial
-        self.duration = duration
-
-    def _transform(self, input_stream):
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ProcessAssemblyLineBuilder()
-         .source(PacketCollector(self.monsoon_serial, self.duration))
-         .into(SampleNormalizer())
-         .build(output_stream=self.output_stream)
-         .run())
-        # yapf: enable
-
-
-class PacketCollector(SourceTransformer):
-    """Collects Monsoon packets into a buffer to be sent to another process."""
-
-    def __init__(self, serial=None, sampling_duration=None):
-        super().__init__()
-        self._monsoon_serial = serial
-        self._monsoon_proxy = None
-        self.start_time = 0
-        self.sampling_duration = sampling_duration
-
-    def _initialize_monsoon(self):
-        """Initializes the MonsoonProxy object."""
-        self._monsoon_proxy = MonsoonProxy(serialno=self._monsoon_serial)
-
-    def on_begin(self):
-        """Begins data collection."""
-        self.start_time = time.time()
-        self._initialize_monsoon()
-        self._monsoon_proxy.start_data_collection()
-
-    def on_end(self):
-        """Stops data collection."""
-        self._monsoon_proxy.stop_data_collection()
-        self._monsoon_proxy.ser.close()
-
-    def _transform_buffer(self, buffer):
-        """Fills the given buffer with raw monsoon data at each entry."""
-        if (self.sampling_duration
-                and self.sampling_duration < time.time() - self.start_time):
-            return None
-
-        for index in range(len(buffer)):
-            time_before_read = time.time()
-            data = self._read_packet()
-            if data is None:
-                continue
-            time_after_read = time.time()
-            time_data = struct.pack('dd', time_after_read,
-                                    time_after_read - time_before_read)
-            buffer[index] = time_data + data
-
-        return buffer
-
-    def _read_packet(self):
-        """Reads a single packet from the serial port.
-
-        Packets are sent as Length-Value-Checksum, where the first byte is the
-        length, the following bytes are the value and checksum. The checksum is
-        the stored in the final byte, and is calculated as the 16 least-
-        significant-bits of the sum of all value bytes.
-
-        Returns:
-            None if the read failed. Otherwise, the packet data received.
-        """
-        len_char = self._monsoon_proxy.ser.read(1)
-        if not len_char:
-            logging.warning('Reading from serial timed out.')
-            return None
-
-        data_len = ord(len_char)
-        if not data_len:
-            logging.warning('Unable to read packet length.')
-            return None
-
-        result = self._monsoon_proxy.ser.read(int(data_len))
-        result = bytearray(result)
-        if len(result) != data_len:
-            logging.warning(
-                'Length mismatch, expected %d bytes, got %d bytes.', data_len,
-                len(result))
-            return None
-        body = result[:-1]
-        checksum = sum(body, data_len) & 0xFF
-        if result[-1] != checksum:
-            logging.warning(
-                'Invalid checksum from serial port! Expected %s, '
-                'got %s', hex(checksum), hex(result[-1]))
-            return None
-        return body
-
-
-class SampleNormalizer(Transformer):
-    """Normalizes the raw packet data into reading values."""
-
-    def _transform(self, input_stream):
-        # yapf: disable. Yapf doesn't handle fluent interfaces well.
-        (ThreadAssemblyLineBuilder()
-         .source(PacketReader(), input_stream=input_stream)
-         .into(SampleChunker())
-         .into(CalibrationApplier())
-         .build(output_stream=self.output_stream)
-         .run())
-        # yapf: enable
-
-    def _transform_buffer(self, buffer):
-        """_transform is overloaded, so this function can be left empty."""
-
-
-class PacketReader(ParallelTransformer):
-    """Reads the raw packets and converts them into LVPM Packet objects."""
-
-    def _transform_buffer(self, buffer):
-        """Converts the raw packets to Packet objects in-place in buffer.
-
-        Args:
-            buffer: A list of bytes objects. Will be in-place replaced with
-                Packet objects.
-        """
-        for i, packet in enumerate(buffer):
-            time_bytes_size = struct.calcsize('dd')
-            # Unpacks the two time.time() values sent by PacketCollector.
-            time_of_read, time_since_last_read = struct.unpack(
-                'dd', packet[:time_bytes_size])
-            packet = packet[time_bytes_size:]
-            # Magic number explanation:
-            # LVPM sample packets begin with 4 bytes, have at least one
-            # measurement (8 bytes), and have 1 last byte (usually a \x00 byte).
-            if len(packet) < 4 + 8 + 1 or packet[0] & 0x20 != 0x20:
-                logging.warning(
-                    'Tried to collect power sample values, received data of '
-                    'type=0x%02x, len=%d instead.', packet[0], len(packet))
-                buffer[i] = None
-                continue
-
-            buffer[i] = Packet(packet, time_of_read, time_since_last_read)
-
-        return buffer
-
-
-class SampleChunker(SequentialTransformer):
-    """Chunks input packets into lists of samples with identical calibration.
-
-    This step helps to quickly apply calibration across many samples at once.
-
-    Attributes:
-        _stored_raw_samples: The queue of raw samples that have yet to be
-            split into a new calibration group.
-        calibration_data: The calibration window information.
-    """
-
-    def __init__(self):
-        super().__init__()
-        self._stored_raw_samples = []
-        self.calibration_data = LvpmCalibrationData()
-
-    def _on_end_of_stream(self, input_stream):
-        self._send_buffers(BufferList([self._cut_new_buffer()]))
-        super()._on_end_of_stream(input_stream)
-
-    def _transform_buffer(self, buffer):
-        """Takes in data from the buffer and splits it based on calibration.
-
-        This transformer is meant to after the PacketReader.
-
-        Args:
-            buffer: A list of Packet objects.
-
-        Returns:
-            A BufferList containing 0 or more UncalibratedSampleChunk objects.
-        """
-        buffer_list = BufferList()
-        for packet in buffer:
-            # If a read packet was not a sample, the PacketReader returns None.
-            # Skip over these dud values.
-            if packet is None:
-                continue
-
-            for sample in packet:
-                sample_type = sample.get_sample_type()
-
-                if sample_type == SampleType.MEASUREMENT:
-                    self._stored_raw_samples.append(sample)
-                elif SampleType.is_calibration(sample_type):
-                    if len(self._stored_raw_samples) > 0:
-                        buffer_list.append(self._cut_new_buffer())
-                    self.calibration_data.add_calibration_sample(sample)
-                else:
-                    # There's no information on what this packet means within
-                    # Monsoon documentation or code.
-                    logging.warning('Received unidentifiable packet with '
-                                    'SampleType %s: %s' %
-                                    (sample_type, packet.get_bytes(0, None)))
-        return buffer_list
-
-    def _cut_new_buffer(self):
-        """Cuts a new buffer from the input stream data.
-
-        Returns:
-            The newly generated UncalibratedSampleChunk.
-        """
-        calibration_snapshot = LvpmCalibrationSnapshot(self.calibration_data)
-        new_chunk = UncalibratedSampleChunk(self._stored_raw_samples,
-                                            calibration_snapshot)
-        self._stored_raw_samples = []
-        return new_chunk
-
-
-class LvpmReading(object):
-    """The result of fully calibrating a sample. Contains all Monsoon readings.
-
-    Attributes:
-        _reading_list: The list of values obtained from the Monsoon.
-        _time_of_reading: The time since sampling began that the reading was
-            collected at.
-    """
-
-    def __init__(self, reading_list, time_of_reading):
-        """Creates an LvpmReading.
-
-        Args:
-            reading_list:
-                [0] Main Current
-                [1] USB Current
-                [2] Aux Current
-                [3] Main Voltage
-            time_of_reading: The time the reading was received.
-        """
-        self._reading_list = reading_list
-        self._time_of_reading = time_of_reading
-
-    @property
-    def main_current(self):
-        return self._reading_list[0]
-
-    @property
-    def usb_current(self):
-        return self._reading_list[1]
-
-    @property
-    def aux_current(self):
-        return self._reading_list[2]
-
-    @property
-    def main_voltage(self):
-        return self._reading_list[3]
-
-    @property
-    def sample_time(self):
-        return self._time_of_reading
-
-    def __add__(self, other):
-        reading_list = [
-            self.main_current + other.main_current,
-            self.usb_current + other.usb_current,
-            self.aux_current + other.aux_current,
-            self.main_voltage + other.main_voltage,
-        ]
-        sample_time = self.sample_time + other.sample_time
-
-        return LvpmReading(reading_list, sample_time)
-
-    def __truediv__(self, other):
-        reading_list = [
-            self.main_current / other,
-            self.usb_current / other,
-            self.aux_current / other,
-            self.main_voltage / other,
-        ]
-        sample_time = self.sample_time / other
-
-        return LvpmReading(reading_list, sample_time)
-
-
-class CalibrationApplier(ParallelTransformer):
-    """Applies the calibration formula to the all given samples.
-
-    Designed to come after a SampleChunker Transformer.
-    """
-
-    @staticmethod
-    def _is_device_calibrated(data):
-        """Checks to see if the Monsoon has completed calibration.
-
-        Args:
-            data: the calibration data.
-
-        Returns:
-            True if the data is calibrated. False otherwise.
-        """
-        try:
-            # If the data is calibrated for any Origin.REFERENCE value, it is
-            # calibrated for all Origin.REFERENCE values. The same is true for
-            # Origin.ZERO.
-            data.get(Channel.MAIN, Origin.REFERENCE, Granularity.COARSE)
-            data.get(Channel.MAIN, Origin.ZERO, Granularity.COARSE)
-        except CalibrationError:
-            return False
-        return True
-
-    def _transform_buffer(self, buffer):
-        calibration_data = buffer.calibration_data
-
-        if not self._is_device_calibrated(calibration_data):
-            return []
-
-        measurements = np.array([sample.values for sample in buffer.samples])
-        readings = np.zeros((len(buffer.samples), 5))
-
-        for channel in Channel.values:
-            fine_zero = calibration_data.get(channel, Origin.ZERO,
-                                             Granularity.FINE)
-            fine_scale = calibration_data.get(channel, Origin.SCALE,
-                                              Granularity.FINE)
-            coarse_zero = calibration_data.get(channel, Origin.ZERO,
-                                               Granularity.COARSE)
-            coarse_scale = calibration_data.get(channel, Origin.SCALE,
-                                                Granularity.COARSE)
-
-            # A set LSB means a coarse measurement. This bit needs to be
-            # cleared before setting calibration. Note that the
-            # reverse-engineered algorithm does not rightshift the bits after
-            # this operation. This explains the mismatch of calibration
-            # constants between the reverse-engineered algorithm and the
-            # Monsoon.py algorithm.
-            readings[:, channel] = np.where(
-                measurements[:, channel] & 1,
-                ((measurements[:, channel] & ~1) - coarse_zero) * coarse_scale,
-                (measurements[:, channel] - fine_zero) * fine_scale)
-
-        # The magic number 0.000125 is documented at
-        # http://wiki/Main/MonsoonProtocol#Data_response
-        # It represents how many volts represents each tick in the sample
-        # packet.
-        readings[:, 3] = measurements[:, 3] * 0.000125
-
-        for i in range(len(buffer.samples)):
-            buffer.samples[i] = LvpmReading(
-                list(readings[i]), buffer.samples[i].get_sample_time())
-
-        return buffer.samples
diff --git a/src/antlion/controllers/native.py b/src/antlion/controllers/native.py
deleted file mode 100644
index 8e11c87..0000000
--- a/src/antlion/controllers/native.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.sl4a_lib.rpc_connection import RpcConnection
-import json
-import os
-
-HOST = os.environ.get('AP_HOST', None)
-PORT = os.environ.get('AP_PORT', 9999)
-
-
-class SL4NException(Exception):
-    pass
-
-
-class SL4NAPIError(SL4NException):
-    """Raised when remote API reports an error."""
-
-
-class SL4NProtocolError(SL4NException):
-    """Raised when there is an error exchanging data with the device server."""
-    NO_RESPONSE_FROM_HANDSHAKE = "No response from handshake."
-    NO_RESPONSE_FROM_SERVER = "No response from server."
-    MISMATCHED_API_ID = "Mismatched API id."
-
-
-def IDCounter():
-    i = 0
-    while True:
-        yield i
-        i += 1
-
-
-class NativeAndroid(RpcConnection):
-    COUNTER = IDCounter()
-
-    def _rpc(self, method, *args):
-        with self._lock:
-            apiid = next(self._counter)
-        data = {'id': apiid, 'method': method, 'params': args}
-        request = json.dumps(data)
-        self.client.write(request.encode("utf8") + b'\n')
-        self.client.flush()
-        response = self.client.readline()
-        if not response:
-            raise SL4NProtocolError(SL4NProtocolError.NO_RESPONSE_FROM_SERVER)
-        #TODO: (tturney) fix the C side from sending \x00 char over the socket.
-        result = json.loads(
-            str(response, encoding="utf8").rstrip().replace("\x00", ""))
-        if result['error']:
-            raise SL4NAPIError(result['error'])
-        if result['id'] != apiid:
-            raise SL4NProtocolError(SL4NProtocolError.MISMATCHED_API_ID)
-        return result['result']
diff --git a/src/antlion/controllers/native_android_device.py b/src/antlion/controllers/native_android_device.py
deleted file mode 100644
index b898bf6..0000000
--- a/src/antlion/controllers/native_android_device.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.utils_lib import host_utils
-import antlion.controllers.native as native
-from subprocess import call
-
-import logging
-import time
-
-#TODO(tturney): Merge this into android device
-
-MOBLY_CONTROLLER_CONFIG_NAME = "NativeAndroidDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "native_android_devices"
-
-
-def create(configs):
-    logger = logging
-    ads = get_instances(configs)
-    for ad in ads:
-        try:
-            ad.get_droid()
-        except:
-            logger.exception("Failed to start sl4n on %s" % ad.serial)
-    return ads
-
-
-def destroy(ads):
-    pass
-
-
-def get_instances(serials, ):
-    """Create AndroidDevice instances from a list of serials.
-
-    Args:
-        serials: A list of android device serials.
-        logger: A logger to be passed to each instance.
-
-    Returns:
-        A list of AndroidDevice objects.
-    """
-    results = []
-    for s in serials:
-        results.append(NativeAndroidDevice(s))
-    return results
-
-
-class NativeAndroidDeviceError(Exception):
-    pass
-
-
-class NativeAndroidDevice(AndroidDevice):
-    def __del__(self):
-        if self.h_port:
-            self.adb.forward("--remove tcp:%d" % self.h_port)
-
-    def get_droid(self, handle_event=True):
-        """Create an sl4n connection to the device.
-
-        Return the connection handler 'droid'. By default, another connection
-        on the same session is made for EventDispatcher, and the dispatcher is
-        returned to the caller as well.
-        If sl4n server is not started on the device, try to start it.
-
-        Args:
-            handle_event: True if this droid session will need to handle
-                events.
-
-        Returns:
-            droid: Android object useds to communicate with sl4n on the android
-                device.
-            ed: An optional EventDispatcher to organize events for this droid.
-
-        Examples:
-            Don't need event handling:
-            >>> ad = NativeAndroidDevice()
-            >>> droid = ad.get_droid(False)
-
-            Need event handling:
-            >>> ad = NativeAndroidDevice()
-            >>> droid, ed = ad.get_droid()
-        """
-        if not self.h_port or not host_utils.is_port_available(self.h_port):
-            self.h_port = host_utils.get_available_host_port()
-        self.adb.tcp_forward(self.h_port, self.d_port)
-        pid = self.adb.shell("pidof -s sl4n", ignore_status=True)
-        while (pid):
-            self.adb.shell("kill {}".format(pid))
-            pid = self.adb.shell("pidof -s sl4n", ignore_status=True)
-        call(
-            ["adb -s " + self.serial + " shell sh -c \"/system/bin/sl4n\" &"],
-            shell=True)
-        try:
-            time.sleep(3)
-            droid = self.start_new_session()
-        except:
-            droid = self.start_new_session()
-        return droid
-
-    def start_new_session(self):
-        """Start a new session in sl4n.
-
-        Also caches the droid in a dict with its uid being the key.
-
-        Returns:
-            An Android object used to communicate with sl4n on the android
-                device.
-
-        Raises:
-            sl4nException: Something is wrong with sl4n and it returned an
-            existing uid to a new session.
-        """
-        droid = native.NativeAndroid(port=self.h_port)
-        droid.open()
-        if droid.uid in self._droid_sessions:
-            raise bt.SL4NException(("SL4N returned an existing uid for a "
-                                    "new session. Abort."))
-            return droid
-        self._droid_sessions[droid.uid] = [droid]
-        return droid
diff --git a/src/antlion/controllers/openwrt_ap.py b/src/antlion/controllers/openwrt_ap.py
deleted file mode 100644
index ad6e3de..0000000
--- a/src/antlion/controllers/openwrt_ap.py
+++ /dev/null
@@ -1,652 +0,0 @@
-"""Controller for Open WRT access point."""
-
-import random
-import re
-import time
-
-from antlion import logger
-from antlion import signals
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.openwrt_lib import network_settings
-from antlion.controllers.openwrt_lib import wireless_config
-from antlion.controllers.openwrt_lib import wireless_settings_applier
-from antlion.controllers.openwrt_lib.openwrt_constants import OpenWrtModelMap as modelmap
-from antlion.controllers.openwrt_lib.openwrt_constants import OpenWrtWifiSetting
-from antlion.controllers.openwrt_lib.openwrt_constants import SYSTEM_INFO_CMD
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import settings
-import yaml
-
-
-MOBLY_CONTROLLER_CONFIG_NAME = "OpenWrtAP"
-ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
-OPEN_SECURITY = "none"
-PSK1_SECURITY = "psk"
-PSK_SECURITY = "psk2"
-WEP_SECURITY = "wep"
-ENT_SECURITY = "wpa2"
-OWE_SECURITY = "owe"
-SAE_SECURITY = "sae"
-SAEMIXED_SECURITY = "sae-mixed"
-ENABLE_RADIO = "0"
-PMF_ENABLED = 2
-WIFI_2G = "wifi2g"
-WIFI_5G = "wifi5g"
-WAIT_TIME = 20
-DEFAULT_RADIOS = ("radio0", "radio1")
-
-
-def create(configs):
-  """Creates ap controllers from a json config.
-
-  Creates an ap controller from either a list, or a single element. The element
-  can either be just the hostname or a dictionary containing the hostname and
-  username of the AP to connect to over SSH.
-
-  Args:
-    configs: The json configs that represent this controller.
-
-  Returns:
-    AccessPoint object
-
-  Example:
-    Below is the config file entry for OpenWrtAP as a list. A testbed can have
-    1 or more APs to configure. Each AP has a "ssh_config" key to provide SSH
-    login information. OpenWrtAP#__init__() uses this to create SSH object.
-
-      "OpenWrtAP": [
-        {
-          "ssh_config": {
-            "user" : "root",
-            "host" : "192.168.1.1"
-          }
-        },
-        {
-          "ssh_config": {
-            "user" : "root",
-            "host" : "192.168.1.2"
-          }
-        }
-      ]
-  """
-  return [OpenWrtAP(c) for c in configs]
-
-
-def destroy(aps):
-  """Destroys a list of AccessPoints.
-
-  Args:
-    aps: The list of AccessPoints to destroy.
-  """
-  for ap in aps:
-    ap.close()
-    ap.close_ssh()
-
-
-def get_info(aps):
-  """Get information on a list of access points.
-
-  Args:
-    aps: A list of AccessPoints.
-
-  Returns:
-    A list of all aps hostname.
-  """
-  return [ap.ssh_settings.hostname for ap in aps]
-
-
-class OpenWrtAP(object):
-  """An AccessPoint controller.
-
-  Attributes:
-    ssh: The ssh connection to the AP.
-    ssh_settings: The ssh settings being used by the ssh connection.
-    log: Logging object for AccessPoint.
-    wireless_setting: object holding wireless configuration.
-    network_setting: Object for network configuration.
-    model: OpenWrt HW model.
-    radios: Fit interface for test.
-  """
-
-  def __init__(self, config):
-    """Initialize AP."""
-    self.ssh_settings = settings.from_config(config["ssh_config"])
-    self.ssh = connection.SshConnection(self.ssh_settings)
-    self.log = logger.create_logger(
-        lambda msg: "[OpenWrtAP|%s] %s" % (self.ssh_settings.hostname, msg))
-    self.wireless_setting = None
-    self.network_setting = network_settings.NetworkSettings(
-        self.ssh, self.ssh_settings, self.log)
-    self.model = self.get_model_name()
-    if self.model in modelmap.__dict__:
-      self.radios = modelmap.__dict__[self.model]
-    else:
-      self.radios = DEFAULT_RADIOS
-
-  def configure_ap(self, wifi_configs, channel_2g, channel_5g):
-    """Configure AP with the required settings.
-
-    Each test class inherits WifiBaseTest. Based on the test, we may need to
-    configure PSK, WEP, OPEN, ENT networks on 2G and 5G bands in any
-    combination. We call WifiBaseTest methods get_psk_network(),
-    get_open_network(), get_wep_network() and get_ent_network() to create
-    dictionaries which contains this information. 'wifi_configs' is a list of
-    such dictionaries. Example below configures 2 WiFi networks - 1 PSK 2G and
-    1 Open 5G on one AP. configure_ap() is called from WifiBaseTest to
-    configure the APs.
-
-    wifi_configs = [
-      {
-        '2g': {
-          'SSID': '2g_AkqXWPK4',
-          'security': 'psk2',
-          'password': 'YgYuXqDO9H',
-          'hiddenSSID': False
-        },
-      },
-      {
-        '5g': {
-          'SSID': '5g_8IcMR1Sg',
-          'security': 'none',
-          'hiddenSSID': False
-        },
-      }
-    ]
-
-    Args:
-      wifi_configs: list of network settings for 2G and 5G bands.
-      channel_2g: channel for 2G band.
-      channel_5g: channel for 5G band.
-    """
-    # generate wifi configs to configure
-    wireless_configs = self.generate_wireless_configs(wifi_configs)
-    self.wireless_setting = wireless_settings_applier.WirelessSettingsApplier(
-        self.ssh, wireless_configs, channel_2g, channel_5g, self.radios[1], self.radios[0])
-    self.wireless_setting.apply_wireless_settings()
-
-  def start_ap(self):
-    """Starts the AP with the settings in /etc/config/wireless."""
-    self.ssh.run("wifi up")
-    curr_time = time.time()
-    while time.time() < curr_time + WAIT_TIME:
-      if self.get_wifi_status():
-        return
-      time.sleep(3)
-    if not self.get_wifi_status():
-      raise ValueError("Failed to turn on WiFi on the AP.")
-
-  def stop_ap(self):
-    """Stops the AP."""
-    self.ssh.run("wifi down")
-    curr_time = time.time()
-    while time.time() < curr_time + WAIT_TIME:
-      if not self.get_wifi_status():
-        return
-      time.sleep(3)
-    if self.get_wifi_status():
-      raise ValueError("Failed to turn off WiFi on the AP.")
-
-  def get_bssids_for_wifi_networks(self):
-    """Get BSSIDs for wifi networks configured.
-
-    Returns:
-      Dictionary of SSID - BSSID map for both bands.
-    """
-    bssid_map = {"2g": {}, "5g": {}}
-    for radio in self.radios:
-      ssid_ifname_map = self.get_ifnames_for_ssids(radio)
-      if radio == self.radios[0]:
-        for ssid, ifname in ssid_ifname_map.items():
-          bssid_map["5g"][ssid] = self.get_bssid(ifname)
-      elif radio == self.radios[1]:
-        for ssid, ifname in ssid_ifname_map.items():
-          bssid_map["2g"][ssid] = self.get_bssid(ifname)
-    return bssid_map
-
-  def get_ifnames_for_ssids(self, radio):
-    """Get interfaces for wifi networks.
-
-    Args:
-      radio: 2g or 5g radio get the bssids from.
-
-    Returns:
-      dictionary of ssid - ifname mappings.
-    """
-    ssid_ifname_map = {}
-    str_output = self.ssh.run("wifi status %s" % radio).stdout
-    wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                            Loader=yaml.SafeLoader)
-    wifi_status = wifi_status[radio]
-    if wifi_status["up"]:
-      interfaces = wifi_status["interfaces"]
-      for config in interfaces:
-        ssid = config["config"]["ssid"]
-        ifname = config["ifname"]
-        ssid_ifname_map[ssid] = ifname
-    return ssid_ifname_map
-
-  def get_bssid(self, ifname):
-    """Get MAC address from an interface.
-
-    Args:
-      ifname: interface name of the corresponding MAC.
-
-    Returns:
-      BSSID of the interface.
-    """
-    ifconfig = self.ssh.run("ifconfig %s" % ifname).stdout
-    mac_addr = ifconfig.split("\n")[0].split()[-1]
-    return mac_addr
-
-  def set_wpa_encryption(self, encryption):
-    """Set different encryptions to wpa or wpa2.
-
-    Args:
-      encryption: ccmp, tkip, or ccmp+tkip.
-    """
-    str_output = self.ssh.run("wifi status").stdout
-    wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                            Loader=yaml.SafeLoader)
-
-    # Counting how many interface are enabled.
-    total_interface = 0
-    for radio in self.radios:
-      num_interface = len(wifi_status[radio]["interfaces"])
-      total_interface += num_interface
-
-    # Iterates every interface to get and set wpa encryption.
-    default_extra_interface = 2
-    for i in range(total_interface + default_extra_interface):
-      origin_encryption = self.ssh.run(
-          "uci get wireless.@wifi-iface[{}].encryption".format(i)).stdout
-      origin_psk_pattern = re.match(r"psk\b", origin_encryption)
-      target_psk_pattern = re.match(r"psk\b", encryption)
-      origin_psk2_pattern = re.match(r"psk2\b", origin_encryption)
-      target_psk2_pattern = re.match(r"psk2\b", encryption)
-
-      if origin_psk_pattern == target_psk_pattern:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].encryption={}".format(
-                i, encryption))
-
-      if origin_psk2_pattern == target_psk2_pattern:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].encryption={}".format(
-                i, encryption))
-
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("wifi")
-
-  def set_password(self, pwd_5g=None, pwd_2g=None):
-    """Set password for individual interface.
-
-    Args:
-        pwd_5g: 8 ~ 63 chars, ascii letters and digits password for 5g network.
-        pwd_2g: 8 ~ 63 chars, ascii letters and digits password for 2g network.
-    """
-    if pwd_5g:
-      if len(pwd_5g) < 8 or len(pwd_5g) > 63:
-        self.log.error("Password must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      elif not re.match("^[A-Za-z0-9]*$", pwd_5g):
-        self.log.error("Password must only contains ascii letters and digits")
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].key={}".format(3, pwd_5g))
-        self.log.info("Set 5G password to :{}".format(pwd_5g))
-
-    if pwd_2g:
-      if len(pwd_2g) < 8 or len(pwd_2g) > 63:
-        self.log.error("Password must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      elif not re.match("^[A-Za-z0-9]*$", pwd_2g):
-        self.log.error("Password must only contains ascii letters and digits")
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].key={}".format(2, pwd_2g))
-        self.log.info("Set 2G password to :{}".format(pwd_2g))
-
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("wifi")
-
-  def set_ssid(self, ssid_5g=None, ssid_2g=None):
-    """Set SSID for individual interface.
-
-    Args:
-        ssid_5g: 8 ~ 63 chars for 5g network.
-        ssid_2g: 8 ~ 63 chars for 2g network.
-    """
-    if ssid_5g:
-      if len(ssid_5g) < 8 or len(ssid_5g) > 63:
-        self.log.error("SSID must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].ssid={}".format(3, ssid_5g))
-        self.log.info("Set 5G SSID to :{}".format(ssid_5g))
-
-    if ssid_2g:
-      if len(ssid_2g) < 8 or len(ssid_2g) > 63:
-        self.log.error("SSID must be 8~63 characters long")
-      # Only accept ascii letters and digits
-      else:
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].ssid={}".format(2, ssid_2g))
-        self.log.info("Set 2G SSID to :{}".format(ssid_2g))
-
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("wifi")
-
-  def generate_mobility_domain(self):
-    """Generate 4-character hexadecimal ID.
-
-    Returns:
-      String; a 4-character hexadecimal ID.
-    """
-    md = "{:04x}".format(random.getrandbits(16))
-    self.log.info("Mobility Domain ID: {}".format(md))
-    return md
-
-  def enable_80211r(self, iface, md):
-    """Enable 802.11r for one single radio.
-
-    Args:
-      iface: index number of wifi-iface.
-              2: radio1
-              3: radio0
-      md: mobility domain. a 4-character hexadecimal ID.
-    Raises:
-      TestSkip if 2g or 5g radio is not up or 802.11r is not enabled.
-    """
-    str_output = self.ssh.run("wifi status").stdout
-    wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                            Loader=yaml.SafeLoader)
-    # Check if the radio is up.
-    if iface == OpenWrtWifiSetting.IFACE_2G:
-      if wifi_status[self.radios[1]]["up"]:
-        self.log.info("2g network is ENABLED")
-      else:
-        raise signals.TestSkip("2g network is NOT ENABLED")
-    elif iface == OpenWrtWifiSetting.IFACE_5G:
-      if wifi_status[self.radios[0]]["up"]:
-        self.log.info("5g network is ENABLED")
-      else:
-        raise signals.TestSkip("5g network is NOT ENABLED")
-
-    # Setup 802.11r.
-    self.ssh.run(
-        "uci set wireless.@wifi-iface[{}].ieee80211r='1'".format(iface))
-    self.ssh.run(
-        "uci set wireless.@wifi-iface[{}].ft_psk_generate_local='1'"
-        .format(iface))
-    self.ssh.run(
-        "uci set wireless.@wifi-iface[{}].mobility_domain='{}'"
-        .format(iface, md))
-    self.ssh.run(
-        "uci commit wireless")
-    self.ssh.run("wifi")
-
-    # Check if 802.11r is enabled.
-    result = self.ssh.run(
-        "uci get wireless.@wifi-iface[{}].ieee80211r".format(iface)).stdout
-    if result == "1":
-      self.log.info("802.11r is ENABLED")
-    else:
-      raise signals.TestSkip("802.11r is NOT ENABLED")
-
-  def generate_wireless_configs(self, wifi_configs):
-    """Generate wireless configs to configure.
-
-    Converts wifi_configs from configure_ap() to a list of 'WirelessConfig'
-    objects. Each object represents a wifi network to configure on the AP.
-
-    Args:
-      wifi_configs: Network list of different security types and bands.
-
-    Returns:
-      wireless configuration for openwrt AP.
-    """
-    num_2g = 1
-    num_5g = 1
-    wireless_configs = []
-
-    for i in range(len(wifi_configs)):
-      if hostapd_constants.BAND_2G in wifi_configs[i]:
-        config = wifi_configs[i][hostapd_constants.BAND_2G]
-        if config["security"] == PSK_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == PSK1_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == WEP_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             wep_key=config["wepKeys"][0],
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OPEN_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OWE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAEMIXED_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_2G, num_2g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_2G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == ENT_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig(
-                  "%s%s" % (WIFI_2G, num_2g),
-                  config["SSID"],
-                  config["security"],
-                  hostapd_constants.BAND_2G,
-                  radius_server_ip=config["radius_server_ip"],
-                  radius_server_port=config["radius_server_port"],
-                  radius_server_secret=config["radius_server_secret"],
-                  hidden=config["hiddenSSID"]))
-        num_2g += 1
-      if hostapd_constants.BAND_5G in wifi_configs[i]:
-        config = wifi_configs[i][hostapd_constants.BAND_5G]
-        if config["security"] == PSK_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == PSK1_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == WEP_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             wep_key=config["wepKeys"][0],
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OPEN_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             hidden=config["hiddenSSID"]))
-        elif config["security"] == OWE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAE_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=PMF_ENABLED))
-        elif config["security"] == SAEMIXED_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig("%s%s" % (WIFI_5G, num_5g),
-                                             config["SSID"],
-                                             config["security"],
-                                             hostapd_constants.BAND_5G,
-                                             password=config["password"],
-                                             hidden=config["hiddenSSID"],
-                                             ieee80211w=config["ieee80211w"]))
-        elif config["security"] == ENT_SECURITY:
-          wireless_configs.append(
-              wireless_config.WirelessConfig(
-                  "%s%s" % (WIFI_5G, num_5g),
-                  config["SSID"],
-                  config["security"],
-                  hostapd_constants.BAND_5G,
-                  radius_server_ip=config["radius_server_ip"],
-                  radius_server_port=config["radius_server_port"],
-                  radius_server_secret=config["radius_server_secret"],
-                  hidden=config["hiddenSSID"]))
-        num_5g += 1
-
-    return wireless_configs
-
-  def get_wifi_network(self, security=None, band=None):
-    """Return first match wifi interface's config.
-
-    Args:
-      security: psk2 or none
-      band: '2g' or '5g'
-
-    Returns:
-      A dict contains match wifi interface's config.
-    """
-
-    for wifi_iface in self.wireless_setting.wireless_configs:
-      match_list = []
-      wifi_network = wifi_iface.__dict__
-      if security:
-        match_list.append(security == wifi_network["security"])
-      if band:
-        match_list.append(band == wifi_network["band"])
-
-      if all(match_list):
-        wifi_network["SSID"] = wifi_network["ssid"]
-        if not wifi_network["password"]:
-          del wifi_network["password"]
-        return wifi_network
-    return None
-
-  def get_wifi_status(self):
-    """Check if radios are up. Default are 2G and 5G bands.
-
-    Returns:
-      True if both radios are up. False if not.
-    """
-    status = True
-    for radio in self.radios:
-      try:
-        str_output = self.ssh.run("wifi status %s" % radio).stdout
-        wifi_status = yaml.load(str_output.replace("\t", "").replace("\n", ""),
-                                Loader=yaml.SafeLoader)
-        status = wifi_status[radio]["up"] and status
-      except:
-        self.log.info("Failed to make ssh connection to the OpenWrt")
-        return False
-    return status
-
-  def verify_wifi_status(self, timeout=20):
-    """Ensure wifi interfaces are ready.
-
-    Args:
-      timeout: An integer that is the number of times to try
-               wait for interface ready.
-    Returns:
-      True if both radios are up. False if not.
-    """
-    start_time = time.time()
-    end_time = start_time + timeout
-    while time.time() < end_time:
-      if self.get_wifi_status():
-        return True
-      time.sleep(1)
-    return False
-
-  def get_model_name(self):
-    """Get Openwrt model name.
-
-    Returns:
-      A string include device brand and model. e.g. NETGEAR_R8000
-    """
-    out = self.ssh.run(SYSTEM_INFO_CMD).stdout.split("\n")
-    for line in out:
-      if "board_name" in line:
-        model = (line.split()[1].strip("\",").split(","))
-        return "_".join(map(lambda i: i.upper(), model))
-    self.log.info("Failed to retrieve OpenWrt model information.")
-    return None
-
-  def close(self):
-    """Reset wireless and network settings to default and stop AP."""
-    if self.network_setting.config:
-      self.network_setting.cleanup_network_settings()
-    if self.wireless_setting:
-      self.wireless_setting.cleanup_wireless_settings()
-
-  def close_ssh(self):
-    """Close SSH connection to AP."""
-    self.ssh.close()
-
-  def reboot(self):
-    """Reboot Openwrt."""
-    self.ssh.run("reboot")
-
diff --git a/src/antlion/controllers/openwrt_lib/network_const.py b/src/antlion/controllers/openwrt_lib/network_const.py
deleted file mode 100644
index 3aba0de..0000000
--- a/src/antlion/controllers/openwrt_lib/network_const.py
+++ /dev/null
@@ -1,290 +0,0 @@
-LOCALHOST = "192.168.1.1"
-
-# params for ipsec.conf
-IPSEC_CONF = {
-    "config setup": {
-        "charondebug": "chd 2,ike 2,knl 2,net 2,esp 2,dmn 2,"
-                       "mgr 2,lib 1,cfg 2,enc 1".__repr__(),
-        "uniqueids": "never"
-    },
-    "conn %default": {
-        "ike": "aes128-sha-modp1024",
-        "esp": "aes128-sha1"
-    }
-}
-
-IPSEC_L2TP_PSK = {
-    "conn L2TP_PSK": {
-        "keyexchange": "ikev1",
-        "type": "transport",
-        "left": LOCALHOST,
-        "leftprotoport": "17/1701",
-        "leftauth": "psk",
-        "right": "%any",
-        "rightprotoport": "17/%any",
-        "rightsubnet": "0.0.0.0/0",
-        "rightauth": "psk",
-        "auto": "add"
-    }
-}
-
-IPSEC_L2TP_RSA = {
-    "conn L2TP_RSA": {
-        "keyexchange": "ikev1",
-        "type": "transport",
-        "left": LOCALHOST,
-        "leftprotoport": "17/1701",
-        "leftauth": "pubkey",
-        "leftcert": "serverCert.der",
-        "right": "%any",
-        "rightprotoport": "17/%any",
-        "rightsubnet": "0.0.0.0/0",
-        "rightauth": "pubkey",
-        "auto": "add"
-    }
-}
-
-IPSEC_HYBRID_RSA = {
-    "conn HYBRID_RSA": {
-        "keyexchange": "ikev1",
-        "left": LOCALHOST,
-        "leftsubnet": "0.0.0.0/0",
-        "leftauth": "pubkey",
-        "leftcert": "serverCert.der",
-        "leftsendcert": "always",
-        "right": "%any",
-        "rightsubnet": "0.0.0.0/0",
-        "rightauth": "pubkey",
-        "rightauth2": "xauth",
-        "xauth": "server",
-        "auto": "add",
-    }
-}
-
-IPSEC_XAUTH_PSK = {
-    "conn XAUTH_PSK": {
-        "keyexchange": "ikev1",
-        "left": LOCALHOST,
-        "leftsubnet": "0.0.0.0/0",
-        "leftauth": "psk",
-        "right": "%any",
-        "rightsubnet": "0.0.0.0/0",
-        "rightauth": "psk",
-        "rightauth2": "xauth",
-        "auto": "add",
-    }
-}
-
-IPSEC_XAUTH_RSA = {
-    "conn XAUTH_RSA": {
-        "keyexchange": "ikev1",
-        "left": LOCALHOST,
-        "leftsubnet": "0.0.0.0/0",
-        "leftcert": "serverCert.der",
-        "leftsendcert": "always",
-        "right": "%any",
-        "rightsubnet": "0.0.0.0/0",
-        "rightauth": "xauth",
-        "xauth": "server",
-        "auto": "add",
-    }
-}
-
-IPSEC_IKEV2_MSCHAPV2 = {
-    "conn IKEV2_MSCHAPV2": {
-        "keyexchange": "ikev2",
-        "left": LOCALHOST,
-        "leftid": LOCALHOST,
-        "leftcert": "serverCert.der",
-        "leftsubnet": "0.0.0.0/0",
-        "leftauth": "pubkey",
-        "leftsendcert": "always",
-        "right": "%any",
-        "rightid": "vpntest",
-        "rightauth": "eap-mschapv2",
-        "auto": "add"
-    }
-}
-
-IPSEC_IKEV2_PSK = {
-    "conn IKEV2_PSK": {
-        "keyexchange": "ikev2",
-        "left": LOCALHOST,
-        "leftid": LOCALHOST,
-        "leftauth": "psk",
-        "leftsubnet": "0.0.0.0/0",
-        "right": "%any",
-        "rightid": "vpntest",
-        "rightauth": "psk",
-        "auto": "add"
-    }
-}
-
-IPSEC_IKEV2_RSA = {
-    "conn IKEV2_RSA": {
-        "keyexchange": "ikev2",
-        "left": LOCALHOST,
-        "leftid": LOCALHOST,
-        "leftcert": "serverCert.der",
-        "leftsubnet": "0.0.0.0/0",
-        "leftauth": "pubkey",
-        "leftsendcert": "always",
-        "right": "%any",
-        "rightid": "vpntest@%s" % LOCALHOST,
-        "rightauth": "pubkey",
-        "rightcert": "clientCert.pem",
-        "auto": "add"
-    }
-}
-
-IPSEC_IKEV2_MSCHAPV2_HOSTNAME = {
-    "conn IKEV2_MSCHAPV2_HOSTNAME": {
-        "keyexchange": "ikev2",
-        "left": LOCALHOST,
-        "leftid": "strongswan-vpn-server.android-iperf.com",
-        "leftcert": "serverCert.der",
-        "leftsubnet": "0.0.0.0/0",
-        "leftauth": "pubkey",
-        "leftsendcert": "always",
-        "right": "%any",
-        "rightid": "vpntest",
-        "rightauth": "eap-mschapv2",
-        "auto": "add"
-    }
-}
-
-IPSEC_IKEV2_PSK_HOSTNAME = {
-    "conn IKEV2_PSK_HOSTNAME": {
-        "keyexchange": "ikev2",
-        "left": LOCALHOST,
-        "leftid": "strongswan-vpn-server.android-iperf.com",
-        "leftauth": "psk",
-        "leftsubnet": "0.0.0.0/0",
-        "right": "%any",
-        "rightid": "vpntest",
-        "rightauth": "psk",
-        "auto": "add"
-    }
-}
-
-IPSEC_IKEV2_RSA_HOSTNAME = {
-    "conn IKEV2_RSA_HOSTNAME": {
-        "keyexchange": "ikev2",
-        "left": LOCALHOST,
-        "leftid": "strongswan-vpn-server.android-iperf.com",
-        "leftcert": "serverCert.der",
-        "leftsubnet": "0.0.0.0/0",
-        "leftauth": "pubkey",
-        "leftsendcert": "always",
-        "right": "%any",
-        "rightid": "vpntest@strongswan-vpn-server.android-iperf.com",
-        "rightauth": "pubkey",
-        "rightcert": "clientCert.pem",
-        "auto": "add"
-    }
-}
-
-# parmas for lx2tpd
-
-XL2TPD_CONF_GLOBAL = (
-    "[global]",
-    "ipsec saref = no",
-    "debug tunnel = no",
-    "debug avp = no",
-    "debug network = no",
-    "debug state = no",
-    "access control = no",
-    "rand source = dev",
-    "port = 1701",
-)
-
-XL2TPD_CONF_INS = (
-    "[lns default]",
-    "require authentication = yes",
-    "pass peer = yes",
-    "ppp debug = no",
-    "length bit = yes",
-    "refuse pap = yes",
-    "refuse chap = yes",
-)
-
-XL2TPD_OPTION = (
-    "require-mschap-v2",
-    "refuse-mschap",
-    "ms-dns 8.8.8.8",
-    "ms-dns 8.8.4.4",
-    "asyncmap 0",
-    "auth",
-    "crtscts",
-    "idle 1800",
-    "mtu 1410",
-    "mru 1410",
-    "connect-delay 5000",
-    "lock",
-    "hide-password",
-    "local",
-    "debug",
-    "modem",
-    "proxyarp",
-    "lcp-echo-interval 30",
-    "lcp-echo-failure 4",
-    "nomppe"
-)
-
-# iptable rules for vpn_pptp
-FIREWALL_RULES_FOR_PPTP = (
-    "iptables -A input_rule -i ppp+ -j ACCEPT",
-    "iptables -A output_rule -o ppp+ -j ACCEPT",
-    "iptables -A forwarding_rule -i ppp+ -j ACCEPT"
-)
-
-# iptable rules for vpn_l2tp
-FIREWALL_RULES_FOR_L2TP = (
-    "iptables -I INPUT  -m policy --dir in --pol ipsec --proto esp -j ACCEPT",
-    "iptables -I FORWARD  -m policy --dir in --pol ipsec --proto esp -j ACCEPT",
-    "iptables -I FORWARD  -m policy --dir out --pol ipsec --proto esp -j ACCEPT",
-    "iptables -I OUTPUT   -m policy --dir out --pol ipsec --proto esp -j ACCEPT",
-    "iptables -t nat -I POSTROUTING -m policy --pol ipsec --dir out -j ACCEPT",
-    "iptables -A INPUT -p esp -j ACCEPT",
-    "iptables -A INPUT -i eth0.2 -p udp --dport 500 -j ACCEPT",
-    "iptables -A INPUT -i eth0.2 -p tcp --dport 500 -j ACCEPT",
-    "iptables -A INPUT -i eth0.2 -p udp --dport 4500 -j ACCEPT",
-    "iptables -A INPUT -p udp --dport 500 -j ACCEPT",
-    "iptables -A INPUT -p udp --dport 4500 -j ACCEPT",
-    "iptables -A INPUT -p udp -m policy --dir in --pol ipsec -m udp --dport 1701 -j ACCEPT"
-)
-
-FIREWALL_RULES_DISABLE_DNS_RESPONSE = (
-    "iptables -I OUTPUT -p udp --sport 53 -j DROP",
-    "iptables -I OUTPUT -p tcp --sport 53 -j DROP",
-    "ip6tables -I OUTPUT -p udp --sport 53 -j DROP",
-    "ip6tables -I OUTPUT -p tcp --sport 53 -j DROP",
-)
-
-
-# Object for vpn profile
-class VpnL2tp(object):
-    """Profile for vpn l2tp type.
-
-    Attributes:
-        hostname: vpn server domain name
-        address: vpn server address
-        username: vpn user account
-        password: vpn user password
-        psk_secret: psk for ipsec
-        name: vpn server name for register in OpenWrt
-    """
-
-    def __init__(self,
-                 vpn_server_hostname,
-                 vpn_server_address,
-                 vpn_username,
-                 vpn_password,
-                 psk_secret,
-                 server_name):
-        self.name = server_name
-        self.hostname = vpn_server_hostname
-        self.address = vpn_server_address
-        self.username = vpn_username
-        self.password = vpn_password
-        self.psk_secret = psk_secret
diff --git a/src/antlion/controllers/openwrt_lib/network_settings.py b/src/antlion/controllers/openwrt_lib/network_settings.py
deleted file mode 100644
index 10ed891..0000000
--- a/src/antlion/controllers/openwrt_lib/network_settings.py
+++ /dev/null
@@ -1,1108 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-import time
-
-from antlion import signals
-from antlion import utils
-from antlion.controllers.openwrt_lib import network_const
-
-
-SERVICE_DNSMASQ = "dnsmasq"
-SERVICE_STUNNEL = "stunnel"
-SERVICE_NETWORK = "network"
-SERVICE_PPTPD = "pptpd"
-SERVICE_FIREWALL = "firewall"
-SERVICE_IPSEC = "ipsec"
-SERVICE_XL2TPD = "xl2tpd"
-SERVICE_ODHCPD = "odhcpd"
-SERVICE_OPENNDS = "opennds"
-SERVICE_UHTTPD = "uhttpd"
-PPTP_PACKAGE = "pptpd kmod-nf-nathelper-extra"
-L2TP_PACKAGE = "strongswan-full openssl-util xl2tpd"
-NAT6_PACKAGE = "ip6tables kmod-ipt-nat6"
-CAPTIVE_PORTAL_PACKAGE = "opennds php7-cli php7-mod-openssl php7-cgi php7"
-MDNS_PACKAGE = "avahi-utils avahi-daemon-service-http avahi-daemon-service-ssh libavahi-client avahi-dbus-daemon"
-STUNNEL_CONFIG_PATH = "/etc/stunnel/DoTServer.conf"
-HISTORY_CONFIG_PATH = "/etc/dirty_configs"
-PPTPD_OPTION_PATH = "/etc/ppp/options.pptpd"
-XL2TPD_CONFIG_PATH = "/etc/xl2tpd/xl2tpd.conf"
-XL2TPD_OPTION_CONFIG_PATH = "/etc/ppp/options.xl2tpd"
-FIREWALL_CUSTOM_OPTION_PATH = "/etc/firewall.user"
-PPP_CHAP_SECRET_PATH = "/etc/ppp/chap-secrets"
-IKEV2_VPN_CERT_KEYS_PATH = "/var/ikev2_cert.sh"
-TCPDUMP_DIR = "/tmp/tcpdump/"
-LOCALHOST = "192.168.1.1"
-DEFAULT_PACKAGE_INSTALL_TIMEOUT = 200
-
-
-class NetworkSettings(object):
-    """Class for network settings.
-
-    Attributes:
-        ssh: ssh connection object.
-        ssh_settings: ssh settings for AccessPoint.
-        service_manager: Object manage service configuration.
-        user: username for ssh.
-        ip: ip address for AccessPoint.
-        log: Logging object for AccessPoint.
-        config: A list to store changes on network settings.
-        firewall_rules_list: A list of firewall rule name list.
-        cleanup_map: A dict for compare oppo functions.
-        l2tp: profile for vpn l2tp server.
-    """
-
-    def __init__(self, ssh, ssh_settings, logger):
-        """Initialize wireless settings.
-
-        Args:
-            ssh: ssh connection object.
-            ssh_settings: ssh settings for AccessPoint.
-            logger: Logging object for AccessPoint.
-        """
-        self.ssh = ssh
-        self.service_manager = ServiceManager(ssh)
-        self.ssh_settings = ssh_settings
-        self.user = self.ssh_settings.username
-        self.ip = self.ssh_settings.hostname
-        self.log = logger
-        self.config = set()
-        self.firewall_rules_list = []
-        self.cleanup_map = {
-            "setup_dns_server": self.remove_dns_server,
-            "setup_vpn_pptp_server": self.remove_vpn_pptp_server,
-            "setup_vpn_l2tp_server": self.remove_vpn_l2tp_server,
-            "disable_ipv6": self.enable_ipv6,
-            "setup_ipv6_bridge": self.remove_ipv6_bridge,
-            "default_dns": self.del_default_dns,
-            "default_v6_dns": self.del_default_v6_dns,
-            "ipv6_prefer_option": self.remove_ipv6_prefer_option,
-            "block_dns_response": self.unblock_dns_response,
-            "setup_mdns": self.remove_mdns,
-            "add_dhcp_rapid_commit": self.remove_dhcp_rapid_commit,
-            "setup_captive_portal": self.remove_cpative_portal
-        }
-        # This map contains cleanup functions to restore the configuration to
-        # its default state. We write these keys to HISTORY_CONFIG_PATH prior to
-        # making any changes to that subsystem.
-        # This makes it easier to recover after an aborted test.
-        self.update_firewall_rules_list()
-        self.cleanup_network_settings()
-        self.clear_tcpdump()
-
-    def cleanup_network_settings(self):
-        """Reset all changes on Access point."""
-
-        # Detect if any changes that is not clean up.
-        if self.file_exists(HISTORY_CONFIG_PATH):
-            out = self.ssh.run("cat %s" % HISTORY_CONFIG_PATH).stdout
-            if out:
-                self.config = set(out.split("\n"))
-
-        if self.config:
-            temp = self.config.copy()
-            for change in temp:
-                change_list = change.split()
-                if len(change_list) > 1:
-                    self.cleanup_map[change_list[0]](*change_list[1:])
-                else:
-                    self.cleanup_map[change]()
-            self.config = set()
-
-        if self.file_exists(HISTORY_CONFIG_PATH):
-            out = self.ssh.run("cat %s" % HISTORY_CONFIG_PATH).stdout
-            if not out:
-                self.ssh.run("rm %s" % HISTORY_CONFIG_PATH)
-
-    def commit_changes(self):
-        """Apply changes on Access point."""
-        self.ssh.run("uci commit")
-        self.service_manager.restart_services()
-        self.create_config_file("\n".join(self.config),
-                                HISTORY_CONFIG_PATH)
-
-    def package_install(self, package_list):
-        """Install packages on OpenWrtAP via opkg If not installed.
-
-        Args:
-            package_list: package list to install.
-                          e.g. "pptpd kmod-mppe kmod-nf-nathelper-extra"
-        """
-        self.ssh.run("opkg update")
-        for package_name in package_list.split(" "):
-            if not self._package_installed(package_name):
-                self.ssh.run("opkg install %s" % package_name,
-                             timeout=DEFAULT_PACKAGE_INSTALL_TIMEOUT)
-                self.log.info("Package: %s installed." % package_name)
-            else:
-                self.log.info("Package: %s skipped (already installed)." % package_name)
-
-    def package_remove(self, package_list):
-        """Remove packages on OpenWrtAP via opkg If existed.
-
-        Args:
-            package_list: package list to remove.
-        """
-        for package_name in package_list.split(" "):
-            if self._package_installed(package_name):
-                self.ssh.run("opkg remove %s" % package_name)
-                self.log.info("Package: %s removed." % package_name)
-            else:
-                self.log.info("No exist package %s found." % package_name)
-
-    def _package_installed(self, package_name):
-        """Check if target package installed on OpenWrtAP.
-
-        Args:
-            package_name: package name want to check.
-
-        Returns:
-            True if installed.
-        """
-        if self.ssh.run("opkg list-installed %s" % package_name).stdout:
-            return True
-        return False
-
-    def file_exists(self, abs_file_path):
-        """Check if target file exist on specific path on OpenWrt.
-
-        Args:
-            abs_file_path: Absolute path for the file.
-
-        Returns:
-            True if Existed.
-        """
-        path, file_name = abs_file_path.rsplit("/", 1)
-        if self.ssh.run("ls %s | grep %s" % (path, file_name),
-                        ignore_status=True).stdout:
-            return True
-        return False
-
-    def path_exists(self, abs_path):
-        """Check if dir exist on OpenWrt.
-
-        Args:
-            abs_path: absolutely path for create folder.
-        """
-        try:
-            self.ssh.run("ls %s" % abs_path)
-        except:
-            return False
-        return True
-
-    def create_folder(self, abs_path):
-        """If dir not exist, create it.
-
-        Args:
-            abs_path: absolutely path for create folder.
-        """
-        if not self.path_exists(abs_path):
-            self.ssh.run("mkdir %s" % abs_path)
-        else:
-            self.log.info("%s already existed." %abs_path)
-
-    def count(self, config, key):
-        """Count in uci config.
-
-        Args:
-            config: config or section to research
-            key: keywords to  e.g. rule, domain
-        Returns:
-            Numbers of the count.
-        """
-        count = self.ssh.run("uci show %s | grep =%s" % (config, key),
-                             ignore_status=True).stdout
-        return len(count.split("\n"))
-
-    def create_config_file(self, config, file_path):
-        """Create config file. Overwrite if file already exist.
-
-        Args:
-            config: A string of content of config.
-            file_path: Config's abs_path.
-        """
-        self.ssh.run("echo -e \"%s\" > %s" % (config, file_path))
-
-    def replace_config_option(self, old_option, new_option, file_path):
-        """Replace config option if pattern match.
-
-        If find match pattern with old_option, then replace it with new_option.
-        Else add new_option to the file.
-
-        Args:
-            old_option: the regexp pattern to replace.
-            new_option: the option to add.
-            file_path: Config's abs_path.
-        """
-        config = self.ssh.run("cat %s" % file_path).stdout
-        config, count = re.subn(old_option, new_option, config)
-        if not count:
-            config = "\n".join([config, new_option])
-        self.create_config_file(config, file_path)
-
-    def remove_config_option(self, option, file_path):
-        """Remove option from config file.
-
-        Args:
-            option: Option to remove. Support regular expression.
-            file_path: Config's abs_path.
-        Returns:
-            Boolean for find option to remove.
-        """
-        config = self.ssh.run("cat %s" % file_path).stdout.split("\n")
-        for line in config:
-            count = re.subn(option, "", line)[1]
-            if count > 0:
-                config.remove(line)
-                self.create_config_file("\n".join(config), file_path)
-                return True
-        self.log.warning("No match option to remove.")
-        return False
-
-    def setup_dns_server(self, domain_name):
-        """Setup DNS server on OpenWrtAP.
-
-        Args:
-            domain_name: Local dns domain name.
-        """
-        self.config.add("setup_dns_server")
-        self.log.info("Setup DNS server with domain name %s" % domain_name)
-        self.ssh.run("uci set dhcp.@dnsmasq[0].local='/%s/'" % domain_name)
-        self.ssh.run("uci set dhcp.@dnsmasq[0].domain='%s'" % domain_name)
-        self.add_resource_record(domain_name, self.ip)
-        self.service_manager.need_restart(SERVICE_DNSMASQ)
-        self.commit_changes()
-
-        # Check stunnel package is installed
-        self.package_install("stunnel")
-        self.service_manager.stop(SERVICE_STUNNEL)
-        self.service_manager.disable(SERVICE_STUNNEL)
-
-        # Enable stunnel
-        self.create_stunnel_config()
-        self.ssh.run("stunnel /etc/stunnel/DoTServer.conf")
-
-    def remove_dns_server(self):
-        """Remove DNS server on OpenWrtAP."""
-        if self.file_exists("/var/run/stunnel.pid"):
-            self.ssh.run("kill $(cat /var/run/stunnel.pid)")
-        self.ssh.run("uci set dhcp.@dnsmasq[0].local='/lan/'")
-        self.ssh.run("uci set dhcp.@dnsmasq[0].domain='lan'")
-        self.clear_resource_record()
-        self.service_manager.need_restart(SERVICE_DNSMASQ)
-        self.config.discard("setup_dns_server")
-        self.commit_changes()
-
-    def add_resource_record(self, domain_name, domain_ip):
-        """Add resource record.
-
-        Args:
-            domain_name: A string for domain name.
-            domain_ip: A string for domain ip.
-        """
-        self.ssh.run("uci add dhcp domain")
-        self.ssh.run("uci set dhcp.@domain[-1].name='%s'" % domain_name)
-        self.ssh.run("uci set dhcp.@domain[-1].ip='%s'" % domain_ip)
-        self.service_manager.need_restart(SERVICE_DNSMASQ)
-
-    def del_resource_record(self):
-        """Delete the last resource record."""
-        self.ssh.run("uci delete dhcp.@domain[-1]")
-        self.service_manager.need_restart(SERVICE_DNSMASQ)
-
-    def clear_resource_record(self):
-        """Delete the all resource record."""
-        rr = self.ssh.run("uci show dhcp | grep =domain",
-                          ignore_status=True).stdout
-        if rr:
-            for _ in rr.split("\n"):
-                self.del_resource_record()
-        self.service_manager.need_restart(SERVICE_DNSMASQ)
-
-    def create_stunnel_config(self):
-        """Create config for stunnel service."""
-        stunnel_config = [
-            "pid = /var/run/stunnel.pid",
-            "[dns]",
-            "accept = 853",
-            "connect = 127.0.0.1:53",
-            "cert = /etc/stunnel/fullchain.pem",
-            "key = /etc/stunnel/privkey.pem",
-        ]
-        config_string = "\n".join(stunnel_config)
-        self.create_config_file(config_string, STUNNEL_CONFIG_PATH)
-
-    def setup_vpn_pptp_server(self, local_ip, user, password):
-        """Setup pptp vpn server on OpenWrt.
-
-        Args:
-            local_ip: local pptp server ip address.
-            user: username for pptp user.
-            password: password for pptp user.
-        """
-        #  Install pptp service
-        self.package_install(PPTP_PACKAGE)
-
-        self.config.add("setup_vpn_pptp_server")
-        # Edit /etc/config/pptpd & /etc/ppp/options.pptpd
-        self.setup_pptpd(local_ip, user, password)
-        # Edit /etc/config/firewall & /etc/firewall.user
-        self.setup_firewall_rules_for_pptp()
-        # Enable service
-        self.service_manager.enable(SERVICE_PPTPD)
-        self.service_manager.need_restart(SERVICE_PPTPD)
-        self.service_manager.need_restart(SERVICE_FIREWALL)
-        self.commit_changes()
-
-    def remove_vpn_pptp_server(self):
-        """Remove pptp vpn server on OpenWrt."""
-        # Edit /etc/config/pptpd
-        self.restore_pptpd()
-        # Edit /etc/config/firewall & /etc/firewall.user
-        self.restore_firewall_rules_for_pptp()
-        # Disable service
-        self.service_manager.disable(SERVICE_PPTPD)
-        self.service_manager.need_restart(SERVICE_PPTPD)
-        self.service_manager.need_restart(SERVICE_FIREWALL)
-        self.config.discard("setup_vpn_pptp_server")
-        self.commit_changes()
-
-        self.package_remove(PPTP_PACKAGE)
-        self.ssh.run("rm /etc/ppp/options.pptpd")
-        self.ssh.run("rm /etc/config/pptpd")
-
-    def setup_pptpd(self, local_ip, username, password, ms_dns="8.8.8.8"):
-        """Setup pptpd config for ip addr and account.
-
-        Args:
-            local_ip: vpn server address
-            username: pptp vpn username
-            password: pptp vpn password
-            ms_dns: DNS server
-        """
-        # Calculate remote ip address
-        # e.g. local_ip = 10.10.10.9
-        # remote_ip = 10.10.10.10 -250
-        remote_ip = local_ip.split(".")
-        remote_ip.append(str(int(remote_ip.pop(-1)) + 1))
-        remote_ip = ".".join(remote_ip)
-        # Enable pptp service and set ip addr
-        self.ssh.run("uci set pptpd.pptpd.enabled=1")
-        self.ssh.run("uci set pptpd.pptpd.localip='%s'" % local_ip)
-        self.ssh.run("uci set pptpd.pptpd.remoteip='%s-250'" % remote_ip)
-
-        # Setup pptp service account
-        self.ssh.run("uci set pptpd.@login[0].username='%s'" % username)
-        self.ssh.run("uci set pptpd.@login[0].password='%s'" % password)
-        self.service_manager.need_restart(SERVICE_PPTPD)
-
-        self.replace_config_option(r"#*ms-dns \d+.\d+.\d+.\d+",
-                                   "ms-dns %s" % ms_dns, PPTPD_OPTION_PATH)
-        self.replace_config_option("(#no)*proxyarp",
-                                   "proxyarp", PPTPD_OPTION_PATH)
-
-    def restore_pptpd(self):
-        """Disable pptpd."""
-        self.ssh.run("uci set pptpd.pptpd.enabled=0")
-        self.remove_config_option(r"\S+ pptp-server \S+ \*",
-                                  PPP_CHAP_SECRET_PATH)
-        self.service_manager.need_restart(SERVICE_PPTPD)
-
-    def setup_vpn_l2tp_server(self,
-                              vpn_server_hostname,
-                              vpn_server_address,
-                              vpn_username,
-                              vpn_password,
-                              psk_secret,
-                              server_name,
-                              country,
-                              org):
-        """Setup l2tp vpn server on OpenWrt.
-
-        Args:
-            vpn_server_hostname: vpn server domain name
-            vpn_server_address: vpn server addr
-            vpn_username: vpn account
-            vpn_password: vpn password
-            psk_secret: psk for ipsec
-            server_name: vpn server name for register in OpenWrt
-            country: country code for generate cert keys.
-            org: Organization name for generate cert keys.
-        """
-        self.l2tp = network_const.VpnL2tp(vpn_server_hostname,
-                                          vpn_server_address,
-                                          vpn_username,
-                                          vpn_password,
-                                          psk_secret,
-                                          server_name)
-
-        self.package_install(L2TP_PACKAGE)
-        self.config.add("setup_vpn_l2tp_server")
-
-        # /etc/strongswan.conf: Strongswan configuration file
-        self.setup_strongswan()
-        # /etc/ipsec.conf /etc/ipsec.secrets
-        self.setup_ipsec()
-        # /etc/xl2tpd/xl2tpd.conf & /etc/ppp/options.xl2tpd
-        self.setup_xl2tpd()
-        # /etc/ppp/chap-secrets
-        self.setup_ppp_secret()
-        # /etc/config/firewall & /etc/firewall.user
-        self.setup_firewall_rules_for_l2tp()
-        # setup vpn server local ip
-        self.setup_vpn_local_ip()
-        # generate cert and key for rsa
-        if self.l2tp.name == "ikev2-server":
-            self.generate_ikev2_vpn_cert_keys(country, org)
-            self.add_resource_record(self.l2tp.hostname, LOCALHOST)
-        else:
-            self.generate_vpn_cert_keys(country, org)
-        # restart service
-        self.service_manager.need_restart(SERVICE_IPSEC)
-        self.service_manager.need_restart(SERVICE_XL2TPD)
-        self.service_manager.need_restart(SERVICE_FIREWALL)
-        self.commit_changes()
-
-    def remove_vpn_l2tp_server(self):
-        """Remove l2tp vpn server on OpenWrt."""
-        self.config.discard("setup_vpn_l2tp_server")
-        self.restore_firewall_rules_for_l2tp()
-        self.remove_vpn_local_ip()
-        if self.l2tp.name == "ikev2-server":
-            self.clear_resource_record()
-        self.service_manager.need_restart(SERVICE_IPSEC)
-        self.service_manager.need_restart(SERVICE_XL2TPD)
-        self.service_manager.need_restart(SERVICE_FIREWALL)
-        self.commit_changes()
-        self.package_remove(L2TP_PACKAGE)
-        if hasattr(self, "l2tp"):
-            delattr(self, "l2tp")
-
-    def setup_strongswan(self, dns="8.8.8.8"):
-        """Setup strongswan config."""
-        config = [
-            "charon {",
-            "   load_modular = yes",
-            "   plugins {",
-            "       include strongswan.d/charon/*.conf",
-            "   }",
-            "   dns1=%s" % dns,
-            "}"
-        ]
-        self.create_config_file("\n".join(config), "/etc/strongswan.conf")
-
-    def setup_ipsec(self):
-        """Setup ipsec config."""
-        def load_ipsec_config(data, rightsourceip=False):
-            for i in data.keys():
-                config.append(i)
-                for j in data[i].keys():
-                    config.append("\t %s=%s" % (j, data[i][j]))
-                if rightsourceip:
-                    config.append("\t rightsourceip=%s.16/26" % self.l2tp.address.rsplit(".", 1)[0])
-                config.append("")
-
-        config = []
-        load_ipsec_config(network_const.IPSEC_IKEV2_MSCHAPV2, True)
-        load_ipsec_config(network_const.IPSEC_IKEV2_PSK, True)
-        load_ipsec_config(network_const.IPSEC_IKEV2_RSA, True)
-        load_ipsec_config(network_const.IPSEC_IKEV2_MSCHAPV2_HOSTNAME, True)
-        load_ipsec_config(network_const.IPSEC_IKEV2_PSK_HOSTNAME, True)
-        load_ipsec_config(network_const.IPSEC_IKEV2_RSA_HOSTNAME, True)
-        load_ipsec_config(network_const.IPSEC_CONF)
-        load_ipsec_config(network_const.IPSEC_L2TP_PSK)
-        load_ipsec_config(network_const.IPSEC_L2TP_RSA)
-        load_ipsec_config(network_const.IPSEC_HYBRID_RSA, True)
-        load_ipsec_config(network_const.IPSEC_XAUTH_PSK, True)
-        load_ipsec_config(network_const.IPSEC_XAUTH_RSA, True)
-        self.create_config_file("\n".join(config), "/etc/ipsec.conf")
-
-        ipsec_secret = []
-        ipsec_secret.append(r": PSK \"%s\"" % self.l2tp.psk_secret)
-        ipsec_secret.append(r": RSA \"%s\"" % "serverKey.der")
-        ipsec_secret.append(r"%s : XAUTH \"%s\"" % (self.l2tp.username,
-                                                    self.l2tp.password))
-        self.create_config_file("\n".join(ipsec_secret), "/etc/ipsec.secrets")
-
-    def setup_xl2tpd(self, ip_range=20):
-        """Setup xl2tpd config."""
-        net_id, host_id = self.l2tp.address.rsplit(".", 1)
-        xl2tpd_conf = list(network_const.XL2TPD_CONF_GLOBAL)
-        xl2tpd_conf.append("auth file = %s" % PPP_CHAP_SECRET_PATH)
-        xl2tpd_conf.extend(network_const.XL2TPD_CONF_INS)
-        xl2tpd_conf.append("ip range = %s.%s-%s.%s" %
-                           (net_id, host_id, net_id,
-                            str(int(host_id)+ip_range)))
-        xl2tpd_conf.append("local ip = %s" % self.l2tp.address)
-        xl2tpd_conf.append("name = %s" % self.l2tp.name)
-        xl2tpd_conf.append("pppoptfile = %s" % XL2TPD_OPTION_CONFIG_PATH)
-
-        self.create_config_file("\n".join(xl2tpd_conf), XL2TPD_CONFIG_PATH)
-        xl2tpd_option = list(network_const.XL2TPD_OPTION)
-        xl2tpd_option.append("name %s" % self.l2tp.name)
-        self.create_config_file("\n".join(xl2tpd_option),
-                                XL2TPD_OPTION_CONFIG_PATH)
-
-    def setup_ppp_secret(self):
-        self.replace_config_option(
-            r"\S+ %s \S+ \*" % self.l2tp.name,
-            "%s %s %s *" % (self.l2tp.username,
-                            self.l2tp.name,
-                            self.l2tp.password),
-            PPP_CHAP_SECRET_PATH)
-
-    def generate_vpn_cert_keys(self, country, org):
-        """Generate cert and keys for vpn server."""
-        rsa = "--type rsa"
-        lifetime = "--lifetime 365"
-        size = "--size 4096"
-
-        self.ssh.run("ipsec pki --gen %s %s --outform der > caKey.der" %
-                     (rsa, size))
-        self.ssh.run("ipsec pki --self --ca %s --in caKey.der %s --dn "
-                     "\"C=%s, O=%s, CN=%s\" --outform der > caCert.der" %
-                     (lifetime, rsa, country, org, self.l2tp.hostname))
-        self.ssh.run("ipsec pki --gen %s %s --outform der > serverKey.der" %
-                     (size, rsa))
-        self.ssh.run("ipsec pki --pub --in serverKey.der %s | ipsec pki "
-                     "--issue %s --cacert caCert.der --cakey caKey.der "
-                     "--dn \"C=%s, O=%s, CN=%s\" --san %s --flag serverAuth"
-                     " --flag ikeIntermediate --outform der > serverCert.der" %
-                     (rsa, lifetime, country, org, self.l2tp.hostname, LOCALHOST))
-        self.ssh.run("ipsec pki --gen %s %s --outform der > clientKey.der" %
-                     (size, rsa))
-        self.ssh.run("ipsec pki --pub --in clientKey.der %s | ipsec pki "
-                     "--issue %s --cacert caCert.der --cakey caKey.der "
-                     "--dn \"C=%s, O=%s, CN=%s@%s\" --outform der > "
-                     "clientCert.der" % (rsa, lifetime, country, org,
-                                         self.l2tp.username, self.l2tp.hostname))
-
-        self.ssh.run(
-            "openssl rsa -inform DER -in clientKey.der"
-            " -out clientKey.pem -outform PEM"
-        )
-        self.ssh.run(
-            "openssl x509 -inform DER -in clientCert.der"
-            " -out clientCert.pem -outform PEM"
-        )
-        self.ssh.run(
-            "openssl x509 -inform DER -in caCert.der"
-            " -out caCert.pem -outform PEM"
-        )
-        self.ssh.run(
-            "openssl pkcs12 -in clientCert.pem -inkey  clientKey.pem"
-            " -certfile caCert.pem -export -out clientPkcs.p12 -passout pass:"
-        )
-
-        self.ssh.run("mv caCert.pem /etc/ipsec.d/cacerts/")
-        self.ssh.run("mv *Cert* /etc/ipsec.d/certs/")
-        self.ssh.run("mv *Key* /etc/ipsec.d/private/")
-        if not self.path_exists("/www/downloads/"):
-            self.ssh.run("mkdir /www/downloads/")
-        self.ssh.run("mv clientPkcs.p12 /www/downloads/")
-        self.ssh.run("chmod 664 /www/downloads/clientPkcs.p12")
-
-    def generate_ikev2_vpn_cert_keys(self, country, org):
-        rsa = "--type rsa"
-        lifetime = "--lifetime 365"
-        size = "--size 4096"
-
-        if not self.path_exists("/www/downloads/"):
-            self.ssh.run("mkdir /www/downloads/")
-
-        ikev2_vpn_cert_keys = [
-            "ipsec pki --gen %s %s --outform der > caKey.der" % (rsa, size),
-            "ipsec pki --self --ca %s --in caKey.der %s --dn "
-            "\"C=%s, O=%s, CN=%s\" --outform der > caCert.der" %
-            (lifetime, rsa, country, org, self.l2tp.hostname),
-            "ipsec pki --gen %s %s --outform der > serverKey.der" % (size, rsa),
-            "ipsec pki --pub --in serverKey.der %s | ipsec pki --issue %s "
-            r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s\" "
-            "--san %s --san %s --flag serverAuth --flag ikeIntermediate "
-            "--outform der > serverCert.der" % (rsa, lifetime, country, org,
-                                                self.l2tp.hostname, LOCALHOST,
-                                                self.l2tp.hostname),
-            "ipsec pki --gen %s %s --outform der > clientKey.der" % (size, rsa),
-            "ipsec pki --pub --in clientKey.der %s | ipsec pki --issue %s "
-            r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s@%s\" "
-            r"--san \"%s\" --san \"%s@%s\" --san \"%s@%s\" --outform der "
-            "> clientCert.der" % (rsa, lifetime, country, org, self.l2tp.username,
-                                  self.l2tp.hostname, self.l2tp.username,
-                                  self.l2tp.username, LOCALHOST,
-                                  self.l2tp.username, self.l2tp.hostname),
-            "openssl rsa -inform DER -in clientKey.der "
-            "-out clientKey.pem -outform PEM",
-            "openssl x509 -inform DER -in clientCert.der "
-            "-out clientCert.pem -outform PEM",
-            "openssl x509 -inform DER -in caCert.der "
-            "-out caCert.pem -outform PEM",
-            "openssl pkcs12 -in clientCert.pem -inkey  clientKey.pem "
-            "-certfile caCert.pem -export -out clientPkcs.p12 -passout pass:",
-            "mv caCert.pem /etc/ipsec.d/cacerts/",
-            "mv *Cert* /etc/ipsec.d/certs/",
-            "mv *Key* /etc/ipsec.d/private/",
-            "mv clientPkcs.p12 /www/downloads/",
-            "chmod 664 /www/downloads/clientPkcs.p12",
-        ]
-        file_string = "\n".join(ikev2_vpn_cert_keys)
-        self.create_config_file(file_string, IKEV2_VPN_CERT_KEYS_PATH)
-
-        self.ssh.run("chmod +x %s" % IKEV2_VPN_CERT_KEYS_PATH)
-        self.ssh.run("%s" % IKEV2_VPN_CERT_KEYS_PATH)
-
-    def update_firewall_rules_list(self):
-        """Update rule list in /etc/config/firewall."""
-        new_rules_list = []
-        for i in range(self.count("firewall", "rule")):
-            rule = self.ssh.run("uci get firewall.@rule[%s].name" % i).stdout
-            new_rules_list.append(rule)
-        self.firewall_rules_list = new_rules_list
-
-    def setup_firewall_rules_for_pptp(self):
-        """Setup firewall for vpn pptp server."""
-        self.update_firewall_rules_list()
-        if "pptpd" not in self.firewall_rules_list:
-            self.ssh.run("uci add firewall rule")
-            self.ssh.run("uci set firewall.@rule[-1].name='pptpd'")
-            self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
-            self.ssh.run("uci set firewall.@rule[-1].proto='tcp'")
-            self.ssh.run("uci set firewall.@rule[-1].dest_port='1723'")
-            self.ssh.run("uci set firewall.@rule[-1].family='ipv4'")
-            self.ssh.run("uci set firewall.@rule[-1].src='wan'")
-
-        if "GRP" not in self.firewall_rules_list:
-            self.ssh.run("uci add firewall rule")
-            self.ssh.run("uci set firewall.@rule[-1].name='GRP'")
-            self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
-            self.ssh.run("uci set firewall.@rule[-1].src='wan'")
-            self.ssh.run("uci set firewall.@rule[-1].proto='47'")
-
-        iptable_rules = list(network_const.FIREWALL_RULES_FOR_PPTP)
-        self.add_custom_firewall_rules(iptable_rules)
-        self.service_manager.need_restart(SERVICE_FIREWALL)
-
-    def restore_firewall_rules_for_pptp(self):
-        """Restore firewall for vpn pptp server."""
-        self.update_firewall_rules_list()
-        if "pptpd" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("pptpd"))
-        self.update_firewall_rules_list()
-        if "GRP" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("GRP"))
-        self.remove_custom_firewall_rules()
-        self.service_manager.need_restart(SERVICE_FIREWALL)
-
-    def setup_firewall_rules_for_l2tp(self):
-        """Setup firewall for vpn l2tp server."""
-        self.update_firewall_rules_list()
-        if "ipsec esp" not in self.firewall_rules_list:
-            self.ssh.run("uci add firewall rule")
-            self.ssh.run("uci set firewall.@rule[-1].name='ipsec esp'")
-            self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
-            self.ssh.run("uci set firewall.@rule[-1].proto='esp'")
-            self.ssh.run("uci set firewall.@rule[-1].src='wan'")
-
-        if "ipsec nat-t" not in self.firewall_rules_list:
-            self.ssh.run("uci add firewall rule")
-            self.ssh.run("uci set firewall.@rule[-1].name='ipsec nat-t'")
-            self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
-            self.ssh.run("uci set firewall.@rule[-1].src='wan'")
-            self.ssh.run("uci set firewall.@rule[-1].proto='udp'")
-            self.ssh.run("uci set firewall.@rule[-1].dest_port='4500'")
-
-        if "auth header" not in self.firewall_rules_list:
-            self.ssh.run("uci add firewall rule")
-            self.ssh.run("uci set firewall.@rule[-1].name='auth header'")
-            self.ssh.run("uci set firewall.@rule[-1].target='ACCEPT'")
-            self.ssh.run("uci set firewall.@rule[-1].src='wan'")
-            self.ssh.run("uci set firewall.@rule[-1].proto='ah'")
-
-        net_id = self.l2tp.address.rsplit(".", 1)[0]
-        iptable_rules = list(network_const.FIREWALL_RULES_FOR_L2TP)
-        iptable_rules.append("iptables -A FORWARD -s %s.0/24"
-                             "  -j ACCEPT" % net_id)
-        iptable_rules.append("iptables -t nat -A POSTROUTING"
-                             " -s %s.0/24 -o eth0.2 -j MASQUERADE" % net_id)
-
-        self.add_custom_firewall_rules(iptable_rules)
-        self.service_manager.need_restart(SERVICE_FIREWALL)
-
-    def restore_firewall_rules_for_l2tp(self):
-        """Restore firewall for vpn l2tp server."""
-        self.update_firewall_rules_list()
-        if "ipsec esp" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("ipsec esp"))
-        self.update_firewall_rules_list()
-        if "ipsec nat-t" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("ipsec nat-t"))
-        self.update_firewall_rules_list()
-        if "auth header" in self.firewall_rules_list:
-            self.ssh.run("uci del firewall.@rule[%s]"
-                         % self.firewall_rules_list.index("auth header"))
-        self.remove_custom_firewall_rules()
-        self.service_manager.need_restart(SERVICE_FIREWALL)
-
-    def add_custom_firewall_rules(self, rules):
-        """Backup current custom rules and replace with arguments.
-
-        Args:
-            rules: A list of iptable rules to apply.
-        """
-        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH+".backup"
-        if not self.file_exists(backup_file_path):
-            self.ssh.run("mv %s %s" % (FIREWALL_CUSTOM_OPTION_PATH,
-                                       backup_file_path))
-        for rule in rules:
-            self.ssh.run("echo %s >> %s" % (rule, FIREWALL_CUSTOM_OPTION_PATH))
-
-    def remove_custom_firewall_rules(self):
-        """Clean up and recover custom firewall rules."""
-        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH+".backup"
-        if self.file_exists(backup_file_path):
-            self.ssh.run("mv %s %s" % (backup_file_path,
-                                       FIREWALL_CUSTOM_OPTION_PATH))
-        else:
-            self.log.debug("Did not find %s" % backup_file_path)
-            self.ssh.run("echo "" > %s" % FIREWALL_CUSTOM_OPTION_PATH)
-
-    def disable_pptp_service(self):
-        """Disable pptp service."""
-        self.package_remove(PPTP_PACKAGE)
-
-    def setup_vpn_local_ip(self):
-        """Setup VPN Server local ip on OpenWrt for client ping verify."""
-        self.ssh.run("uci set network.lan2=interface")
-        self.ssh.run("uci set network.lan2.type=bridge")
-        self.ssh.run("uci set network.lan2.ifname=eth1.2")
-        self.ssh.run("uci set network.lan2.proto=static")
-        self.ssh.run("uci set network.lan2.ipaddr=\"%s\"" % self.l2tp.address)
-        self.ssh.run("uci set network.lan2.netmask=255.255.255.0")
-        self.ssh.run("uci set network.lan2=interface")
-        self.service_manager.reload(SERVICE_NETWORK)
-        self.commit_changes()
-
-    def remove_vpn_local_ip(self):
-        """Discard vpn local ip on OpenWrt."""
-        self.ssh.run("uci delete network.lan2")
-        self.service_manager.reload(SERVICE_NETWORK)
-        self.commit_changes()
-
-    def enable_ipv6(self):
-        """Enable ipv6 on OpenWrt."""
-        self.ssh.run("uci set network.lan.ipv6=1")
-        self.ssh.run("uci set network.wan.ipv6=1")
-        self.service_manager.enable("odhcpd")
-        self.service_manager.reload(SERVICE_NETWORK)
-        self.config.discard("disable_ipv6")
-        self.commit_changes()
-
-    def disable_ipv6(self):
-        """Disable ipv6 on OpenWrt."""
-        self.config.add("disable_ipv6")
-        self.ssh.run("uci set network.lan.ipv6=0")
-        self.ssh.run("uci set network.wan.ipv6=0")
-        self.service_manager.disable("odhcpd")
-        self.service_manager.reload(SERVICE_NETWORK)
-        self.commit_changes()
-
-    def setup_ipv6_bridge(self):
-        """Setup ipv6 bridge for client have ability to access network."""
-        self.config.add("setup_ipv6_bridge")
-
-        self.ssh.run("uci set dhcp.lan.dhcpv6=relay")
-        self.ssh.run("uci set dhcp.lan.ra=relay")
-        self.ssh.run("uci set dhcp.lan.ndp=relay")
-
-        self.ssh.run("uci set dhcp.wan6=dhcp")
-        self.ssh.run("uci set dhcp.wan6.dhcpv6=relay")
-        self.ssh.run("uci set dhcp.wan6.ra=relay")
-        self.ssh.run("uci set dhcp.wan6.ndp=relay")
-        self.ssh.run("uci set dhcp.wan6.master=1")
-        self.ssh.run("uci set dhcp.wan6.interface=wan6")
-
-        # Enable service
-        self.service_manager.need_restart(SERVICE_ODHCPD)
-        self.commit_changes()
-
-    def remove_ipv6_bridge(self):
-        """Discard ipv6 bridge on OpenWrt."""
-        if "setup_ipv6_bridge" in self.config:
-            self.config.discard("setup_ipv6_bridge")
-
-            self.ssh.run("uci set dhcp.lan.dhcpv6=server")
-            self.ssh.run("uci set dhcp.lan.ra=server")
-            self.ssh.run("uci delete dhcp.lan.ndp")
-
-            self.ssh.run("uci delete dhcp.wan6")
-
-            self.service_manager.need_restart(SERVICE_ODHCPD)
-            self.commit_changes()
-
-    def _add_dhcp_option(self, args):
-        self.ssh.run("uci add_list dhcp.lan.dhcp_option=\"%s\"" % args)
-
-    def _remove_dhcp_option(self, args):
-        self.ssh.run("uci del_list dhcp.lan.dhcp_option=\"%s\"" % args)
-
-    def add_default_dns(self, addr_list):
-        """Add default dns server for client.
-
-        Args:
-            addr_list: dns ip address for Openwrt client.
-        """
-        self._add_dhcp_option("6,%s" % ",".join(addr_list))
-        self.config.add("default_dns %s" % addr_list)
-        self.service_manager.need_restart(SERVICE_DNSMASQ)
-        self.commit_changes()
-
-    def del_default_dns(self, addr_list):
-        """Remove default dns server for client.
-
-        Args:
-            addr_list: list of dns ip address for Openwrt client.
-        """
-        self._remove_dhcp_option("6,%s" % addr_list)
-        self.config.discard("default_dns %s" % addr_list)
-        self.service_manager.need_restart(SERVICE_DNSMASQ)
-        self.commit_changes()
-
-    def add_default_v6_dns(self, addr_list):
-        """Add default v6 dns server for client.
-
-        Args:
-            addr_list: dns ip address for Openwrt client.
-        """
-        self.ssh.run("uci add_list dhcp.lan.dns=\"%s\"" % addr_list)
-        self.config.add("default_v6_dns %s" % addr_list)
-        self.service_manager.need_restart(SERVICE_ODHCPD)
-        self.commit_changes()
-
-    def del_default_v6_dns(self, addr_list):
-        """Del default v6 dns server for client.
-
-        Args:
-            addr_list: dns ip address for Openwrt client.
-        """
-        self.ssh.run("uci del_list dhcp.lan.dns=\"%s\"" % addr_list)
-        self.config.add("default_v6_dns %s" % addr_list)
-        self.service_manager.need_restart(SERVICE_ODHCPD)
-        self.commit_changes()
-
-    def add_ipv6_prefer_option(self):
-        self._add_dhcp_option("108,1800i")
-        self.config.add("ipv6_prefer_option")
-        self.service_manager.need_restart(SERVICE_DNSMASQ)
-        self.commit_changes()
-
-    def remove_ipv6_prefer_option(self):
-        self._remove_dhcp_option("108,1800i")
-        self.config.discard("ipv6_prefer_option")
-        self.service_manager.need_restart(SERVICE_DNSMASQ)
-        self.commit_changes()
-
-    def add_dhcp_rapid_commit(self):
-        self.create_config_file("dhcp-rapid-commit\n","/etc/dnsmasq.conf")
-        self.config.add("add_dhcp_rapid_commit")
-        self.service_manager.need_restart(SERVICE_DNSMASQ)
-        self.commit_changes()
-
-    def remove_dhcp_rapid_commit(self):
-        self.create_config_file("","/etc/dnsmasq.conf")
-        self.config.discard("add_dhcp_rapid_commit")
-        self.service_manager.need_restart(SERVICE_DNSMASQ)
-        self.commit_changes()
-
-    def start_tcpdump(self, test_name, args="", interface="br-lan"):
-        """"Start tcpdump on OpenWrt.
-
-        Args:
-            test_name: Test name for create tcpdump file name.
-            args: Option args for tcpdump.
-            interface: Interface to logging.
-        Returns:
-            tcpdump_file_name: tcpdump file name on OpenWrt.
-            pid: tcpdump process id.
-        """
-        self.package_install("tcpdump")
-        if not self.path_exists(TCPDUMP_DIR):
-            self.ssh.run("mkdir %s" % TCPDUMP_DIR)
-        tcpdump_file_name = "openwrt_%s_%s.pcap" % (test_name,
-                                                    time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())))
-        tcpdump_file_path = "".join([TCPDUMP_DIR, tcpdump_file_name])
-        cmd = "tcpdump -i %s -s0 %s -w %s" % (interface, args, tcpdump_file_path)
-        self.ssh.run_async(cmd)
-        pid = self._get_tcpdump_pid(tcpdump_file_name)
-        if not pid:
-            raise signals.TestFailure("Fail to start tcpdump on OpenWrt.")
-        # Set delay to prevent tcpdump fail to capture target packet.
-        time.sleep(15)
-        return tcpdump_file_name
-
-    def stop_tcpdump(self, tcpdump_file_name, pull_dir=None):
-        """Stop tcpdump on OpenWrt and pull the pcap file.
-
-        Args:
-            tcpdump_file_name: tcpdump file name on OpenWrt.
-            pull_dir: Keep none if no need to pull.
-        Returns:
-            tcpdump abs_path on host.
-        """
-        # Set delay to prevent tcpdump fail to capture target packet.
-        time.sleep(15)
-        pid = self._get_tcpdump_pid(tcpdump_file_name)
-        self.ssh.run("kill -9 %s" % pid, ignore_status=True)
-        if self.path_exists(TCPDUMP_DIR) and pull_dir:
-            tcpdump_path = "".join([TCPDUMP_DIR, tcpdump_file_name])
-            tcpdump_remote_path = "/".join([pull_dir, tcpdump_file_name])
-            tcpdump_local_path = "%s@%s:%s" % (self.user, self.ip, tcpdump_path)
-            utils.exe_cmd("scp %s %s" % (tcpdump_local_path, tcpdump_remote_path))
-
-        if self._get_tcpdump_pid(tcpdump_file_name):
-            raise signals.TestFailure("Failed to stop tcpdump on OpenWrt.")
-        if self.file_exists(tcpdump_path):
-            self.ssh.run("rm -f %s" % tcpdump_path)
-        return tcpdump_remote_path if pull_dir else None
-
-    def clear_tcpdump(self):
-        self.ssh.run("killall tcpdump", ignore_status=True)
-        if self.ssh.run("pgrep tcpdump", ignore_status=True).stdout:
-            raise signals.TestFailure("Failed to clean up tcpdump process.")
-        if self.path_exists(TCPDUMP_DIR):
-            self.ssh.run("rm -f  %s/*" % TCPDUMP_DIR)
-
-    def _get_tcpdump_pid(self, tcpdump_file_name):
-        """Check tcpdump process on OpenWrt."""
-        return self.ssh.run("pgrep -f %s" % (tcpdump_file_name), ignore_status=True).stdout
-
-    def setup_mdns(self):
-        self.config.add("setup_mdns")
-        self.package_install(MDNS_PACKAGE)
-        self.commit_changes()
-
-    def remove_mdns(self):
-        self.config.discard("setup_mdns")
-        self.package_remove(MDNS_PACKAGE)
-        self.commit_changes()
-
-    def block_dns_response(self):
-        self.config.add("block_dns_response")
-        iptable_rules = list(network_const.FIREWALL_RULES_DISABLE_DNS_RESPONSE)
-        self.add_custom_firewall_rules(iptable_rules)
-        self.service_manager.need_restart(SERVICE_FIREWALL)
-        self.commit_changes()
-
-    def unblock_dns_response(self):
-        self.config.discard("block_dns_response")
-        self.remove_custom_firewall_rules()
-        self.service_manager.need_restart(SERVICE_FIREWALL)
-        self.commit_changes()
-
-    def setup_captive_portal(self, fas_fdqn,fas_port=2080):
-        """Create captive portal with Forwarding Authentication Service.
-
-        Args:
-             fas_fdqn: String for captive portal page's fdqn add to local dns server.
-             fas_port: Port for captive portal page.
-        """
-        self.package_install(CAPTIVE_PORTAL_PACKAGE)
-        self.config.add("setup_captive_portal %s" % fas_port)
-        self.ssh.run("uci set opennds.@opennds[0].fas_secure_enabled=2")
-        self.ssh.run("uci set opennds.@opennds[0].gatewayport=2050")
-        self.ssh.run("uci set opennds.@opennds[0].fasport=%s" % fas_port)
-        self.ssh.run("uci set opennds.@opennds[0].fasremotefqdn=%s" % fas_fdqn)
-        self.ssh.run("uci set opennds.@opennds[0].faspath=\"/nds/fas-aes.php\"")
-        self.ssh.run("uci set opennds.@opennds[0].faskey=1234567890")
-        self.service_manager.need_restart(SERVICE_OPENNDS)
-        # Config uhttpd
-        self.ssh.run("uci set uhttpd.main.interpreter=.php=/usr/bin/php-cgi")
-        self.ssh.run("uci add_list uhttpd.main.listen_http=0.0.0.0:%s" % fas_port)
-        self.ssh.run("uci add_list uhttpd.main.listen_http=[::]:%s" % fas_port)
-        self.service_manager.need_restart(SERVICE_UHTTPD)
-        # cp fas-aes.php
-        self.create_folder("/www/nds/")
-        self.ssh.run("cp /etc/opennds/fas-aes.php /www/nds")
-        # Add fdqn
-        self.add_resource_record(fas_fdqn, LOCALHOST)
-        self.commit_changes()
-
-    def remove_cpative_portal(self, fas_port=2080):
-        """Remove captive portal.
-
-        Args:
-             fas_port: Port for captive portal page.
-        """
-        # Remove package
-        self.package_remove(CAPTIVE_PORTAL_PACKAGE)
-        # Clean up config
-        self.ssh.run("rm /etc/config/opennds")
-        # Remove fdqn
-        self.clear_resource_record()
-        # Restore uhttpd
-        self.ssh.run("uci del uhttpd.main.interpreter")
-        self.ssh.run("uci del_list uhttpd.main.listen_http=\'0.0.0.0:%s\'" % fas_port)
-        self.ssh.run("uci del_list uhttpd.main.listen_http=\'[::]:%s\'" % fas_port)
-        self.service_manager.need_restart(SERVICE_UHTTPD)
-        # Clean web root
-        self.ssh.run("rm -r /www/nds")
-        self.config.discard("setup_captive_portal %s" % fas_port)
-        self.commit_changes()
-
-
-class ServiceManager(object):
-    """Class for service on OpenWrt.
-
-        Attributes:
-        ssh: ssh object for the AP.
-        _need_restart: Record service need to restart.
-    """
-
-    def __init__(self, ssh):
-        self.ssh = ssh
-        self._need_restart = set()
-
-    def enable(self, service_name):
-        """Enable service auto start."""
-        self.ssh.run("/etc/init.d/%s enable" % service_name)
-
-    def disable(self, service_name):
-        """Disable service auto start."""
-        self.ssh.run("/etc/init.d/%s disable" % service_name)
-
-    def restart(self, service_name):
-        """Restart the service."""
-        self.ssh.run("/etc/init.d/%s restart" % service_name)
-
-    def reload(self, service_name):
-        """Restart the service."""
-        self.ssh.run("/etc/init.d/%s reload" % service_name)
-
-    def restart_services(self):
-        """Restart all services need to restart."""
-        for service in self._need_restart:
-            if service == SERVICE_NETWORK:
-                self.reload(service)
-            self.restart(service)
-        self._need_restart = set()
-
-    def stop(self, service_name):
-        """Stop the service."""
-        self.ssh.run("/etc/init.d/%s stop" % service_name)
-
-    def need_restart(self, service_name):
-        self._need_restart.add(service_name)
diff --git a/src/antlion/controllers/openwrt_lib/openwrt_constants.py b/src/antlion/controllers/openwrt_lib/openwrt_constants.py
deleted file mode 100644
index 8fd7686..0000000
--- a/src/antlion/controllers/openwrt_lib/openwrt_constants.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-SYSTEM_INFO_CMD = "ubus call system board"
-
-
-class OpenWrtWifiSecurity:
-  # Used by OpenWrt AP
-  WPA_PSK_DEFAULT = "psk"
-  WPA_PSK_CCMP = "psk+ccmp"
-  WPA_PSK_TKIP = "psk+tkip"
-  WPA_PSK_TKIP_AND_CCMP = "psk+tkip+ccmp"
-  WPA2_PSK_DEFAULT = "psk2"
-  WPA2_PSK_CCMP = "psk2+ccmp"
-  WPA2_PSK_TKIP = "psk2+tkip"
-  WPA2_PSK_TKIP_AND_CCMP = "psk2+tkip+ccmp"
-
-
-class OpenWrtWifiSetting:
-  IFACE_2G = 2
-  IFACE_5G = 3
-
-
-class OpenWrtModelMap:
-  NETGEAR_R8000 = ("radio2", "radio1")
diff --git a/src/antlion/controllers/openwrt_lib/wireless_config.py b/src/antlion/controllers/openwrt_lib/wireless_config.py
deleted file mode 100644
index 7810fa2..0000000
--- a/src/antlion/controllers/openwrt_lib/wireless_config.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""Class for Wireless config."""
-
-NET_IFACE = "lan"
-
-
-class WirelessConfig(object):
-  """Creates an object to hold wireless config.
-
-  Attributes:
-    name: name of the wireless config
-    ssid: SSID of the network.
-    security: security of the wifi network.
-    band: band of the wifi network.
-    iface: network interface of the wifi network.
-    password: password for psk network.
-    wep_key: wep keys for wep network.
-    wep_key_num: key number for wep network.
-    radius_server_ip: IP address of radius server.
-    radius_server_port: Port number of radius server.
-    radius_server_secret: Secret key of radius server.
-    hidden: Boolean, if the wifi network is hidden.
-    ieee80211w: PMF bit of the wifi network.
-  """
-
-  def __init__(
-      self,
-      name,
-      ssid,
-      security,
-      band,
-      iface=NET_IFACE,
-      password=None,
-      wep_key=None,
-      wep_key_num=1,
-      radius_server_ip=None,
-      radius_server_port=None,
-      radius_server_secret=None,
-      hidden=False,
-      ieee80211w=None):
-    self.name = name
-    self.ssid = ssid
-    self.security = security
-    self.band = band
-    self.iface = iface
-    self.password = password
-    self.wep_key = wep_key
-    self.wep_key_num = wep_key_num
-    self.radius_server_ip = radius_server_ip
-    self.radius_server_port = radius_server_port
-    self.radius_server_secret = radius_server_secret
-    self.hidden = hidden
-    self.ieee80211w = ieee80211w
-
diff --git a/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py b/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py
deleted file mode 100644
index ad36482..0000000
--- a/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py
+++ /dev/null
@@ -1,154 +0,0 @@
-"""Class to configure wireless settings."""
-
-import time
-
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.openwrt_lib.network_settings import SERVICE_DNSMASQ
-from antlion.controllers.openwrt_lib.network_settings import ServiceManager
-
-
-LEASE_FILE = "/tmp/dhcp.leases"
-OPEN_SECURITY = "none"
-PSK1_SECURITY = "psk"
-PSK_SECURITY = "psk2"
-WEP_SECURITY = "wep"
-ENT_SECURITY = "wpa2"
-OWE_SECURITY = "owe"
-SAE_SECURITY = "sae"
-SAEMIXED_SECURITY = "sae-mixed"
-ENABLE_RADIO = "0"
-DISABLE_RADIO = "1"
-ENABLE_HIDDEN = "1"
-RADIO_2G = "radio1"
-RADIO_5G = "radio0"
-
-
-class WirelessSettingsApplier(object):
-  """Class for wireless settings.
-
-  Attributes:
-    ssh: ssh object for the AP.
-    service_manager: Object manage service configuration
-    wireless_configs: a list of
-      antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
-    channel_2g: channel for 2G band.
-    channel_5g: channel for 5G band.
-  """
-
-  def __init__(self, ssh, configs, channel_2g, channel_5g, radio_2g=RADIO_2G, radio_5g=RADIO_5G):
-    """Initialize wireless settings.
-
-    Args:
-      ssh: ssh connection object.
-      configs: a list of
-        antlion.controllers.openwrt_lib.wireless_config.WirelessConfig.
-      channel_2g: channel for 2G band.
-      channel_5g: channel for 5G band.
-    """
-    self.ssh = ssh
-    self.service_manager = ServiceManager(ssh)
-    self.wireless_configs = configs
-    self.channel_2g = channel_2g
-    self.channel_5g = channel_5g
-    self.radio_2g = radio_2g
-    self.radio_5g = radio_5g
-
-  def apply_wireless_settings(self):
-    """Configure wireless settings from a list of configs."""
-    default_2g_iface = "default_" + self.radio_2g
-    default_5g_iface = "default_" + self.radio_5g
-
-    # set channels for 2G and 5G bands
-    self.ssh.run("uci set wireless.%s.channel='%s'" % (self.radio_2g, self.channel_2g))
-    self.ssh.run("uci set wireless.%s.channel='%s'" % (self.radio_5g, self.channel_5g))
-    if self.channel_5g == 165:
-      self.ssh.run("uci set wireless.%s.htmode='VHT20'" % self.radio_5g)
-    elif self.channel_5g == 132 or self.channel_5g == 136:
-      self.ssh.run("iw reg set ZA")
-      self.ssh.run("uci set wireless.%s.htmode='VHT40'" % self.radio_5g)
-
-    if self.channel_2g == 13:
-      self.ssh.run("iw reg set AU")
-
-    # disable default OpenWrt SSID
-    self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                 (default_2g_iface, DISABLE_RADIO))
-    self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                 (default_5g_iface, DISABLE_RADIO))
-
-    # Enable radios
-    self.ssh.run("uci set wireless.%s.disabled='%s'" % (self.radio_2g, ENABLE_RADIO))
-    self.ssh.run("uci set wireless.%s.disabled='%s'" % (self.radio_5g, ENABLE_RADIO))
-
-    for config in self.wireless_configs:
-
-      # configure open network
-      if config.security == OPEN_SECURITY:
-        if config.band == hostapd_constants.BAND_2G:
-          self.ssh.run("uci set wireless.%s.ssid='%s'" %
-                       (default_2g_iface, config.ssid))
-          self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                       (default_2g_iface, ENABLE_RADIO))
-          if config.hidden:
-            self.ssh.run("uci set wireless.%s.hidden='%s'" %
-                         (default_2g_iface, ENABLE_HIDDEN))
-        elif config.band == hostapd_constants.BAND_5G:
-          self.ssh.run("uci set wireless.%s.ssid='%s'" %
-                       (default_5g_iface, config.ssid))
-          self.ssh.run("uci set wireless.%s.disabled='%s'" %
-                       (default_5g_iface, ENABLE_RADIO))
-          if config.hidden:
-            self.ssh.run("uci set wireless.%s.hidden='%s'" %
-                         (default_5g_iface, ENABLE_HIDDEN))
-        continue
-
-      self.ssh.run("uci set wireless.%s='wifi-iface'" % config.name)
-      if config.band == hostapd_constants.BAND_2G:
-        self.ssh.run("uci set wireless.%s.device='%s'" % (config.name, self.radio_2g))
-      else:
-        self.ssh.run("uci set wireless.%s.device='%s'" % (config.name, self.radio_5g))
-      self.ssh.run("uci set wireless.%s.network='%s'" %
-                   (config.name, config.iface))
-      self.ssh.run("uci set wireless.%s.mode='ap'" % config.name)
-      self.ssh.run("uci set wireless.%s.ssid='%s'" %
-                   (config.name, config.ssid))
-      self.ssh.run("uci set wireless.%s.encryption='%s'" %
-                   (config.name, config.security))
-      if config.security == PSK_SECURITY or config.security == SAE_SECURITY\
-              or config.security == PSK1_SECURITY\
-              or config.security == SAEMIXED_SECURITY:
-        self.ssh.run("uci set wireless.%s.key='%s'" %
-                     (config.name, config.password))
-      elif config.security == WEP_SECURITY:
-        self.ssh.run("uci set wireless.%s.key%s='%s'" %
-                     (config.name, config.wep_key_num, config.wep_key))
-        self.ssh.run("uci set wireless.%s.key='%s'" %
-                     (config.name, config.wep_key_num))
-      elif config.security == ENT_SECURITY:
-        self.ssh.run("uci set wireless.%s.auth_secret='%s'" %
-                     (config.name, config.radius_server_secret))
-        self.ssh.run("uci set wireless.%s.auth_server='%s'" %
-                     (config.name, config.radius_server_ip))
-        self.ssh.run("uci set wireless.%s.auth_port='%s'" %
-                     (config.name, config.radius_server_port))
-      if config.ieee80211w:
-        self.ssh.run("uci set wireless.%s.ieee80211w='%s'" %
-                     (config.name, config.ieee80211w))
-      if config.hidden:
-        self.ssh.run("uci set wireless.%s.hidden='%s'" %
-                     (config.name, ENABLE_HIDDEN))
-
-    self.ssh.run("uci commit wireless")
-    self.ssh.run("cp %s %s.tmp" % (LEASE_FILE, LEASE_FILE))
-
-  def cleanup_wireless_settings(self):
-    """Reset wireless settings to default."""
-    self.ssh.run("wifi down")
-    self.ssh.run("rm -f /etc/config/wireless")
-    self.ssh.run("wifi config")
-    if self.channel_5g == 132:
-      self.ssh.run("iw reg set US")
-    self.ssh.run("cp %s.tmp %s" % (LEASE_FILE, LEASE_FILE))
-    self.service_manager.restart(SERVICE_DNSMASQ)
-    time.sleep(9)
-
diff --git a/src/antlion/controllers/packet_capture.py b/src/antlion/controllers/packet_capture.py
deleted file mode 100755
index 706f9c4..0000000
--- a/src/antlion/controllers/packet_capture.py
+++ /dev/null
@@ -1,315 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import logger
-from antlion.controllers.ap_lib.hostapd_constants import AP_DEFAULT_CHANNEL_2G
-from antlion.controllers.ap_lib.hostapd_constants import AP_DEFAULT_CHANNEL_5G
-from antlion.controllers.ap_lib.hostapd_constants import CHANNEL_MAP
-from antlion.controllers.ap_lib.hostapd_constants import FREQUENCY_MAP
-from antlion.controllers.ap_lib.hostapd_constants import CENTER_CHANNEL_MAP
-from antlion.controllers.ap_lib.hostapd_constants import VHT_CHANNEL
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import formatter
-from antlion.controllers.utils_lib.ssh import settings
-from antlion.libs.logging import log_stream
-from antlion.libs.proc.process import Process
-from antlion import asserts
-
-import os
-import threading
-import time
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'PacketCapture'
-ACTS_CONTROLLER_REFERENCE_NAME = 'packet_capture'
-BSS = 'BSS'
-BSSID = 'BSSID'
-FREQ = 'freq'
-FREQUENCY = 'frequency'
-LEVEL = 'level'
-MON_2G = 'mon0'
-MON_5G = 'mon1'
-BAND_IFACE = {'2G': MON_2G, '5G': MON_5G}
-SCAN_IFACE = 'wlan2'
-SCAN_TIMEOUT = 60
-SEP = ':'
-SIGNAL = 'signal'
-SSID = 'SSID'
-
-
-def create(configs):
-    return [PacketCapture(c) for c in configs]
-
-
-def destroy(pcaps):
-    for pcap in pcaps:
-        pcap.close()
-
-
-def get_info(pcaps):
-    return [pcap.ssh_settings.hostname for pcap in pcaps]
-
-
-class PcapProperties(object):
-    """Class to maintain packet capture properties after starting tcpdump.
-
-    Attributes:
-        proc: Process object of tcpdump
-        pcap_fname: File name of the tcpdump output file
-        pcap_file: File object for the tcpdump output file
-    """
-
-    def __init__(self, proc, pcap_fname, pcap_file):
-        """Initialize object."""
-        self.proc = proc
-        self.pcap_fname = pcap_fname
-        self.pcap_file = pcap_file
-
-
-class PacketCaptureError(Exception):
-    """Error related to Packet capture."""
-
-
-class PacketCapture(object):
-    """Class representing packet capturer.
-
-    An instance of this class creates and configures two interfaces for monitor
-    mode; 'mon0' for 2G and 'mon1' for 5G and one interface for scanning for
-    wifi networks; 'wlan2' which is a dual band interface.
-
-    Attributes:
-        pcap_properties: dict that specifies packet capture properties for a
-            band.
-    """
-
-    def __init__(self, configs):
-        """Initialize objects.
-
-        Args:
-            configs: config for the packet capture.
-        """
-        self.ssh_settings = settings.from_config(configs['ssh_config'])
-        self.ssh = connection.SshConnection(self.ssh_settings)
-        self.log = logger.create_logger(lambda msg: '[%s|%s] %s' % (
-            MOBLY_CONTROLLER_CONFIG_NAME, self.ssh_settings.hostname, msg))
-
-        self._create_interface(MON_2G, 'monitor')
-        self._create_interface(MON_5G, 'monitor')
-        self.managed_mode = True
-        result = self.ssh.run('ifconfig -a', ignore_status=True)
-        if result.stderr or SCAN_IFACE not in result.stdout:
-            self.managed_mode = False
-        if self.managed_mode:
-            self._create_interface(SCAN_IFACE, 'managed')
-
-        self.pcap_properties = dict()
-        self._pcap_stop_lock = threading.Lock()
-
-    def _create_interface(self, iface, mode):
-        """Create interface of monitor/managed mode.
-
-        Create mon0/mon1 for 2G/5G monitor mode and wlan2 for managed mode.
-        """
-        if mode == 'monitor':
-            self.ssh.run('ifconfig wlan%s down' % iface[-1],
-                         ignore_status=True)
-        self.ssh.run('iw dev %s del' % iface, ignore_status=True)
-        self.ssh.run('iw phy%s interface add %s type %s' %
-                     (iface[-1], iface, mode),
-                     ignore_status=True)
-        self.ssh.run('ip link set %s up' % iface, ignore_status=True)
-        result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
-        if result.stderr or iface not in result.stdout:
-            raise PacketCaptureError('Failed to configure interface %s' %
-                                     iface)
-
-    def _cleanup_interface(self, iface):
-        """Clean up monitor mode interfaces."""
-        self.ssh.run('iw dev %s del' % iface, ignore_status=True)
-        result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
-        if not result.stderr or 'No such device' not in result.stderr:
-            raise PacketCaptureError('Failed to cleanup monitor mode for %s' %
-                                     iface)
-
-    def _parse_scan_results(self, scan_result):
-        """Parses the scan dump output and returns list of dictionaries.
-
-        Args:
-            scan_result: scan dump output from scan on mon interface.
-
-        Returns:
-            Dictionary of found network in the scan.
-            The attributes returned are
-                a.) SSID - SSID of the network.
-                b.) LEVEL - signal level.
-                c.) FREQUENCY - WiFi band the network is on.
-                d.) BSSID - BSSID of the network.
-        """
-        scan_networks = []
-        network = {}
-        for line in scan_result.splitlines():
-            if SEP not in line:
-                continue
-            if BSS in line:
-                network[BSSID] = line.split('(')[0].split()[-1]
-            field, value = line.lstrip().rstrip().split(SEP)[0:2]
-            value = value.lstrip()
-            if SIGNAL in line:
-                network[LEVEL] = int(float(value.split()[0]))
-            elif FREQ in line:
-                network[FREQUENCY] = int(value)
-            elif SSID in line:
-                network[SSID] = value
-                scan_networks.append(network)
-                network = {}
-        return scan_networks
-
-    def get_wifi_scan_results(self):
-        """Starts a wifi scan on wlan2 interface.
-
-        Returns:
-            List of dictionaries each representing a found network.
-        """
-        if not self.managed_mode:
-            raise PacketCaptureError('Managed mode not setup')
-        result = self.ssh.run('iw dev %s scan' % SCAN_IFACE)
-        if result.stderr:
-            raise PacketCaptureError('Failed to get scan dump')
-        if not result.stdout:
-            return []
-        return self._parse_scan_results(result.stdout)
-
-    def start_scan_and_find_network(self, ssid):
-        """Start a wifi scan on wlan2 interface and find network.
-
-        Args:
-            ssid: SSID of the network.
-
-        Returns:
-            True/False if the network if found or not.
-        """
-        curr_time = time.time()
-        while time.time() < curr_time + SCAN_TIMEOUT:
-            found_networks = self.get_wifi_scan_results()
-            for network in found_networks:
-                if network[SSID] == ssid:
-                    return True
-            time.sleep(3)  # sleep before next scan
-        return False
-
-    def configure_monitor_mode(self, band, channel, bandwidth=20):
-        """Configure monitor mode.
-
-        Args:
-            band: band to configure monitor mode for.
-            channel: channel to set for the interface.
-            bandwidth : bandwidth for VHT channel as 40,80,160
-
-        Returns:
-            True if configure successful.
-            False if not successful.
-        """
-
-        band = band.upper()
-        if band not in BAND_IFACE:
-            self.log.error('Invalid band. Must be 2g/2G or 5g/5G')
-            return False
-
-        iface = BAND_IFACE[band]
-        if bandwidth == 20:
-            self.ssh.run('iw dev %s set channel %s' % (iface, channel),
-                         ignore_status=True)
-        else:
-            center_freq = None
-            for i, j in CENTER_CHANNEL_MAP[VHT_CHANNEL[bandwidth]]["channels"]:
-                if channel in range(i, j + 1):
-                    center_freq = (FREQUENCY_MAP[i] + FREQUENCY_MAP[j]) / 2
-                    break
-            asserts.assert_true(center_freq,
-                                "No match channel in VHT channel list.")
-            self.ssh.run(
-                'iw dev %s set freq %s %s %s' %
-                (iface, FREQUENCY_MAP[channel], bandwidth, center_freq),
-                ignore_status=True)
-
-        result = self.ssh.run('iw dev %s info' % iface, ignore_status=True)
-        if result.stderr or 'channel %s' % channel not in result.stdout:
-            self.log.error("Failed to configure monitor mode for %s" % band)
-            return False
-        return True
-
-    def start_packet_capture(self, band, log_path, pcap_fname):
-        """Start packet capture for band.
-
-        band = 2G starts tcpdump on 'mon0' interface.
-        band = 5G starts tcpdump on 'mon1' interface.
-
-        Args:
-            band: '2g' or '2G' and '5g' or '5G'.
-            log_path: test log path to save the pcap file.
-            pcap_fname: name of the pcap file.
-
-        Returns:
-            pcap_proc: Process object of the tcpdump.
-        """
-        band = band.upper()
-        if band not in BAND_IFACE.keys() or band in self.pcap_properties:
-            self.log.error("Invalid band or packet capture already running")
-            return None
-
-        pcap_name = '%s_%s.pcap' % (pcap_fname, band)
-        pcap_fname = os.path.join(log_path, pcap_name)
-        pcap_file = open(pcap_fname, 'w+b')
-
-        tcpdump_cmd = 'tcpdump -i %s -w - -U 2>/dev/null' % (BAND_IFACE[band])
-        cmd = formatter.SshFormatter().format_command(tcpdump_cmd,
-                                                      None,
-                                                      self.ssh_settings,
-                                                      extra_flags={'-q': None})
-        pcap_proc = Process(cmd)
-        pcap_proc.set_on_output_callback(lambda msg: pcap_file.write(msg),
-                                         binary=True)
-        pcap_proc.start()
-
-        self.pcap_properties[band] = PcapProperties(pcap_proc, pcap_fname,
-                                                    pcap_file)
-        return pcap_proc
-
-    def stop_packet_capture(self, proc):
-        """Stop the packet capture.
-
-        Args:
-            proc: Process object of tcpdump to kill.
-        """
-        for key, val in self.pcap_properties.items():
-            if val.proc is proc:
-                break
-        else:
-            self.log.error("Failed to stop tcpdump. Invalid process.")
-            return
-
-        proc.stop()
-        with self._pcap_stop_lock:
-            self.pcap_properties[key].pcap_file.close()
-            del self.pcap_properties[key]
-
-    def close(self):
-        """Cleanup.
-
-        Cleans up all the monitor mode interfaces and closes ssh connections.
-        """
-        self._cleanup_interface(MON_2G)
-        self._cleanup_interface(MON_5G)
-        self.ssh.close()
diff --git a/src/antlion/controllers/packet_sender.py b/src/antlion/controllers/packet_sender.py
deleted file mode 100644
index ddd988c..0000000
--- a/src/antlion/controllers/packet_sender.py
+++ /dev/null
@@ -1,935 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Collection of utility functions to generate and send custom packets.
-
-"""
-import logging
-import multiprocessing
-import socket
-import time
-
-import antlion.signals
-
-# http://www.secdev.org/projects/scapy/
-# On ubuntu, sudo pip3 install scapy
-import scapy.all as scapy
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'PacketSender'
-ACTS_CONTROLLER_REFERENCE_NAME = 'packet_senders'
-
-GET_FROM_LOCAL_INTERFACE = 'get_local'
-MAC_BROADCAST = 'ff:ff:ff:ff:ff:ff'
-IPV4_BROADCAST = '255.255.255.255'
-ARP_DST = '00:00:00:00:00:00'
-RA_MAC = '33:33:00:00:00:01'
-RA_IP = 'ff02::1'
-RA_PREFIX = 'd00d::'
-RA_PREFIX_LEN = 64
-DHCP_OFFER_OP = 2
-DHCP_OFFER_SRC_PORT = 67
-DHCP_OFFER_DST_PORT = 68
-DHCP_TRANS_ID = 0x01020304
-DNS_LEN = 3
-PING6_DATA = 'BEST PING6 EVER'
-PING4_TYPE = 8
-MDNS_TTL = 255
-MDNS_QTYPE = 'PTR'
-MDNS_UDP_PORT = 5353
-MDNS_V4_IP_DST = '224.0.0.251'
-MDNS_V4_MAC_DST = '01:00:5E:00:00:FB'
-MDNS_RECURSIVE = 1
-MDNS_V6_IP_DST = 'FF02::FB'
-MDNS_V6_MAC_DST = '33:33:00:00:00:FB'
-ETH_TYPE_IP = 2048
-SAP_SPANNING_TREE = 0x42
-SNAP_OUI = 12
-SNAP_SSAP = 170
-SNAP_DSAP = 170
-SNAP_CTRL = 3
-LLC_XID_CONTROL = 191
-PAD_LEN_BYTES = 128
-
-
-def create(configs):
-    """Creates PacketSender controllers from a json config.
-
-    Args:
-        The json configs that represent this controller
-
-    Returns:
-        A new PacketSender
-    """
-    return [PacketSender(c) for c in configs]
-
-
-def destroy(objs):
-    """Destroys a list of PacketSenders and stops sending (if active).
-
-    Args:
-        objs: A list of PacketSenders
-    """
-    for pkt_sender in objs:
-        pkt_sender.stop_sending(True)
-    return
-
-
-def get_info(objs):
-    """Get information on a list of packet senders.
-
-    Args:
-        objs: A list of PacketSenders
-
-    Returns:
-        Network interface name that is being used by each packet sender
-    """
-    return [pkt_sender.interface for pkt_sender in objs]
-
-
-class ThreadSendPacket(multiprocessing.Process):
-    """Creates a thread that keeps sending the same packet until a stop signal.
-
-    Attributes:
-        stop_signal: signal to stop the thread execution
-        packet: desired packet to keep sending
-        interval: interval between consecutive packets (s)
-        interface: network interface name (e.g., 'eth0')
-        log: object used for logging
-    """
-
-    def __init__(self, signal, packet, interval, interface, log):
-        multiprocessing.Process.__init__(self)
-        self.stop_signal = signal
-        self.packet = packet
-        self.interval = interval
-        self.interface = interface
-        self.log = log
-
-    def run(self):
-        self.log.info('Packet Sending Started.')
-        while True:
-            if self.stop_signal.is_set():
-                # Poison pill means shutdown
-                self.log.info('Packet Sending Stopped.')
-                break
-
-            try:
-                scapy.sendp(self.packet, iface=self.interface, verbose=0)
-                time.sleep(self.interval)
-            except Exception:
-                self.log.exception('Exception when trying to send packet')
-                return
-
-        return
-
-
-class PacketSenderError(antlion.signals.ControllerError):
-    """Raises exceptions encountered in packet sender lib."""
-
-
-class PacketSender(object):
-    """Send any custom packet over a desired interface.
-
-    Attributes:
-        log: class logging object
-        thread_active: indicates whether or not the send thread is active
-        thread_send: thread object for the concurrent packet transmissions
-        stop_signal: event to stop the thread
-        interface: network interface name (e.g., 'eth0')
-    """
-
-    def __init__(self, ifname):
-        """Initiallize the PacketGenerator class.
-
-        Args:
-            ifname: network interface name that will be used packet generator
-        """
-        self.log = logging.getLogger()
-        self.packet = None
-        self.thread_active = False
-        self.thread_send = None
-        self.stop_signal = multiprocessing.Event()
-        self.interface = ifname
-
-    def send_ntimes(self, packet, ntimes, interval):
-        """Sends a packet ntimes at a given interval.
-
-        Args:
-            packet: custom built packet from Layer 2 up to Application layer
-            ntimes: number of packets to send
-            interval: interval between consecutive packet transmissions (s)
-        """
-        if packet is None:
-            raise PacketSenderError(
-                'There is no packet to send. Create a packet first.')
-
-        for _ in range(ntimes):
-            try:
-                scapy.sendp(packet, iface=self.interface, verbose=0)
-                time.sleep(interval)
-            except socket.error as excpt:
-                self.log.exception('Caught socket exception : %s' % excpt)
-                return
-
-    def send_receive_ntimes(self, packet, ntimes, interval):
-        """Sends a packet and receives the reply ntimes at a given interval.
-
-        Args:
-            packet: custom built packet from Layer 2 up to Application layer
-            ntimes: number of packets to send
-            interval: interval between consecutive packet transmissions and
-                      the corresponding reply (s)
-        """
-        if packet is None:
-            raise PacketSenderError(
-                'There is no packet to send. Create a packet first.')
-
-        for _ in range(ntimes):
-            try:
-                scapy.srp1(
-                    packet, iface=self.interface, timeout=interval, verbose=0)
-                time.sleep(interval)
-            except socket.error as excpt:
-                self.log.exception('Caught socket exception : %s' % excpt)
-                return
-
-    def start_sending(self, packet, interval):
-        """Sends packets in parallel with the main process.
-
-        Creates a thread and keeps sending the same packet at a given interval
-        until a stop signal is received
-
-        Args:
-            packet: custom built packet from Layer 2 up to Application layer
-            interval: interval between consecutive packets (s)
-        """
-        if packet is None:
-            raise PacketSenderError(
-                'There is no packet to send. Create a packet first.')
-
-        if self.thread_active:
-            raise PacketSenderError(
-                ('There is already an active thread. Stop it'
-                 'before starting another transmission.'))
-
-        self.thread_send = ThreadSendPacket(self.stop_signal, packet, interval,
-                                            self.interface, self.log)
-        self.thread_send.start()
-        self.thread_active = True
-
-    def stop_sending(self, ignore_status=False):
-        """Stops the concurrent thread that is continuously sending packets.
-
-       """
-        if not self.thread_active:
-            if ignore_status:
-                return
-            else:
-                raise PacketSenderError(
-                    'Error: There is no acive thread running to stop.')
-
-        # Stop thread
-        self.stop_signal.set()
-        self.thread_send.join()
-
-        # Just as precaution
-        if self.thread_send.is_alive():
-            self.thread_send.terminate()
-            self.log.warning('Packet Sending forced to terminate')
-
-        self.stop_signal.clear()
-        self.thread_send = None
-        self.thread_active = False
-
-
-class ArpGenerator(object):
-    """Creates a custom ARP packet
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv4: IPv4 address (Layer 3) of the source node
-        dst_ipv4: IPv4 address (Layer 3) of the destination node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: a dictionary with all the necessary packet fields.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params['interf']
-        self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params['src_mac']
-
-        self.dst_ipv4 = config_params['dst_ipv4']
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv4 = scapy.get_if_addr(interf)
-        else:
-            self.src_ipv4 = config_params['src_ipv4']
-
-    def generate(self,
-                 op='who-has',
-                 ip_dst=None,
-                 ip_src=None,
-                 hwsrc=None,
-                 hwdst=None,
-                 eth_dst=None):
-        """Generates a custom ARP packet.
-
-        Args:
-            op: ARP type (request or reply)
-            ip_dst: ARP ipv4 destination (Optional)
-            ip_src: ARP ipv4 source address (Optional)
-            hwsrc: ARP hardware source address (Optional)
-            hwdst: ARP hardware destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Create IP layer
-        hw_src = (hwsrc if hwsrc is not None else self.src_mac)
-        hw_dst = (hwdst if hwdst is not None else ARP_DST)
-        ipv4_dst = (ip_dst if ip_dst is not None else self.dst_ipv4)
-        ipv4_src = (ip_src if ip_src is not None else self.src_ipv4)
-        ip4 = scapy.ARP(
-            op=op, pdst=ipv4_dst, psrc=ipv4_src, hwdst=hw_dst, hwsrc=hw_src)
-
-        # Create Ethernet layer
-        mac_dst = (eth_dst if eth_dst is not None else MAC_BROADCAST)
-        ethernet = scapy.Ether(src=self.src_mac, dst=mac_dst)
-
-        self.packet = ethernet / ip4
-        return self.packet
-
-
-class DhcpOfferGenerator(object):
-    """Creates a custom DHCP offer packet
-
-    Attributes:
-        packet: desired built custom packet
-        subnet_mask: local network subnet mask
-        src_mac: MAC address (Layer 2) of the source node
-        dst_mac: MAC address (Layer 2) of the destination node
-        src_ipv4: IPv4 address (Layer 3) of the source node
-        dst_ipv4: IPv4 address (Layer 3) of the destination node
-        gw_ipv4: IPv4 address (Layer 3) of the Gateway
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params['interf']
-        self.packet = None
-        self.subnet_mask = config_params['subnet_mask']
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params['src_mac']
-
-        self.dst_ipv4 = config_params['dst_ipv4']
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv4 = scapy.get_if_addr(interf)
-        else:
-            self.src_ipv4 = config_params['src_ipv4']
-
-        self.gw_ipv4 = config_params['gw_ipv4']
-
-    def generate(self, cha_mac=None, dst_ip=None):
-        """Generates a DHCP offer packet.
-
-        Args:
-            cha_mac: hardware target address for DHCP offer (Optional)
-            dst_ip: ipv4 address of target host for renewal (Optional)
-        """
-
-        # Create DHCP layer
-        dhcp = scapy.DHCP(options=[
-            ('message-type', 'offer'),
-            ('subnet_mask', self.subnet_mask),
-            ('server_id', self.src_ipv4),
-            ('end'),
-        ])
-
-        # Overwrite standard DHCP fields
-        sta_hw = (cha_mac if cha_mac is not None else self.dst_mac)
-        sta_ip = (dst_ip if dst_ip is not None else self.dst_ipv4)
-
-        # Create Boot
-        bootp = scapy.BOOTP(
-            op=DHCP_OFFER_OP,
-            yiaddr=sta_ip,
-            siaddr=self.src_ipv4,
-            giaddr=self.gw_ipv4,
-            chaddr=scapy.mac2str(sta_hw),
-            xid=DHCP_TRANS_ID)
-
-        # Create UDP
-        udp = scapy.UDP(sport=DHCP_OFFER_SRC_PORT, dport=DHCP_OFFER_DST_PORT)
-
-        # Create IP layer
-        ip4 = scapy.IP(src=self.src_ipv4, dst=IPV4_BROADCAST)
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(dst=MAC_BROADCAST, src=self.src_mac)
-
-        self.packet = ethernet / ip4 / udp / bootp / dhcp
-        return self.packet
-
-
-class NsGenerator(object):
-    """Creates a custom Neighbor Solicitation (NS) packet
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc)
-        src_ipv6: IPv6 address (Layer 3) of the source node
-        dst_ipv6: IPv6 address (Layer 3) of the destination node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params['interf']
-        self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params['src_mac']
-
-        self.dst_ipv6 = config_params['dst_ipv6']
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
-        else:
-            self.src_ipv6 = config_params['src_ipv6']
-
-    def generate(self, ip_dst=None, eth_dst=None):
-        """Generates a Neighbor Solicitation (NS) packet (ICMP over IPv6).
-
-        Args:
-            ip_dst: NS ipv6 destination (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Compute IP addresses
-        target_ip6 = ip_dst if ip_dst is not None else self.dst_ipv6
-        ndst_ip = socket.inet_pton(socket.AF_INET6, target_ip6)
-        nnode_mcast = scapy.in6_getnsma(ndst_ip)
-        node_mcast = socket.inet_ntop(socket.AF_INET6, nnode_mcast)
-        # Compute MAC addresses
-        hw_dst = (eth_dst
-                  if eth_dst is not None else scapy.in6_getnsmac(nnode_mcast))
-
-        # Create IPv6 layer
-        base = scapy.IPv6(dst=node_mcast, src=self.src_ipv6)
-        neighbor_solicitation = scapy.ICMPv6ND_NS(tgt=target_ip6)
-        src_ll_addr = scapy.ICMPv6NDOptSrcLLAddr(lladdr=self.src_mac)
-        ip6 = base / neighbor_solicitation / src_ll_addr
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=hw_dst)
-
-        self.packet = ethernet / ip6
-        return self.packet
-
-
-class RaGenerator(object):
-    """Creates a custom Router Advertisement (RA) packet
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc)
-        src_ipv6: IPv6 address (Layer 3) of the source node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params['interf']
-        self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params['src_mac']
-
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
-        else:
-            self.src_ipv6 = config_params['src_ipv6']
-
-    def generate(self,
-                 lifetime,
-                 enableDNS=False,
-                 dns_lifetime=0,
-                 ip_dst=None,
-                 eth_dst=None):
-        """Generates a Router Advertisement (RA) packet (ICMP over IPv6).
-
-        Args:
-            lifetime: RA lifetime
-            enableDNS: Add RDNSS option to RA (Optional)
-            dns_lifetime: Set DNS server lifetime (Optional)
-            ip_dst: IPv6 destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Overwrite standard fields if desired
-        ip6_dst = (ip_dst if ip_dst is not None else RA_IP)
-        hw_dst = (eth_dst if eth_dst is not None else RA_MAC)
-
-        # Create IPv6 layer
-        base = scapy.IPv6(dst=ip6_dst, src=self.src_ipv6)
-        router_solicitation = scapy.ICMPv6ND_RA(routerlifetime=lifetime)
-        src_ll_addr = scapy.ICMPv6NDOptSrcLLAddr(lladdr=self.src_mac)
-        prefix = scapy.ICMPv6NDOptPrefixInfo(
-            prefixlen=RA_PREFIX_LEN, prefix=RA_PREFIX)
-        if enableDNS:
-            rndss = scapy.ICMPv6NDOptRDNSS(
-                lifetime=dns_lifetime, dns=[self.src_ipv6], len=DNS_LEN)
-            ip6 = base / router_solicitation / src_ll_addr / prefix / rndss
-        else:
-            ip6 = base / router_solicitation / src_ll_addr / prefix
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=hw_dst)
-
-        self.packet = ethernet / ip6
-        return self.packet
-
-
-class Ping6Generator(object):
-    """Creates a custom Ping v6 packet (i.e., ICMP over IPv6)
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        dst_mac: MAC address (Layer 2) of the destination node
-        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc)
-        src_ipv6: IPv6 address (Layer 3) of the source node
-        dst_ipv6: IPv6 address (Layer 3) of the destination node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params['interf']
-        self.packet = None
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params['src_mac']
-
-        self.dst_ipv6 = config_params['dst_ipv6']
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
-        else:
-            self.src_ipv6 = config_params['src_ipv6']
-
-    def generate(self, ip_dst=None, eth_dst=None):
-        """Generates a Ping6 packet (i.e., Echo Request)
-
-        Args:
-            ip_dst: IPv6 destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Overwrite standard fields if desired
-        ip6_dst = (ip_dst if ip_dst is not None else self.dst_ipv6)
-        hw_dst = (eth_dst if eth_dst is not None else self.dst_mac)
-
-        # Create IPv6 layer
-        base = scapy.IPv6(dst=ip6_dst, src=self.src_ipv6)
-        echo_request = scapy.ICMPv6EchoRequest(data=PING6_DATA)
-
-        ip6 = base / echo_request
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=hw_dst)
-
-        self.packet = ethernet / ip6
-        return self.packet
-
-
-class Ping4Generator(object):
-    """Creates a custom Ping v4 packet (i.e., ICMP over IPv4)
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        dst_mac: MAC address (Layer 2) of the destination node
-        src_ipv4: IPv4 address (Layer 3) of the source node
-        dst_ipv4: IPv4 address (Layer 3) of the destination node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params['interf']
-        self.packet = None
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params['src_mac']
-
-        self.dst_ipv4 = config_params['dst_ipv4']
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv4 = scapy.get_if_addr(interf)
-        else:
-            self.src_ipv4 = config_params['src_ipv4']
-
-    def generate(self, ip_dst=None, eth_dst=None):
-        """Generates a Ping4 packet (i.e., Echo Request)
-
-        Args:
-            ip_dst: IP destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-
-        # Overwrite standard fields if desired
-        sta_ip = (ip_dst if ip_dst is not None else self.dst_ipv4)
-        sta_hw = (eth_dst if eth_dst is not None else self.dst_mac)
-
-        # Create IPv6 layer
-        base = scapy.IP(src=self.src_ipv4, dst=sta_ip)
-        echo_request = scapy.ICMP(type=PING4_TYPE)
-
-        ip4 = base / echo_request
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=sta_hw)
-
-        self.packet = ethernet / ip4
-        return self.packet
-
-
-class Mdns6Generator(object):
-    """Creates a custom mDNS IPv6 packet
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc)
-        src_ipv6: IPv6 address (Layer 3) of the source node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params['interf']
-        self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params['src_mac']
-
-        self.src_ipv6_type = config_params['src_ipv6_type']
-        if config_params['src_ipv6'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
-        else:
-            self.src_ipv6 = config_params['src_ipv6']
-
-    def generate(self, ip_dst=None, eth_dst=None):
-        """Generates a mDNS v6 packet for multicast DNS config
-
-        Args:
-            ip_dst: IPv6 destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-
-        # Overwrite standard fields if desired
-        sta_ip = (ip_dst if ip_dst is not None else MDNS_V6_IP_DST)
-        sta_hw = (eth_dst if eth_dst is not None else MDNS_V6_MAC_DST)
-
-        # Create mDNS layer
-        qdServer = scapy.DNSQR(qname=self.src_ipv6, qtype=MDNS_QTYPE)
-        mDNS = scapy.DNS(rd=MDNS_RECURSIVE, qd=qdServer)
-
-        # Create UDP
-        udp = scapy.UDP(sport=MDNS_UDP_PORT, dport=MDNS_UDP_PORT)
-
-        # Create IP layer
-        ip6 = scapy.IPv6(src=self.src_ipv6, dst=sta_ip)
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=sta_hw)
-
-        self.packet = ethernet / ip6 / udp / mDNS
-        return self.packet
-
-
-class Mdns4Generator(object):
-    """Creates a custom mDNS v4 packet
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv4: IPv4 address (Layer 3) of the source node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params['interf']
-        self.packet = None
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params['src_mac']
-
-        if config_params['src_ipv4'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv4 = scapy.get_if_addr(interf)
-        else:
-            self.src_ipv4 = config_params['src_ipv4']
-
-    def generate(self, ip_dst=None, eth_dst=None):
-        """Generates a mDNS v4 packet for multicast DNS config
-
-        Args:
-            ip_dst: IP destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-
-        # Overwrite standard fields if desired
-        sta_ip = (ip_dst if ip_dst is not None else MDNS_V4_IP_DST)
-        sta_hw = (eth_dst if eth_dst is not None else MDNS_V4_MAC_DST)
-
-        # Create mDNS layer
-        qdServer = scapy.DNSQR(qname=self.src_ipv4, qtype=MDNS_QTYPE)
-        mDNS = scapy.DNS(rd=MDNS_RECURSIVE, qd=qdServer)
-
-        # Create UDP
-        udp = scapy.UDP(sport=MDNS_UDP_PORT, dport=MDNS_UDP_PORT)
-
-        # Create IP layer
-        ip4 = scapy.IP(src=self.src_ipv4, dst=sta_ip, ttl=255)
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=sta_hw)
-
-        self.packet = ethernet / ip4 / udp / mDNS
-        return self.packet
-
-
-class Dot3Generator(object):
-    """Creates a custom 802.3 Ethernet Frame
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv4: IPv4 address (Layer 3) of the source node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params['interf']
-        self.packet = None
-        self.dst_mac = config_params['dst_mac']
-        if config_params['src_mac'] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params['src_mac']
-
-    def _build_ether(self, eth_dst=None):
-        """Creates the basic frame for 802.3
-
-        Args:
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Overwrite standard fields if desired
-        sta_hw = (eth_dst if eth_dst is not None else self.dst_mac)
-        # Create Ethernet layer
-        dot3_base = scapy.Dot3(src=self.src_mac, dst=sta_hw)
-
-        return dot3_base
-
-    def _pad_frame(self, frame):
-        """Pads the frame with default length and values
-
-        Args:
-            frame: Ethernet (layer 2) to be padded
-        """
-        frame.len = PAD_LEN_BYTES
-        pad = scapy.Padding()
-        pad.load = '\x00' * PAD_LEN_BYTES
-        return frame / pad
-
-    def generate(self, eth_dst=None):
-        """Generates the basic 802.3 frame and adds padding
-
-        Args:
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Create 802.3 Base
-        ethernet = self._build_ether(eth_dst)
-
-        self.packet = self._pad_frame(ethernet)
-        return self.packet
-
-    def generate_llc(self, eth_dst=None, dsap=2, ssap=3, ctrl=LLC_XID_CONTROL):
-        """Generates the 802.3 frame with LLC and adds padding
-
-        Args:
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-            dsap: Destination Service Access Point (Optional)
-            ssap: Source Service Access Point (Optional)
-            ctrl: Control (Optional)
-        """
-        # Create 802.3 Base
-        ethernet = self._build_ether(eth_dst)
-
-        # Create LLC layer
-        llc = scapy.LLC(dsap=dsap, ssap=ssap, ctrl=ctrl)
-
-        # Append and create packet
-        self.packet = self._pad_frame(ethernet / llc)
-        return self.packet
-
-    def generate_snap(self,
-                      eth_dst=None,
-                      dsap=SNAP_DSAP,
-                      ssap=SNAP_SSAP,
-                      ctrl=SNAP_CTRL,
-                      oui=SNAP_OUI,
-                      code=ETH_TYPE_IP):
-        """Generates the 802.3 frame with LLC and SNAP and adds padding
-
-        Args:
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-            dsap: Destination Service Access Point (Optional)
-            ssap: Source Service Access Point (Optional)
-            ctrl: Control (Optional)
-            oid: Protocol Id or Org Code (Optional)
-            code: EtherType (Optional)
-        """
-        # Create 802.3 Base
-        ethernet = self._build_ether(eth_dst)
-
-        # Create 802.2 LLC header
-        llc = scapy.LLC(dsap=dsap, ssap=ssap, ctrl=ctrl)
-
-        # Create 802.3 SNAP header
-        snap = scapy.SNAP(OUI=oui, code=code)
-
-        # Append and create packet
-        self.packet = self._pad_frame(ethernet / llc / snap)
-        return self.packet
-
-
-def get_if_addr6(intf, address_type):
-    """Returns the Ipv6 address from a given local interface.
-
-    Returns the desired IPv6 address from the interface 'intf' in human
-    readable form. The address type is indicated by the IPv6 constants like
-    IPV6_ADDR_LINKLOCAL, IPV6_ADDR_GLOBAL, etc. If no address is found,
-    None is returned.
-
-    Args:
-        intf: desired interface name
-        address_type: addrees typle like LINKLOCAL or GLOBAL
-
-    Returns:
-        Ipv6 address of the specified interface in human readable format
-    """
-    for if_list in scapy.in6_getifaddr():
-        if if_list[2] == intf and if_list[1] == address_type:
-            return if_list[0]
-
-    return None
-
diff --git a/src/antlion/controllers/pdu.py b/src/antlion/controllers/pdu.py
deleted file mode 100644
index d167afa..0000000
--- a/src/antlion/controllers/pdu.py
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import importlib
-import logging
-
-from antlion import tracelogger
-
-MOBLY_CONTROLLER_CONFIG_NAME = 'PduDevice'
-ACTS_CONTROLLER_REFERENCE_NAME = 'pdu_devices'
-
-
-def create(configs):
-    """Creates a PduDevice for each config in configs.
-
-    Args:
-        configs: List of configs from PduDevice field.
-            Fields:
-                device: a string "<brand>.<model>" that corresponds to module
-                    in pdu_lib/
-                host: a string of the device ip address
-                username (optional): a string of the username for device sign-in
-                password (optional): a string of the password for device sign-in
-    Return:
-        A list of PduDevice objects.
-    """
-    if configs:
-        pdus = []
-        for config in configs:
-            device = config.get('device')
-            if not device:
-                raise PduError("Config must provide a device")
-
-            host = config.get('host')
-            if not device:
-                raise PduError("Config must provide a host ip address")
-            username = config.get('username')
-            password = config.get('password')
-            pdu = _create_device(device, host, username, password)
-            pdus.append(pdu)
-        return pdus
-
-
-def destroy(pdu_list):
-    """Ensure any connections to devices are closed.
-
-    Args:
-        pdu_list: A list of PduDevice objects.
-    """
-    for pdu in pdu_list:
-        pdu.close()
-
-
-def get_info(pdu_list):
-    """Retrieves info from a list of PduDevice objects.
-
-    Args:
-        pdu_list: A list of PduDevice objects.
-    Return:
-        A list containing a dictionary for each PduDevice, with keys:
-            'host': a string of the device ip address
-            'username': a string of the username
-            'password': a string of the password
-    """
-    info = []
-    for pdu in pdu_list:
-        info.append({
-            'host': pdu.host,
-            'username': pdu.username,
-            'password': pdu.password
-        })
-    return info
-
-
-def _create_device(device, host, username, password):
-    """Factory method that returns an instance of PduDevice implementation
-    based on the device string.
-    """
-    module_name = 'antlion.controllers.pdu_lib.' + device
-    module = importlib.import_module(module_name)
-    return module.PduDevice(host, username, password)
-
-
-def get_pdu_port_for_device(device_pdu_config, pdus):
-    """Retrieves the pdu object and port of that PDU powering a given device.
-    This is especially necessary when there are multilpe devices on a single PDU
-    or multiple PDUs registered.
-
-    Args:
-        device_pdu_config: a dict, representing the config of the device.
-        pdus: a list of registered PduDevice objects.
-
-    Returns:
-        A tuple: (PduObject for the device, string port number on that PDU).
-
-    Raises:
-        ValueError, if there is no PDU matching the given host in the config.
-
-    Example ACTS config:
-        ...
-        "testbed": [
-            ...
-            "FuchsiaDevice": [
-                {
-                    "ip": "<device_ip>",
-                    "ssh_config": "/path/to/sshconfig",
-                    "PduDevice": {
-                        "host": "192.168.42.185",
-                        "port": 2
-                    }
-                }
-            ],
-            "AccessPoint": [
-                {
-                    "ssh_config": {
-                        ...
-                    },
-                    "PduDevice": {
-                        "host": "192.168.42.185",
-                        "port" 1
-                    }
-                }
-            ],
-            "PduDevice": [
-                {
-                    "device": "synaccess.np02b",
-                    "host": "192.168.42.185"
-                }
-            ]
-        ],
-        ...
-    """
-    pdu_ip = device_pdu_config['host']
-    port = device_pdu_config['port']
-    for pdu in pdus:
-        if pdu.host == pdu_ip:
-            return pdu, port
-    raise ValueError('No PduDevice with host: %s' % pdu_ip)
-
-
-class PduDevice(object):
-    """An object that defines the basic Pdu functionality and abstracts
-    the actual hardware.
-
-    This is a pure abstract class. Implementations should be of the same
-    class name (eg. class PduDevice(pdu.PduDevice)) and exist in
-    pdu_lib/<brand>/<device_name>.py. PduDevice objects should not be
-    instantiated by users directly.
-    """
-
-    def __init__(self, host, username, password):
-        if type(self) is PduDevice:
-            raise NotImplementedError(
-                "Base class: cannot be instantiated directly")
-        self.host = host
-        self.username = username
-        self.password = password
-        self.log = tracelogger.TraceLogger(logging.getLogger())
-
-    def on_all(self):
-        """Turns on all outlets on the device."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def off_all(self):
-        """Turns off all outlets on the device."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def on(self, outlet):
-        """Turns on specific outlet on the device.
-        Args:
-            outlet: a string of the outlet to turn on.
-        """
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def off(self, outlet):
-        """Turns off specific outlet on the device.
-        Args:
-            outlet: a string of the outlet to turn off.
-        """
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def reboot(self, outlet):
-        """Toggles a specific outlet on the device to off, then to on.
-        Args:
-            outlet: a string of the outlet to reboot.
-        """
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def status(self):
-        """Retrieves the status of the outlets on the device.
-
-        Return:
-            A dictionary matching outlet string to:
-                True: if outlet is On
-                False: if outlet is Off
-        """
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def close(self):
-        """Closes connection to the device."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-
-class PduError(Exception):
-    """An exception for use within PduDevice implementations"""
diff --git a/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py b/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
deleted file mode 100644
index 8a4c6d4..0000000
--- a/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import signals
-from antlion.controllers import pdu
-
-# Create an optional dependency for dlipower since it has a transitive
-# dependency on beautifulsoup4. This library is difficult to maintain as a
-# third_party dependency in Fuchsia since it is hosted on launchpad.
-#
-# TODO(b/246999212): Explore alternatives to the dlipower package
-try:
-    import dlipower
-    HAS_IMPORT_DLIPOWER = True
-except ImportError:
-    HAS_IMPORT_DLIPOWER = False
-
-
-class PduDevice(pdu.PduDevice):
-    """Implementation of pure abstract PduDevice object for the Digital Loggers
-    WebPowerSwitch PDUs.
-
-    This controller supports the following Digital Loggers PDUs:
-        - Pro (VII)
-        - WebPowerSwitch V
-        - WebPowerSwitch IV
-        - WebPowerSwitch III
-        - WebPowerSwitch II
-        - Ethernet Power Controller III
-    """
-
-    def __init__(self, host, username, password):
-        """
-        Note: This may require allowing plaintext password sign in on the
-        power switch, which can be configure in the device's control panel.
-        """
-        super(PduDevice, self).__init__(host, username, password)
-
-        if not HAS_IMPORT_DLIPOWER:
-            raise signals.ControllerError(
-                'Digital Loggers PDUs are not supported with current installed '
-                'packages; install the dlipower package to add support')
-
-        self.power_switch = dlipower.PowerSwitch(hostname=host,
-                                                 userid=username,
-                                                 password=password)
-        # Connection is made at command execution, this verifies the device
-        # can be reached before continuing.
-        if not self.power_switch.statuslist():
-            raise pdu.PduError(
-                'Failed to connect get WebPowerSwitch status. Incorrect host, '
-                'userid, or password?')
-        else:
-            self.log.info('Connected to WebPowerSwitch (%s).' % host)
-
-    def on_all(self):
-        """Turn on power to all outlets."""
-        for outlet in self.power_switch:
-            outlet.on()
-            self._verify_state(outlet.name, 'ON')
-
-    def off_all(self):
-        """Turn off power to all outlets."""
-        for outlet in self.power_switch:
-            outlet.off()
-            self._verify_state(outlet.name, 'OFF')
-
-    def on(self, outlet):
-        """Turn on power to given outlet
-
-        Args:
-            outlet: string or int, the outlet name/number
-        """
-        self.power_switch.command_on_outlets('on', str(outlet))
-        self._verify_state(outlet, 'ON')
-
-    def off(self, outlet):
-        """Turn off power to given outlet
-
-        Args:
-            outlet: string or int, the outlet name/number
-        """
-        self.power_switch.command_on_outlets('off', str(outlet))
-        self._verify_state(outlet, 'OFF')
-
-    def reboot(self, outlet):
-        """Cycle the given outlet to OFF and back ON.
-
-        Args:
-            outlet: string or int, the outlet name/number
-        """
-        self.power_switch.command_on_outlets('cycle', str(outlet))
-        self._verify_state(outlet, 'ON')
-
-    def status(self):
-        """ Return the status of the switch outlets.
-
-        Return:
-            a dict mapping outlet string numbers to:
-                True if outlet is ON
-                False if outlet is OFF
-        """
-        status_list = self.power_switch.statuslist()
-        return {str(outlet): state == 'ON' for outlet, _, state in status_list}
-
-    def close(self):
-        # Since there isn't a long-running connection, close is not applicable.
-        pass
-
-    def _verify_state(self, outlet, expected_state, timeout=3):
-        """Verify that the state of a given outlet is at an expected state.
-        There can be a slight delay in when the device receives the
-        command and when the state actually changes (especially when powering
-        on). This function is used to verify the change has occurred before
-        exiting.
-
-        Args:
-            outlet: string, the outlet name or number to check state.
-            expected_state: string, 'ON' or 'OFF'
-
-        Returns if actual state reaches expected state.
-
-        Raises:
-            PduError: if state has not reached expected state at timeout.
-        """
-        for _ in range(timeout):
-            actual_state = self.power_switch.status(str(outlet))
-            if actual_state == expected_state:
-                return
-            else:
-                self.log.debug('Outlet %s not yet in state %s' %
-                               (outlet, expected_state))
-        raise pdu.PduError(
-            'Outlet %s on WebPowerSwitch (%s) failed to reach expected state. \n'
-            'Expected State: %s\n'
-            'Actual State: %s' %
-            (outlet, self.host, expected_state, actual_state))
diff --git a/src/antlion/controllers/pdu_lib/synaccess/np02b.py b/src/antlion/controllers/pdu_lib/synaccess/np02b.py
deleted file mode 100644
index ccc7e74..0000000
--- a/src/antlion/controllers/pdu_lib/synaccess/np02b.py
+++ /dev/null
@@ -1,184 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-from antlion.controllers import pdu
-
-import re
-import telnetlib
-import time
-
-
-class PduDevice(pdu.PduDevice):
-    """Implementation of pure abstract PduDevice object for the Synaccess np02b
-    Pdu.
-    """
-    def __init__(self, host, username, password):
-        super(PduDevice, self).__init__(host, username, password)
-        self.tnhelper = _TNHelperNP02B(host)
-
-    def on_all(self):
-        """ Turns on both outlets on the np02b."""
-        self.tnhelper.cmd('ps 1')
-        self._verify_state({'1': True, '2': True})
-
-    def off_all(self):
-        """ Turns off both outlets on the np02b."""
-        self.tnhelper.cmd('ps 0')
-        self._verify_state({'1': False, '2': False})
-
-    def on(self, outlet):
-        """ Turns on specific outlet on the np02b.
-
-        Args:
-            outlet: string of the outlet to turn on ('1' or '2')
-        """
-        self.tnhelper.cmd('pset %s 1' % outlet)
-        self._verify_state({outlet: True})
-
-    def off(self, outlet):
-        """ Turns off a specifc outlet on the np02b.
-
-        Args:
-            outlet: string of the outlet to turn off ('1' or '2')
-        """
-        self.tnhelper.cmd('pset %s 0' % outlet)
-        self._verify_state({outlet: False})
-
-    def reboot(self, outlet):
-        """ Toggles a specific outlet on the np02b to off, then to on.
-
-        Args:
-            outlet: string of the outlet to reboot ('1' or '2')
-        """
-        self.off(outlet)
-        self._verify_state({outlet: False})
-        self.on(outlet)
-        self._verify_state({outlet: True})
-
-    def status(self):
-        """ Returns the status of the np02b outlets.
-
-        Return:
-            a dict mapping outlet strings ('1' and '2') to:
-                True if outlet is ON
-                False if outlet is OFF
-        """
-        res = self.tnhelper.cmd('pshow')
-        status_list = re.findall('(ON|OFF)', res)
-        status_dict = {}
-        for i, status in enumerate(status_list):
-            status_dict[str(i + 1)] = (status == 'ON')
-        return status_dict
-
-    def close(self):
-        """Ensure connection to device is closed.
-
-        In this implementation, this shouldn't be necessary, but could be in
-        others that open on creation.
-        """
-        self.tnhelper.close()
-
-    def _verify_state(self, expected_state, timeout=3):
-        """Returns when expected_state is reached on device.
-
-        In order to prevent command functions from exiting until the desired
-        effect has occurred, this function verifys that the expected_state is a
-        subset of the desired state.
-
-        Args:
-            expected_state: a dict representing the expected state of one or
-                more outlets on the device. Maps outlet strings ('1' and/or '2')
-                to:
-                    True if outlet is expected to be ON.
-                    False if outlet is expected to be OFF.
-            timeout (default: 3): time in seconds until raising an exception.
-
-        Return:
-            True, if expected_state is reached.
-
-        Raises:
-            PduError if expected_state has not been reached by timeout.
-        """
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            actual_state = self.status()
-            if expected_state.items() <= actual_state.items():
-                return True
-            time.sleep(.1)
-        raise pdu.PduError('Timeout while verifying state.\n'
-                           'Expected State: %s\n'
-                           'Actual State: %s' % (expected_state, actual_state))
-
-
-class _TNHelperNP02B(object):
-    """An internal helper class for Telnet with the Synaccess NP02B Pdu. This
-    helper is specific to the idiosyncrasies of the NP02B and therefore should
-    not be used with other devices.
-    """
-    def __init__(self, host):
-        self._tn = telnetlib.Telnet()
-        self.host = host
-        self.tx_cmd_separator = '\n\r'
-        self.rx_cmd_separator = '\r\n'
-        self.prompt = '>'
-
-    """
-    Executes a command on the device via telnet.
-    Args:
-        cmd_str: A string of the command to be run.
-    Returns:
-        A string of the response from the valid command (often empty).
-    """
-
-    def cmd(self, cmd_str):
-        # Open session
-        try:
-            self._tn.open(self.host, timeout=3)
-        except:
-            raise pdu.PduError("Failed to open telnet session to host (%s)" %
-                               self.host)
-        time.sleep(.1)
-
-        # Read to end of first prompt
-        cmd_str.strip(self.tx_cmd_separator)
-        self._tn.read_eager()
-        time.sleep(.1)
-
-        # Write command and read all output text
-        self._tn.write(utils.ascii_string(cmd_str + self.tx_cmd_separator))
-        res = self._tn.read_until(utils.ascii_string(self.prompt), 2)
-
-        # Parses out the commands output
-        if res is None:
-            raise pdu.PduError("Command failed: %s" % cmd_str)
-        res = res.decode()
-        if re.search('Invalid', res):
-            raise pdu.PduError("Command Invalid: %s" % cmd_str)
-        res = res.replace(self.prompt, '')
-        res = res.replace(self.tx_cmd_separator, '')
-        res = res.replace(self.rx_cmd_separator, '')
-        res = res.replace(cmd_str, '')
-
-        # Close session
-        self._tn.close()
-
-        time.sleep(0.5)
-
-        return res
-
-    def close(self):
-        self._tn.close()
diff --git a/src/antlion/controllers/power_metrics.py b/src/antlion/controllers/power_metrics.py
deleted file mode 100644
index 0c306ce..0000000
--- a/src/antlion/controllers/power_metrics.py
+++ /dev/null
@@ -1,339 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import math
-import numpy as np
-
-# Metrics timestamp keys
-START_TIMESTAMP = 'start'
-END_TIMESTAMP = 'end'
-
-# Unit type constants
-CURRENT = 'current'
-POWER = 'power'
-TIME = 'time'
-VOLTAGE = 'voltage'
-
-# Unit constants
-MILLIVOLT = 'mV'
-VOLT = 'V'
-MILLIAMP = 'mA'
-AMP = 'A'
-AMPERE = AMP
-MILLIWATT = 'mW'
-WATT = 'W'
-MILLISECOND = 'ms'
-SECOND = 's'
-MINUTE = 'm'
-HOUR = 'h'
-
-CONVERSION_TABLES = {
-    CURRENT: {
-        MILLIAMP: 0.001,
-        AMP: 1
-    },
-    POWER: {
-        MILLIWATT: 0.001,
-        WATT: 1
-    },
-    TIME: {
-        MILLISECOND: 0.001,
-        SECOND: 1,
-        MINUTE: 60,
-        HOUR: 3600
-    },
-    VOLTAGE: {
-        MILLIVOLT: 0.001,
-        VOLT : 1
-    }
-}
-
-
-class Metric(object):
-    """Base class for describing power measurement values. Each object contains
-    an value and a unit. Enables some basic arithmetic operations with other
-    measurements of the same unit type.
-
-    Attributes:
-        value: Numeric value of the measurement
-        _unit_type: Unit type of the measurement (e.g. current, power)
-        unit: Unit of the measurement (e.g. W, mA)
-    """
-
-    def __init__(self, value, unit_type, unit, name=None):
-        if unit_type not in CONVERSION_TABLES:
-            raise TypeError(
-                '%s is not a valid unit type, valid unit types are %s' % (
-                    unit_type, str(CONVERSION_TABLES.keys)))
-        self.value = value
-        self.unit = unit
-        self.name = name
-        self._unit_type = unit_type
-
-    # Convenience constructor methods
-    @staticmethod
-    def amps(amps, name=None):
-        """Create a new current measurement, in amps."""
-        return Metric(amps, CURRENT, AMP, name=name)
-
-    @staticmethod
-    def watts(watts, name=None):
-        """Create a new power measurement, in watts."""
-        return Metric(watts, POWER, WATT, name=name)
-
-    @staticmethod
-    def seconds(seconds, name=None):
-        """Create a new time measurement, in seconds."""
-        return Metric(seconds, TIME, SECOND, name=name)
-
-    # Comparison methods
-
-    def __eq__(self, other):
-        return self.value == other.to_unit(self.unit).value
-
-    def __lt__(self, other):
-        return self.value < other.to_unit(self.unit).value
-
-    def __le__(self, other):
-        return self == other or self < other
-
-    # Addition and subtraction with other measurements
-
-    def __add__(self, other):
-        """Adds measurements of compatible unit types. The result will be in the
-        same units as self.
-        """
-        return Metric(self.value + other.to_unit(self.unit).value,
-                      self._unit_type, self.unit, name=self.name)
-
-    def __sub__(self, other):
-        """Subtracts measurements of compatible unit types. The result will be
-        in the same units as self.
-        """
-        return Metric(self.value - other.to_unit(self.unit).value,
-                      self._unit_type, self.unit, name=self.name)
-
-    # String representation
-
-    def __str__(self):
-        return '%g%s' % (self.value, self.unit)
-
-    def __repr__(self):
-        return str(self)
-
-    def to_unit(self, new_unit):
-        """Create an equivalent measurement under a different unit.
-        e.g. 0.5W -> 500mW
-
-        Args:
-            new_unit: Target unit. Must be compatible with current unit.
-
-        Returns: A new measurement with the converted value and unit.
-        """
-        try:
-            new_value = self.value * (
-                CONVERSION_TABLES[self._unit_type][self.unit] /
-                CONVERSION_TABLES[self._unit_type][new_unit])
-        except KeyError:
-            raise TypeError('Incompatible units: %s, %s' %
-                            (self.unit, new_unit))
-        return Metric(new_value, self._unit_type, new_unit, self.name)
-
-
-def import_raw_data(path):
-    """Create a generator from a Monsoon data file.
-
-    Args:
-        path: path to raw data file
-
-    Returns: generator that yields (timestamp, sample) per line
-    """
-    with open(path, 'r') as f:
-        for line in f:
-            time, sample = line.split()
-            yield float(time[:-1]), float(sample)
-
-
-def generate_percentiles(monsoon_file, timestamps, percentiles):
-    """Generates metrics .
-
-    Args:
-        monsoon_file: monsoon-like file where each line has two
-            numbers separated by a space, in the format:
-            seconds_since_epoch amperes
-            seconds_since_epoch amperes
-        timestamps: dict following the output format of
-            instrumentation_proto_parser.get_test_timestamps()
-        percentiles: percentiles to be returned
-    """
-    if timestamps is None:
-        timestamps = {}
-    test_starts = {}
-    test_ends = {}
-    for seg_name, times in timestamps.items():
-        if START_TIMESTAMP in times and END_TIMESTAMP in times:
-            test_starts[seg_name] = Metric(
-                times[START_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-            test_ends[seg_name] = Metric(
-                times[END_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-
-    arrays = {}
-    for seg_name in test_starts:
-        arrays[seg_name] = []
-
-    with open(monsoon_file, 'r') as m:
-        for line in m:
-            timestamp = float(line.strip().split()[0])
-            value = float(line.strip().split()[1])
-            for seg_name in arrays.keys():
-                if test_starts[seg_name] <= timestamp <= test_ends[seg_name]:
-                    arrays[seg_name].append(value)
-
-    results = {}
-    for seg_name in arrays:
-        if len(arrays[seg_name]) == 0:
-            continue
-
-        pairs = zip(percentiles, np.percentile(arrays[seg_name],
-                                               percentiles))
-        results[seg_name] = [
-            Metric.amps(p[1], 'percentile_%s' % p[0]).to_unit(MILLIAMP) for p in
-            pairs
-        ]
-    return results
-
-
-def generate_test_metrics(raw_data, timestamps=None,
-                          voltage=None):
-    """Split the data into individual test metrics, based on the timestamps
-    given as a dict.
-
-    Args:
-        raw_data: raw data as list or generator of (timestamp, sample)
-        timestamps: dict following the output format of
-            instrumentation_proto_parser.get_test_timestamps()
-        voltage: voltage used during measurements
-    """
-
-    # Initialize metrics for each test
-    if timestamps is None:
-        timestamps = {}
-    test_starts = {}
-    test_ends = {}
-    test_metrics = {}
-    for seg_name, times in timestamps.items():
-        if START_TIMESTAMP in times and END_TIMESTAMP in times:
-            test_metrics[seg_name] = PowerMetrics(voltage)
-            test_starts[seg_name] = Metric(
-                times[START_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-            test_ends[seg_name] = Metric(
-                times[END_TIMESTAMP], TIME, MILLISECOND).to_unit(
-                SECOND).value
-
-    # Assign data to tests based on timestamps
-    for timestamp, amps in raw_data:
-        for seg_name in test_metrics.keys():
-            if test_starts[seg_name] <= timestamp <= test_ends[seg_name]:
-                test_metrics[seg_name].update_metrics(amps)
-
-    result = {}
-    for seg_name, power_metrics in test_metrics.items():
-        result[seg_name] = [
-            power_metrics.avg_current,
-            power_metrics.max_current,
-            power_metrics.min_current,
-            power_metrics.stdev_current,
-            power_metrics.avg_power]
-    return result
-
-
-class PowerMetrics(object):
-    """Class for processing raw power metrics generated by Monsoon measurements.
-    Provides useful metrics such as average current, max current, and average
-    power. Can generate individual test metrics.
-
-    See section "Numeric metrics" below for available metrics.
-    """
-
-    def __init__(self, voltage):
-        """Create a PowerMetrics.
-
-        Args:
-            voltage: Voltage of the measurement
-        """
-        self._voltage = voltage
-        self._num_samples = 0
-        self._sum_currents = 0
-        self._sum_squares = 0
-        self._max_current = None
-        self._min_current = None
-        self.test_metrics = {}
-
-    def update_metrics(self, sample):
-        """Update the running metrics with the current sample.
-
-        Args:
-            sample: A current sample in Amps.
-        """
-        self._num_samples += 1
-        self._sum_currents += sample
-        self._sum_squares += sample ** 2
-        if self._max_current is None or sample > self._max_current:
-            self._max_current = sample
-        if self._min_current is None or sample < self._min_current:
-            self._min_current = sample
-
-    # Numeric metrics
-    @property
-    def avg_current(self):
-        """Average current, in milliamps."""
-        if not self._num_samples:
-            return Metric.amps(0).to_unit(MILLIAMP)
-        return (Metric.amps(self._sum_currents / self._num_samples,
-                            'avg_current')
-                .to_unit(MILLIAMP))
-
-    @property
-    def max_current(self):
-        """Max current, in milliamps."""
-        return Metric.amps(self._max_current or 0, 'max_current').to_unit(
-            MILLIAMP)
-
-    @property
-    def min_current(self):
-        """Min current, in milliamps."""
-        return Metric.amps(self._min_current or 0, 'min_current').to_unit(
-            MILLIAMP)
-
-    @property
-    def stdev_current(self):
-        """Standard deviation of current values, in milliamps."""
-        if self._num_samples < 2:
-            return Metric.amps(0, 'stdev_current').to_unit(MILLIAMP)
-        stdev = math.sqrt(
-            (self._sum_squares - (
-                self._num_samples * self.avg_current.to_unit(AMP).value ** 2))
-            / (self._num_samples - 1))
-        return Metric.amps(stdev, 'stdev_current').to_unit(MILLIAMP)
-
-    @property
-    def avg_power(self):
-        """Average power, in milliwatts."""
-        return Metric.watts(self.avg_current.to_unit(AMP).value * self._voltage,
-                            'avg_power').to_unit(MILLIWATT)
diff --git a/src/antlion/controllers/power_monitor.py b/src/antlion/controllers/power_monitor.py
deleted file mode 100644
index 6a229f7..0000000
--- a/src/antlion/controllers/power_monitor.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import tempfile
-
-from antlion.controllers import power_metrics
-from antlion.controllers.monsoon_lib.api.common import MonsoonError
-
-
-class ResourcesRegistryError(Exception):
-    pass
-
-
-_REGISTRY = {}
-
-
-def update_registry(registry):
-    """Updates the registry with the one passed.
-
-    Overriding a previous value is not allowed.
-
-    Args:
-        registry: A dictionary.
-    Raises:
-        ResourceRegistryError if a property is updated with a different value.
-    """
-    for k, v in registry.items():
-        if k in _REGISTRY:
-            if v == _REGISTRY[k]:
-                continue
-            raise ResourcesRegistryError(
-                'Overwriting resources_registry fields is not allowed. %s was '
-                'already defined as %s and was attempted to be overwritten '
-                'with %s.' % (k, _REGISTRY[k], v))
-        _REGISTRY[k] = v
-
-
-def get_registry():
-    return _REGISTRY
-
-
-def _write_raw_data_in_standard_format(raw_data, path, start_time):
-    """Writes the raw data to a file in (seconds since epoch, amps).
-
-    TODO(b/155294049): Deprecate this once Monsoon controller output
-        format is updated.
-
-    Args:
-        start_time: Measurement start time in seconds since epoch
-        raw_data: raw data as list or generator of (timestamp, sample)
-        path: path to write output
-    """
-    with open(path, 'w') as f:
-        for timestamp, amps in raw_data:
-            f.write('%s %s\n' %
-                    (timestamp + start_time, amps))
-
-
-class BasePowerMonitor(object):
-
-    def setup(self, **kwargs):
-        raise NotImplementedError()
-
-    def connect_usb(self, **kwargs):
-        raise NotImplementedError()
-
-    def measure(self, **kwargs):
-        raise NotImplementedError()
-
-    def release_resources(self, **kwargs):
-        raise NotImplementedError()
-
-    def disconnect_usb(self, **kwargs):
-        raise NotImplementedError()
-
-    def get_metrics(self, **kwargs):
-        raise NotImplementedError()
-
-    def get_waveform(self, **kwargs):
-        raise NotImplementedError()
-
-    def teardown(self, **kwargs):
-        raise NotImplementedError()
-
-
-class PowerMonitorMonsoonFacade(BasePowerMonitor):
-
-    def __init__(self, monsoon):
-        """Constructs a PowerMonitorFacade.
-
-        Args:
-            monsoon: delegate monsoon object, either
-                antlion.controllers.monsoon_lib.api.hvpm.monsoon.Monsoon or
-                antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon.Monsoon.
-        """
-        self.monsoon = monsoon
-        self._log = logging.getLogger()
-
-    def setup(self, monsoon_config=None, **__):
-        """Set up the Monsoon controller for this testclass/testcase."""
-
-        if monsoon_config is None:
-            raise MonsoonError('monsoon_config can not be None')
-
-        self._log.info('Setting up Monsoon %s' % self.monsoon.serial)
-        voltage = monsoon_config.get_numeric('voltage', 4.2)
-        self.monsoon.set_voltage_safe(voltage)
-        if 'max_current' in monsoon_config:
-            self.monsoon.set_max_current(
-                monsoon_config.get_numeric('max_current'))
-
-    def power_cycle(self, monsoon_config=None, **__):
-        """Power cycles the delegated monsoon controller."""
-
-        if monsoon_config is None:
-            raise MonsoonError('monsoon_config can not be None')
-
-        self._log.info('Setting up Monsoon %s' % self.monsoon.serial)
-        voltage = monsoon_config.get_numeric('voltage', 4.2)
-        self._log.info('Setting up Monsoon voltage %s' % voltage)
-        self.monsoon.set_voltage_safe(0)
-        if 'max_current' in monsoon_config:
-            self.monsoon.set_max_current(
-                monsoon_config.get_numeric('max_current'))
-            self.monsoon.set_max_initial_current(
-                monsoon_config.get_numeric('max_current'))
-        self.connect_usb()
-        self.monsoon.set_voltage_safe(voltage)
-
-    def connect_usb(self, **__):
-        self.monsoon.usb('on')
-
-    def measure(self, measurement_args=None, start_time=None,
-                monsoon_output_path=None, **__):
-        if measurement_args is None:
-            raise MonsoonError('measurement_args can not be None')
-
-        with tempfile.NamedTemporaryFile(prefix='monsoon_') as tmon:
-            self.monsoon.measure_power(**measurement_args,
-                                       output_path=tmon.name)
-
-            if monsoon_output_path and start_time is not None:
-                _write_raw_data_in_standard_format(
-                    power_metrics.import_raw_data(tmon.name),
-                    monsoon_output_path, start_time)
-
-    def release_resources(self, **__):
-        # nothing to do
-        pass
-
-    def disconnect_usb(self, **__):
-        self.monsoon.usb('off')
-
-    def get_waveform(self, file_path=None):
-        """Parses a file to obtain all current (in amps) samples.
-
-        Args:
-            file_path: Path to a monsoon file.
-
-        Returns:
-            A list of tuples in which the first element is a timestamp and the
-            second element is the sampled current at that time.
-        """
-        if file_path is None:
-            raise MonsoonError('file_path can not be None')
-
-        return list(power_metrics.import_raw_data(file_path))
-
-    def get_metrics(self, start_time=None, voltage=None, monsoon_file_path=None,
-                    timestamps=None, **__):
-        """Parses a monsoon_file_path to compute the consumed power and other
-        power related metrics.
-
-        Args:
-            start_time: Time when the measurement started, this is used to
-                correlate timestamps from the device and from the power samples.
-            voltage: Voltage used when the measurement started. Used to compute
-                power from current.
-            monsoon_file_path: Path to a monsoon file.
-            timestamps: Named timestamps delimiting the segments of interest.
-            **__:
-
-        Returns:
-            A list of power_metrics.Metric.
-        """
-        if start_time is None:
-            raise MonsoonError('start_time can not be None')
-        if voltage is None:
-            raise MonsoonError('voltage can not be None')
-        if monsoon_file_path is None:
-            raise MonsoonError('monsoon_file_path can not be None')
-        if timestamps is None:
-            raise MonsoonError('timestamps can not be None')
-
-        return power_metrics.generate_test_metrics(
-            power_metrics.import_raw_data(monsoon_file_path),
-            timestamps=timestamps, voltage=voltage)
-
-    def teardown(self, **__):
-        # nothing to do
-        pass
diff --git a/src/antlion/controllers/relay_device_controller.py b/src/antlion/controllers/relay_device_controller.py
deleted file mode 100644
index 158e484..0000000
--- a/src/antlion/controllers/relay_device_controller.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-
-from antlion.controllers.relay_lib.relay_rig import RelayRig
-
-MOBLY_CONTROLLER_CONFIG_NAME = "RelayDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "relay_devices"
-
-
-def create(config):
-    """Creates RelayDevice controller objects.
-
-        Args:
-            config: Either one of two types:
-
-            A filename to a RelayController config (json file)
-            A RelayController config/dict composed of:
-                boards: A list of controller boards (see tests).
-                devices: A list of RelayDevices attached to the boards.
-
-        Returns:
-                A list of RelayDevice objects.
-    """
-    if type(config) is str:
-        return _create_from_external_config_file(config)
-    elif type(config) is dict:
-        return _create_from_dict(config)
-
-
-def _create_from_external_config_file(config_filename):
-    """Creates RelayDevice controller objects from an external config file.
-
-    Args:
-        config_filename: The filename of the RelayController config.
-
-    Returns:
-        A list of RelayDevice objects.
-    """
-    with open(config_filename) as json_file:
-        return _create_from_dict(json.load(json_file))
-
-
-def _create_from_dict(config):
-    """Creates RelayDevice controller objects from a dictionary.
-
-    Args:
-        config: The dictionary containing the RelayController config.
-
-    Returns:
-        A list of RelayDevice objects.
-    """
-    devices = list()
-
-    relay_rig = RelayRig(config)
-    for device in relay_rig.devices.values():
-        devices.append(device)
-
-    return devices
-
-
-def destroy(relay_devices):
-    """Cleans up RelayDevice objects.
-
-        Args:
-            relay_devices: A list of AndroidDevice objects.
-    """
-    for device in relay_devices:
-        device.clean_up()
-
-
-def get_info(relay_devices):
-    """Get information on a list of RelayDevice objects.
-
-    Args:
-        relay_devices: A list of RelayDevice objects.
-
-    Returns:
-        A list of dict, each representing info for an RelayDevice objects.
-    """
-    device_info = []
-    for device in relay_devices:
-        relay_ids = list()
-        for relay in device.relays:
-            relay_ids.append(relay)
-        info = {"name": device.name, "relays": relay_ids}
-        device_info.append(info)
-    return device_info
diff --git a/src/antlion/controllers/relay_lib/__init__.py b/src/antlion/controllers/relay_lib/__init__.py
deleted file mode 100644
index 7f1a899..0000000
--- a/src/antlion/controllers/relay_lib/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/src/antlion/controllers/relay_lib/ak_xb10_speaker.py b/src/antlion/controllers/relay_lib/ak_xb10_speaker.py
deleted file mode 100644
index 465cf92..0000000
--- a/src/antlion/controllers/relay_lib/ak_xb10_speaker.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-POWER_ON_WAIT_TIME = 2
-POWER_OFF_WAIT_TIME = 6
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-    PAIR = 'Pair'
-
-
-class AkXB10Speaker(BluetoothRelayDevice):
-    """A&K XB10 Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_on(self):
-        self._hold_button(Buttons.POWER, POWER_ON_WAIT_TIME)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_OFF_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        self._hold_button(Buttons.PAIR, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/devices/bluetooth_relay_device.py b/src/antlion/controllers/relay_lib/devices/bluetooth_relay_device.py
deleted file mode 100644
index 369f72d..0000000
--- a/src/antlion/controllers/relay_lib/devices/bluetooth_relay_device.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.controllers.relay_lib.generic_relay_device import GenericRelayDevice
-from antlion.controllers.relay_lib.helpers import validate_key
-
-
-class BluetoothRelayDevice(GenericRelayDevice):
-    """A base class for bluetooth devices.
-
-    This base class is similar to GenericRelayDevice, but requires a mac_address
-    to be set from within the config taken in. This helps with type checking
-    for use of relays against bluetooth utils.
-    """
-    def __init__(self, config, relay_rig):
-        GenericRelayDevice.__init__(self, config, relay_rig)
-
-        self.mac_address = validate_key('mac_address', config, str,
-                                        self.__class__.__name__)
-
-    def get_mac_address(self):
-        """Returns the mac address of this device."""
-        return self.mac_address
-
diff --git a/src/antlion/controllers/relay_lib/dongles.py b/src/antlion/controllers/relay_lib/dongles.py
deleted file mode 100644
index 234a58b..0000000
--- a/src/antlion/controllers/relay_lib/dongles.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-# Necessary timeout in between commands
-CMD_TIMEOUT = 1.2
-# Pairing mode activation wait time
-PAIRING_MODE_WAIT_TIME = 4.5
-SINGLE_ACTION_SHORT_WAIT_TIME = 0.6
-SINGLE_ACTION_LONG_WAIT_TIME = 2.0
-MISSING_RELAY_MSG = 'Relay config for Three button "%s" missing relay "%s".'
-
-
-class Buttons(enum.Enum):
-    ACTION = 'Action'
-    NEXT = 'Next'
-    PREVIOUS = 'Previous'
-
-
-class SingleButtonDongle(BluetoothRelayDevice):
-    """A Bluetooth dongle with one generic button Normally action.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relay(Buttons.ACTION.value)
-
-    def enter_pairing_mode(self):
-        """Enters pairing mode. Blocks the thread until pairing mode is set.
-
-        Holds down the 'ACTION' buttons for PAIRING_MODE_WAIT_TIME seconds.
-        """
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=PAIRING_MODE_WAIT_TIME)
-
-    def press_play_pause(self):
-        """Briefly presses the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-
-    def press_vr_mode(self):
-        """Long press the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_LONG_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-
-class ThreeButtonDongle(BluetoothRelayDevice):
-    """A Bluetooth dongle with three generic buttons Normally action, next, and
-     previous.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-    def enter_pairing_mode(self):
-        """Enters pairing mode. Blocks the thread until pairing mode is set.
-
-        Holds down the 'ACTION' buttons for a little over 5 seconds.
-        """
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=PAIRING_MODE_WAIT_TIME)
-
-    def press_play_pause(self):
-        """Briefly presses the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
-
-    def press_vr_mode(self):
-        """Long press the Action button."""
-        self.relays[Buttons.ACTION.value].set_nc_for(
-            seconds=SINGLE_ACTION_LONG_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
-
-    def press_next(self):
-        """Briefly presses the Next button."""
-        self.relays[Buttons.NEXT.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
-
-    def press_previous(self):
-        """Briefly presses the Previous button."""
-        self.relays[Buttons.PREVIOUS.value].set_nc_for(
-            seconds=SINGLE_ACTION_SHORT_WAIT_TIME)
-        time.sleep(CMD_TIMEOUT)
diff --git a/src/antlion/controllers/relay_lib/earstudio_receiver.py b/src/antlion/controllers/relay_lib/earstudio_receiver.py
deleted file mode 100644
index 50bf62d..0000000
--- a/src/antlion/controllers/relay_lib/earstudio_receiver.py
+++ /dev/null
@@ -1,148 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-SHORT_PRESS_WAIT_TIME = 0.5
-MEDIUM_PRESS_WAIT_TIME = 3.0
-LONG_PRESS_WAIT_TIME = 4.5
-WAIT_FOR_EFFECT_TIME = 1
-
-
-class Buttons(enum.Enum):
-    NEXT = 'Next'
-    PREVIOUS = "Previous"
-    PLAY_PAUSE = 'Play_pause'
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-
-
-class EarstudioReceiver(BluetoothRelayDevice):
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def power_on(self):
-        """Power on the Earstudio device.
-
-        BLUE LED blinks once when power is on. "power-on sound" plays when it is
-        on. Automatically connects to a device that has been connected before.
-        GREEN LED blinks once every 3 seconds after the "connection sound."
-        Enters Discoverable Mode/Paring Mode when there is no device that has
-        been connected before. GREEN LED blinks twice every 0.5 seconds.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def power_off(self):
-        """Power off the Earstudio device.
-
-        RED LED blinks once right before power off. "power-off sound" plays when
-        it is off.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(LONG_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_play_pause(self):
-        """Toggle audio play state.
-
-        GREEN LED slowly blinks once every 3 seconds during Bluetooth/USB
-        playback.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_accept_call(self):
-        """Receive incoming call.
-
-        BLUE LED slowly blinks once every 3 seconds
-        "Call-receiving sound" when received.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_reject_call(self):
-        """Reject incoming call.
-
-        "Call-rejection sound" when refused.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_end_call(self):
-        """End ongoing call.
-
-        "Call-end sound" when ended.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_next(self):
-        """Skip to the next track."""
-        self.relays[Buttons.NEXT.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def toggle_ambient_mode(self):
-        """Turn ambient mode on/off.
-
-        Only available during playback.
-        To use it, you must set 'Ambient Shortcut Key' to 'on' in the EarStudio
-        app.
-        """
-        self.relays[Buttons.NEXT.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_previous(self):
-        """Rewind to beginning of current or previous track."""
-        self.relays[Buttons.PREVIOUS.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def enter_pairing_mode(self):
-        """Enter BlueTooth pairing mode.
-
-        GREEN LED blinks twice every 0.5 seconds after "enter paring-mode
-        sound." Disconnects from the current connected device when entering
-        this mode.
-        """
-        self.relays[Buttons.PREVIOUS.value].set_nc_for(MEDIUM_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_up(self, press_duration=SHORT_PRESS_WAIT_TIME):
-        """Turn up the volume.
-
-        Volume increases by 0.5dB for each press.
-        Press&holding the button increases the volume consistently up to 6dB.
-        Args:
-          press_duration (int|float): how long to hold button for.
-        """
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(press_duration)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_down(self, press_duration=SHORT_PRESS_WAIT_TIME):
-        """Turn down the volume.
-
-        Volume decreases by 0.5dB for each press.
-        Press&hold the button decreases the volume consistently down to -60dB.
-        Pressing the button at the minimum volume turns to a mute level.
-        Args:
-          press_duration (int|float): how long to hold button for.
-        """
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(press_duration)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
diff --git a/src/antlion/controllers/relay_lib/errors.py b/src/antlion/controllers/relay_lib/errors.py
deleted file mode 100644
index 5af5d60..0000000
--- a/src/antlion/controllers/relay_lib/errors.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion import signals
-
-
-class RelayConfigError(signals.ControllerError):
-    """An error found within the RelayRig config file."""
-
-
-class RelayDeviceConnectionError(signals.ControllerError):
-    """An error for being unable to connect to the device."""
diff --git a/src/antlion/controllers/relay_lib/fugu_remote.py b/src/antlion/controllers/relay_lib/fugu_remote.py
deleted file mode 100644
index db706c0..0000000
--- a/src/antlion/controllers/relay_lib/fugu_remote.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-import enum
-
-from antlion.controllers.relay_lib.relay import SynchronizeRelays
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5.2
-
-
-class Buttons(enum.Enum):
-    HOME = 'Home'
-    BACK = 'Back'
-    PLAY_PAUSE = 'Play'
-
-
-class FuguRemote(BluetoothRelayDevice):
-    """A Nexus Player (Fugu) Remote.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-        # If the Fugu remote does have a power relay attached, turn it on.
-        power = 'Power'
-        if power in self.relays:
-            self.relays[power].set_nc()
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-    def enter_pairing_mode(self):
-        """Enters pairing mode. Blocks the thread until pairing mode is set.
-
-        Holds down the 'Home' and 'Back' buttons for a little over 5 seconds.
-        """
-        with SynchronizeRelays():
-            self.hold_down(Buttons.HOME.value)
-            self.hold_down(Buttons.BACK.value)
-
-        time.sleep(PAIRING_MODE_WAIT_TIME)
-
-        with SynchronizeRelays():
-            self.release(Buttons.HOME.value)
-            self.release(Buttons.BACK.value)
-
-    def press_play_pause(self):
-        """Briefly presses the Play/Pause button."""
-        self.press(Buttons.PLAY_PAUSE.value)
-
-    def press_home(self):
-        """Briefly presses the Home button."""
-        self.press(Buttons.HOME.value)
-
-    def press_back(self):
-        """Briefly presses the Back button."""
-        self.press(Buttons.BACK.value)
diff --git a/src/antlion/controllers/relay_lib/generic_relay_device.py b/src/antlion/controllers/relay_lib/generic_relay_device.py
deleted file mode 100644
index cf93400..0000000
--- a/src/antlion/controllers/relay_lib/generic_relay_device.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.relay import SynchronizeRelays
-from antlion.controllers.relay_lib.relay_device import RelayDevice
-
-MISSING_RELAY_MSG = 'Relay config for %s device "%s" missing relay "%s".'
-
-
-class GenericRelayDevice(RelayDevice):
-    """A default, all-encompassing implementation of RelayDevice.
-
-    This class allows for quick access to getting relay switches through the
-    subscript ([]) operator. Note that it does not allow for re-assignment or
-    additions to the relays dictionary.
-    """
-
-    def __init__(self, config, relay_rig):
-        RelayDevice.__init__(self, config, relay_rig)
-
-    def _ensure_config_contains_relays(self, relay_names):
-        for relay_name in relay_names:
-            self._ensure_config_contains_relay(relay_name)
-
-    def _ensure_config_contains_relay(self, relay_name):
-        """Throws an error if the relay does not exist."""
-        if relay_name not in self.relays:
-            raise RelayConfigError(MISSING_RELAY_MSG % (self.__class__.__name__,
-                                                        self.name, relay_name))
-
-    def get_button_names(self):
-        """Returns the list of all button names."""
-        return list(self.relays.keys())
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        with SynchronizeRelays():
-            for relay in self.relays.values():
-                relay.set_no()
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        with SynchronizeRelays():
-            for relay in self.relays.values():
-                if relay.is_dirty():
-                    relay.set_no()
-
-    def press(self, button_name):
-        """Presses the button for a short period of time.
-
-        Args:
-            button_name: the name of the button to press.
-        """
-        self.relays[button_name].set_nc_for()
-
-    def hold_down(self, button_name):
-        """Holds down the button until release is called.
-
-        If the button is already being held, the state does not change.
-
-        Args:
-            button_name: the name of the button to hold down.
-        """
-        self.relays[button_name].set_nc()
-
-    def release(self, button_name):
-        """Releases the held down button with name 'button_name'.
-
-        If the button is already depressed, the state does not change.
-
-        Args:
-            button_name: the name of the button to release.
-        """
-        self.relays[button_name].set_no()
diff --git a/src/antlion/controllers/relay_lib/headset.py b/src/antlion/controllers/relay_lib/headset.py
deleted file mode 100644
index 119b4f6..0000000
--- a/src/antlion/controllers/relay_lib/headset.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-import enum
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 9
-POWER_TOGGLE_WAIT_TIME = 2
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class Headset(BluetoothRelayDevice):
-    """Headset with same Power and Pair Button.
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def turn_power_on_and_enter_pairing_mode(self):
-        self._hold_button(Buttons.POWER, PAIRING_MODE_WAIT_TIME)
diff --git a/src/antlion/controllers/relay_lib/helpers.py b/src/antlion/controllers/relay_lib/helpers.py
deleted file mode 100644
index 463946c..0000000
--- a/src/antlion/controllers/relay_lib/helpers.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from six import string_types
-
-MISSING_KEY_ERR_MSG = 'key "%s" missing from %s. Offending object:\n %s'
-TYPE_MISMATCH_ERR_MSG = 'Key "%s" is of type %s. Expecting %s.' \
-                        ' Offending object:\n %s'
-
-
-def validate_key(key, dictionary, expected_type, source):
-    """Validates if a key exists and its value is the correct type.
-    Args:
-        key: The key in dictionary.
-        dictionary: The dictionary that should contain key.
-        expected_type: the type that key's value should have.
-        source: The name of the object being checked. Used for error messages.
-
-    Returns:
-        The value of dictionary[key] if no error was raised.
-
-    Raises:
-        RelayConfigError if the key does not exist, or is not of expected_type.
-    """
-    if key not in dictionary:
-        raise RelayConfigError(MISSING_KEY_ERR_MSG % (key, source, dictionary))
-    if expected_type == str:
-        if not isinstance(dictionary[key], string_types):
-            raise RelayConfigError(TYPE_MISMATCH_ERR_MSG %
-                                   (key, dictionary[key], expected_type,
-                                    dictionary))
-    elif not isinstance(dictionary[key], expected_type):
-        raise RelayConfigError(TYPE_MISMATCH_ERR_MSG %
-                               (key, dictionary[key], expected_type,
-                                dictionary))
-    return dictionary[key]
diff --git a/src/antlion/controllers/relay_lib/i6s_headset.py b/src/antlion/controllers/relay_lib/i6s_headset.py
deleted file mode 100644
index 7de5eba..0000000
--- a/src/antlion/controllers/relay_lib/i6s_headset.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 3
-WAIT_TIME = 0.1
-MISSING_RELAY_MSG = 'Relay config for i6s Headset "%s" missing relay "%s".'
-
-
-class Buttons(enum.Enum):
-    POWER = "Power"
-    NEXT = 'Next'
-    PREVIOUS = "Previous"
-    PLAY_PAUSE = 'Play_pause'
-    PAIR = "Pair"
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-
-
-class I6sHeadset(BluetoothRelayDevice):
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Turns off headset."""
-        self.relays[Buttons.PAIR.value].set_no_for(PAIRING_MODE_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        """Sets relay in paring mode."""
-        self.relays[Buttons.PAIR.value].set_no_for(PAIRING_MODE_WAIT_TIME)
-
-    def power_on(self):
-        """Power on relay."""
-        self.relays[Buttons.POWER.value].set_no_for(WAIT_TIME)
-
-    def press_play_pause(self):
-        """
-        Sets relay to
-            Play state : if there is no A2DP_streaming.
-            Pause state : if there is A2DP_streaming.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_no_for(WAIT_TIME)
-
-    def press_next(self):
-        """Skips to next song from relay_device."""
-        self.relays[Buttons.NEXT.value].set_no_for(WAIT_TIME)
-
-    def press_previous(self):
-        """Skips to previous song from relay_device."""
-        self.relays[Buttons.PREVIOUS.value].set_no_for(WAIT_TIME)
-
-    def press_volume_up(self):
-        """Increases volume from relay_device."""
-        self.relays[Buttons.VOLUME_UP.value].set_no_for(WAIT_TIME)
-
-    def press_volume_down(self):
-        """Decreases volume from relay_device."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_no_for(WAIT_TIME)
-
-    def press_initiate_call(self):
-        """Initiate call from relay device."""
-        for i in range(0, 2):
-            self.relays[Buttons.POWER.value].set_no_for(WAIT_TIME)
-        return True
-
-    def press_accept_call(self):
-        """Accepts call from relay device."""
-        self.relays[Buttons.POWER.value].set_no_for(WAIT_TIME)
-        return True
diff --git a/src/antlion/controllers/relay_lib/jaybird_x3.py b/src/antlion/controllers/relay_lib/jaybird_x3.py
deleted file mode 100644
index 991267a..0000000
--- a/src/antlion/controllers/relay_lib/jaybird_x3.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-SHORT_PRESS_WAIT_TIME = 0.5
-MED_PRESS_WAIT_TIME = 1.5
-POWER_ON_WAIT_TIME = 2.5
-LONG_PRESS_WAIT_TIME = 4.5
-
-WAIT_FOR_EFFECT_TIME = 2.5
-
-
-class Buttons(enum.Enum):
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-    POWER = "Power"
-
-
-class JaybirdX3Earbuds(BluetoothRelayDevice):
-    """Jaybird X3 earbuds model
-
-    A relay device class for Jaybird X3 earbuds that provides basic Bluetooth
-    """
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def power_off(self):
-        """If the device powers off, the LED will flash red before it
-        powers off. A voice prompt will say "POWER_OFF".
-        """
-        self.relays[Buttons.POWER.value].set_nc_for(LONG_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def power_on(self):
-        """If the device powers on, the LED will flash green.
-        A voice prompt will say "POWER ON".
-        """
-        self.relays[Buttons.POWER.value].set_nc_for(POWER_ON_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def enter_pairing_mode(self):
-        """The Jaybird can only enter pairing mode from an OFF state.
-        """
-        self.power_on()
-        self.power_off()
-        self.relays[Buttons.POWER.value].set_nc_for(LONG_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_play_pause(self):
-        """Toggles the audio play state."""
-        self.relays[Buttons.POWER.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def activate_voice_commands(self):
-        """Activates voice commands during music streaming."""
-        self.relays[Buttons.POWER.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_accept_call(self):
-        """Receives an incoming call."""
-        self.relays[Buttons.POWER.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_reject_call(self):
-        """Rejects an incoming call."""
-        self.relays[Buttons.POWER.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_next(self):
-        """Skips to the next track."""
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_previous(self):
-        """Rewinds to beginning of current or previous track."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_up(self):
-        """Turns up the volume."""
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def press_volume_down(self):
-        """Turns down the volume."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(SHORT_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def toggle_hands_free(self):
-        """Switches call audio between the phone and X3 buds."""
-        self.relays[Buttons.VOLUME_UP.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
-
-    def mute_phone_call(self):
-        """Mutes phone call audio."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_nc_for(MED_PRESS_WAIT_TIME)
-        time.sleep(WAIT_FOR_EFFECT_TIME)
diff --git a/src/antlion/controllers/relay_lib/logitech_headset.py b/src/antlion/controllers/relay_lib/logitech_headset.py
deleted file mode 100644
index 5c95bac..0000000
--- a/src/antlion/controllers/relay_lib/logitech_headset.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Device Details:
-https://www.logitech.com/en-in/product/bluetooth-audio-adapter#specification-tabular
-"""
-import enum
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-WAIT_TIME = 0.1
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-    PAIR = 'Pair'
-
-
-class LogitechAudioReceiver(BluetoothRelayDevice):
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
-
-    def power_on(self):
-        """Power on relay."""
-        self.relays[Buttons.POWER.value].set_nc()
-
-    def enter_pairing_mode(self):
-        """Sets relay in paring mode."""
-        self.relays[Buttons.PAIR.value].set_nc()
diff --git a/src/antlion/controllers/relay_lib/power_supply.py b/src/antlion/controllers/relay_lib/power_supply.py
deleted file mode 100644
index f1c6213..0000000
--- a/src/antlion/controllers/relay_lib/power_supply.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-import enum
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-POWER_TOGGLE_WAIT_TIME = 0.5
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class PowerSupply(BluetoothRelayDevice):
-    """Power Supply for Headset.
-
-    Wraps the button presses.
-    """
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def setup(self):
-        """Do nothing, since this is not a headset"""
-        return True
-
-    def clean_up(self):
-        """Do nothing, since this is not a headset"""
-        return True
diff --git a/src/antlion/controllers/relay_lib/rdl_relay_board.py b/src/antlion/controllers/relay_lib/rdl_relay_board.py
deleted file mode 100644
index e4d5c37..0000000
--- a/src/antlion/controllers/relay_lib/rdl_relay_board.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.usb_relay_board_base import UsbRelayBoardBase
-from pylibftdi import BitBangDevice
-
-
-class RdlRelayBoard(UsbRelayBoardBase):
-    def set(self, relay_position, value):
-        """Returns the current status of the passed in relay.
-
-        Args:
-            relay_position: Relay position.
-            value: Turn_on or Turn_off the relay for the given relay_position.
-        """
-        with BitBangDevice(self.device) as bb:
-            if value == RelayState.NO:
-                bb.port |= self.address[relay_position]
-            else:
-                bb.port &= ~(self.address[relay_position])
-        self.status_dict[relay_position] = value
diff --git a/src/antlion/controllers/relay_lib/relay.py b/src/antlion/controllers/relay_lib/relay.py
deleted file mode 100644
index fbac1de..0000000
--- a/src/antlion/controllers/relay_lib/relay.py
+++ /dev/null
@@ -1,207 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-from time import sleep
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-
-
-class RelayState(Enum):
-    """Enum for possible Relay States."""
-    # Pretend this means 'OFF'
-    NO = 'NORMALLY_OPEN'
-    # Pretend this means 'ON'
-    NC = 'NORMALLY_CLOSED'
-
-
-class SynchronizeRelays:
-    """A class that allows for relays to change state nearly simultaneously.
-
-    Can be used with the 'with' statement in Python:
-
-    with SynchronizeRelays():
-        relay1.set_no()
-        relay2.set_nc()
-
-    Note that the thread will still wait for RELAY_TRANSITION_WAIT_TIME
-    after execution leaves the 'with' statement.
-    """
-    _sync_sleep_flag = False
-
-    def __enter__(self):
-        self.prev_toggle_time = Relay.transition_wait_time
-        self.prev_sync_flag = SynchronizeRelays._sync_sleep_flag
-        Relay.transition_wait_time = 0
-        SynchronizeRelays._sync_sleep_flag = False
-
-    def __exit__(self, type, value, traceback):
-        if SynchronizeRelays._sync_sleep_flag:
-            sleep(Relay.transition_wait_time)
-
-        Relay.transition_wait_time = self.prev_toggle_time
-        SynchronizeRelays._sync_sleep_flag = self.prev_sync_flag
-
-
-class Relay(object):
-    """A class representing a single relay switch on a RelayBoard.
-
-    References to these relays are stored in both the RelayBoard and the
-    RelayDevice classes under the variable "relays". GenericRelayDevice can also
-    access these relays through the subscript ([]) operator.
-
-    At the moment, relays only have a valid state of 'ON' or 'OFF'. This may be
-    extended in a subclass if needed. Keep in mind that if this is done, changes
-    will also need to be made in the RelayRigParser class to initialize the
-    relays.
-
-    """
-    """How long to wait for relays to transition state."""
-    transition_wait_time = .2
-    button_press_time = .25
-
-    def __init__(self, relay_board, position):
-        self.relay_board = relay_board
-        self.position = position
-        self._original_state = None
-        self.relay_id = "%s/%s" % (self.relay_board.name, self.position)
-
-    def set_no(self):
-        """Sets the relay to the 'NO' state. Shorthand for set(RelayState.NO).
-
-        Blocks the thread for Relay.transition_wait_time.
-        """
-        self.set(RelayState.NO)
-
-    def set_nc(self):
-        """Sets the relay to the 'NC' state. Shorthand for set(RelayState.NC).
-
-        Blocks the thread for Relay.transition_wait_time.
-
-        """
-        self.set(RelayState.NC)
-
-    def toggle(self):
-        """Swaps the state from 'NO' to 'NC' or 'NC' to 'NO'.
-        Blocks the thread for Relay.transition_wait_time.
-        """
-        if self.get_status() == RelayState.NO:
-            self.set(RelayState.NC)
-        else:
-            self.set(RelayState.NO)
-
-    def set(self, state):
-        """Sets the relay to the 'NO' or 'NC' state.
-
-        Blocks the thread for Relay.transition_wait_time.
-
-        Args:
-            state: either 'NO' or 'NC'.
-
-        Raises:
-            ValueError if state is not 'NO' or 'NC'.
-
-        """
-        if self._original_state is None:
-            self._original_state = self.relay_board.get_relay_status(
-                self.position)
-
-        if state is not RelayState.NO and state is not RelayState.NC:
-            raise ValueError(
-                'Invalid state. Received "%s". Expected any of %s.' %
-                (state, [state for state in RelayState]))
-        if self.get_status() != state:
-            self.relay_board.set(self.position, state)
-            SynchronizeRelays._sync_sleep_flag = True
-            sleep(Relay.transition_wait_time)
-
-    def set_no_for(self, seconds=button_press_time):
-        """Sets the relay to 'NORMALLY_OPEN' for seconds. Blocks the thread.
-
-        Args:
-            seconds: The number of seconds to sleep for.
-        """
-        self.set_no()
-        sleep(seconds)
-        self.set_nc()
-
-    def set_nc_for(self, seconds=button_press_time):
-        """Sets the relay to 'NORMALLY_CLOSED' for seconds. Blocks the thread.
-
-        Respects Relay.transition_wait_time for toggling state.
-
-        Args:
-            seconds: The number of seconds to sleep for.
-        """
-        self.set_nc()
-        sleep(seconds)
-        self.set_no()
-
-    def get_status(self):
-        return self.relay_board.get_relay_status(self.position)
-
-    def clean_up(self):
-        """Does any clean up needed to allow the next series of tests to run.
-
-        For now, all this does is switches to its previous state. Inheriting
-        from this class and overriding this method would be the best course of
-        action to allow a more complex clean up to occur. If you do this, be
-        sure to make the necessary modifications in RelayRig.initialize_relay
-        and RelayRigParser.parse_json_relays.
-        """
-        if self._original_state is not None:
-            self.set(self._original_state)
-
-    def is_dirty(self):
-        return self._original_state is not None
-
-
-class RelayDict(object):
-    """A wrapped dictionary that gives config errors upon failure.
-
-    Has the same interface as a dictionary, but when getting the key fails, the
-    dictionary returns a RelayConfigError, letting the user know that the reason
-    the dict failed to return a relay is because the relay was not found in the
-    config.
-
-    Also prevents modification of elements, because changing the relays here
-    does not change what they are in hardware.
-    """
-    ERROR_MESSAGE = ('Error: Attempted to get relay "%s" in %s "%s" but the '
-                     'relay does not exist.\nExisting relays are: %s.\nMake '
-                     'sure the missing relay is added to the config file, and '
-                     'is properly setup.')
-
-    def __init__(self, relay_device, input_dict):
-        self.relay_device = relay_device
-        self._store = input_dict
-
-    def __getitem__(self, key):
-        try:
-            return self._store[key]
-        except KeyError:
-            raise RelayConfigError(self.ERROR_MESSAGE %
-                                   (key, type(self.relay_device),
-                                    self.relay_device.name, self._store))
-
-    def __iter__(self):
-        return iter(self._store)
-
-    def __len__(self):
-        return len(self._store)
-
-    def __repr__(self):
-        return repr(self._store)
diff --git a/src/antlion/controllers/relay_lib/relay_board.py b/src/antlion/controllers/relay_lib/relay_board.py
deleted file mode 100644
index 464326d..0000000
--- a/src/antlion/controllers/relay_lib/relay_board.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.helpers import validate_key
-from antlion.controllers.relay_lib.relay import Relay
-
-
-class RelayBoard(object):
-    """Handles interfacing with the Relays and RelayDevices.
-
-    This is the base class for all RelayBoards.
-    """
-
-    def __init__(self, config):
-        """Creates a RelayBoard instance. Handles naming and relay creation.
-
-        Args:
-            config: A configuration dictionary, usually pulled from an element
-            under in "boards" list in the relay rig config file.
-        """
-        self.name = validate_key('name', config, str, 'config')
-        if '/' in self.name:
-            raise RelayConfigError('RelayBoard name cannot contain a "/".')
-        self.relays = dict()
-        for pos in self.get_relay_position_list():
-            self.relays[pos] = Relay(self, pos)
-
-    def set(self, relay_position, state):
-        """Sets the relay to the given state.
-
-        Args:
-            relay_position: the relay having its state modified.
-            state: the state to set the relay to. Currently only states NO and
-                   NC are supported.
-        """
-        raise NotImplementedError()
-
-    def get_relay_position_list(self):
-        """Returns a list of all possible relay positions."""
-        raise NotImplementedError()
-
-    def get_relay_status(self, relay):
-        """Returns the state of the given relay."""
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/relay_lib/relay_device.py b/src/antlion/controllers/relay_lib/relay_device.py
deleted file mode 100644
index 06bf42f..0000000
--- a/src/antlion/controllers/relay_lib/relay_device.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.helpers import validate_key
-
-
-class RelayDevice(object):
-    """The base class for all relay devices.
-
-    RelayDevice has access to both its relays as well as the relay rig it is
-    a part of. Note that you can receive references to the relay_boards
-    through relays[0...n].board. The relays are not guaranteed to be on
-    the same relay board.
-    """
-
-    def __init__(self, config, relay_rig):
-        """Creates a RelayDevice.
-
-        Args:
-            config: The dictionary found in the config file for this device.
-            You can add your own params to the config file if needed, and they
-            will be found in this dictionary.
-            relay_rig: The RelayRig the device is attached to. This won't be
-            useful for classes that inherit from RelayDevice, so just pass it
-            down to this __init__.
-        """
-        self.rig = relay_rig
-        self.relays = dict()
-
-        validate_key('name', config, str, '"devices" element')
-        self.name = config['name']
-
-        relays = validate_key('relays', config, dict, '"devices" list element')
-        if len(relays) < 1:
-            raise RelayConfigError(
-                'Key "relays" must have at least 1 element.')
-
-        for name, relay_id in relays.items():
-            self.relays[name] = relay_rig.relays[relay_id]
-
-    def setup(self):
-        """Sets up the relay device to be ready for commands."""
-
-    def clean_up(self):
-        """Sets the relay device back to its inert state."""
diff --git a/src/antlion/controllers/relay_lib/relay_rig.py b/src/antlion/controllers/relay_lib/relay_rig.py
deleted file mode 100644
index 835dd66..0000000
--- a/src/antlion/controllers/relay_lib/relay_rig.py
+++ /dev/null
@@ -1,177 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import collections
-
-from antlion.controllers.relay_lib.ak_xb10_speaker import AkXB10Speaker
-from antlion.controllers.relay_lib.dongles import SingleButtonDongle
-from antlion.controllers.relay_lib.dongles import ThreeButtonDongle
-from antlion.controllers.relay_lib.earstudio_receiver import EarstudioReceiver
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.fugu_remote import FuguRemote
-from antlion.controllers.relay_lib.generic_relay_device import GenericRelayDevice
-from antlion.controllers.relay_lib.headset import Headset
-from antlion.controllers.relay_lib.helpers import validate_key
-from antlion.controllers.relay_lib.i6s_headset import I6sHeadset
-from antlion.controllers.relay_lib.jaybird_x3 import JaybirdX3Earbuds
-from antlion.controllers.relay_lib.logitech_headset import LogitechAudioReceiver
-from antlion.controllers.relay_lib.power_supply import PowerSupply
-from antlion.controllers.relay_lib.rdl_relay_board import RdlRelayBoard
-from antlion.controllers.relay_lib.sain_smart_board import SainSmartBoard
-from antlion.controllers.relay_lib.sain_smart_8_channel_usb_relay_board import SainSmart8ChannelUsbRelayBoard
-from antlion.controllers.relay_lib.skullcandy import Skullcandy
-from antlion.controllers.relay_lib.sony_xb2_speaker import SonyXB2Speaker
-from antlion.controllers.relay_lib.sony_xb20_speaker import SonyXB20Speaker
-from antlion.controllers.relay_lib.tao_tronics_headset import TaoTronicsCarkit
-
-
-class RelayRig:
-    """A group of relay boards and their connected devices.
-
-    This class is also responsible for handling the creation of the relay switch
-    boards, as well as the devices and relays associated with them.
-
-    The boards dict can contain different types of relay boards. They share a
-    common interface through inheriting from RelayBoard. This layer can be
-    ignored by the user.
-
-    The relay devices are stored in a dict of (device_name: device). These
-    device references should be used by the user when they want to directly
-    interface with the relay switches. See RelayDevice or GeneralRelayDevice for
-    implementation.
-
-    """
-    DUPLICATE_ID_ERR_MSG = 'The {} "{}" is not unique. Duplicated in:\n {}'
-
-    # A dict of lambdas that instantiate relay board upon invocation.
-    # The key is the class type name, the value is the lambda.
-    _board_constructors = {
-        'SainSmartBoard':
-        lambda x: SainSmartBoard(x),
-        'RdlRelayBoard':
-        lambda x: RdlRelayBoard(x),
-        'SainSmart8ChannelUsbRelayBoard':
-        lambda x: SainSmart8ChannelUsbRelayBoard(x),
-    }
-
-    # Similar to the dict above, except for devices.
-    _device_constructors = {
-        'GenericRelayDevice': lambda x, rig: GenericRelayDevice(x, rig),
-        'FuguRemote': lambda x, rig: FuguRemote(x, rig),
-        'I6sHeadset': lambda x, rig: I6sHeadset(x, rig),
-        'JaybirdX3Earbuds': lambda x, rig: JaybirdX3Earbuds(x, rig),
-        "LogitechAudioReceiver" :lambda x, rig: LogitechAudioReceiver(x, rig),
-        'SonyXB2Speaker': lambda x, rig: SonyXB2Speaker(x, rig),
-        'SonyXB20Speaker': lambda x, rig: SonyXB20Speaker(x, rig),
-        'TaoTronicsCarkit': lambda x, rig: TaoTronicsCarkit(x, rig),
-        'AkXB10Speaker': lambda x, rig: AkXB10Speaker(x, rig),
-        'SingleButtonDongle': lambda x, rig: SingleButtonDongle(x, rig),
-        'ThreeButtonDongle': lambda x, rig: ThreeButtonDongle(x, rig),
-        'EarstudioReceiver': lambda x, rig: EarstudioReceiver(x, rig),
-        'Headset': lambda x, rig: Headset(x, rig),
-        'Skullcandy': lambda x, rig: Skullcandy(x, rig),
-        'PowerSupply': lambda x, rig: PowerSupply(x, rig),
-    }
-
-    def __init__(self, config):
-        self.relays = dict()
-        self.boards = dict()
-        self.devices = collections.OrderedDict()
-
-        validate_key('boards', config, list, 'relay config file')
-
-        for elem in config['boards']:
-            board = self.create_relay_board(elem)
-            if board.name in self.boards:
-                raise RelayConfigError(
-                    self.DUPLICATE_ID_ERR_MSG.format('name', elem['name'],
-                                                     elem))
-            self.boards[board.name] = board
-
-        # Note: 'boards' is a necessary value, 'devices' is not.
-        if 'devices' in config:
-            for elem in config['devices']:
-                relay_device = self.create_relay_device(elem)
-                if relay_device.name in self.devices:
-                    raise RelayConfigError(
-                        self.DUPLICATE_ID_ERR_MSG.format(
-                            'name', elem['name'], elem))
-                self.devices[relay_device.name] = relay_device
-        else:
-            device_config = dict()
-            device_config['name'] = 'GenericRelayDevice'
-            device_config['relays'] = dict()
-            for relay_id in self.relays:
-                device_config['relays'][relay_id] = relay_id
-            self.devices['device'] = self.create_relay_device(device_config)
-
-    def create_relay_board(self, config):
-        """Builds a RelayBoard from the given config.
-
-        Args:
-            config: An object containing 'type', 'name', 'relays', and
-            (optionally) 'properties'. See the example json file.
-
-        Returns:
-            A RelayBoard with the given type found in the config.
-
-        Raises:
-            RelayConfigError if config['type'] doesn't exist or is not a string.
-
-        """
-        validate_key('type', config, str, '"boards" element')
-        try:
-            ret = self._board_constructors[config['type']](config)
-        except LookupError:
-            raise RelayConfigError(
-                'RelayBoard with type {} not found. Has it been added '
-                'to the _board_constructors dict?'.format(config['type']))
-        for _, relay in ret.relays.items():
-            self.relays[relay.relay_id] = relay
-        return ret
-
-    def create_relay_device(self, config):
-        """Builds a RelayDevice from the given config.
-
-        When given no 'type' key in the config, the function will default to
-        returning a GenericRelayDevice with the relays found in the 'relays'
-        array.
-
-        Args:
-            config: An object containing 'name', 'relays', and (optionally)
-            type.
-
-        Returns:
-            A RelayDevice with the given type found in the config. If no type is
-            found, it will default to GenericRelayDevice.
-
-        Raises:
-            RelayConfigError if the type given does not match any from the
-            _device_constructors dictionary.
-
-        """
-        if 'type' in config:
-            if config['type'] not in RelayRig._device_constructors:
-                raise RelayConfigError(
-                    'Device with type {} not found. Has it been added '
-                    'to the _device_constructors dict?'.format(config['type']))
-            else:
-                device = self._device_constructors[config['type']](config,
-                                                                   self)
-
-        else:
-            device = GenericRelayDevice(config, self)
-
-        return device
diff --git a/src/antlion/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py b/src/antlion/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py
deleted file mode 100644
index 33d7f43..0000000
--- a/src/antlion/controllers/relay_lib/sain_smart_8_channel_usb_relay_board.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.usb_relay_board_base import UsbRelayBoardBase
-from pylibftdi import BitBangDevice
-""" This library is to control the sainsmart board.
-
-Device:
-    https://www.sainsmart.com/products/8-channel-12v-usb-relay-module
-
-Additional setup steps:
-Change out pip/pip3 and python2.7/3.4 based on python version
-1. pip install pylibftdi
-2. pip install libusb1
-3. sudo apt-get install libftdi-dev
-4. Make this file /etc/udev/rules.d/99-libftdi.rules with root and add the lines below:
-SUBSYSTEMS=="usb", ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6001", GROUP="plugdev", MODE="0660"
-SUBSYSTEMS=="usb", ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6014", GROUP="plugdev", MODE="0660"
-5. Connect USB relay to computer and power board with necessary connectors
-6. Verify device is found by: python -m pylibftdi.examples.list_devices
-6a. Example output: FTDI:FT245R USB FIFO:A9079L5D
-7. The FIFO value is going to be your device name in the config
-8. Your config should look something like this (note FIFO name is used here):
-
-{
-    "_description": "This is an example skeleton of a ficticious relay.",
-    "testbed": [{
-        "_description": "A testbed with one relay",
-        "name": "relay_test",
-        "RelayDevice": {
-            "boards": [{
-                "type": "SainSmart8ChannelUsbRelayBoard",
-                "name": "ttyUSB0",
-                "device": "A9079L5D"
-            }],
-            "devices": [{
-                "type": "SingleButtonDongle",
-                "name": "aukey",
-                "mac_address": "e9:08:ef:2b:47:a1",
-                "relays": {
-                    "Action": "ttyUSB0/1"
-                }
-
-            }]
-        }
-    }],
-    "logpath": "/tmp/logs",
-    "testpaths": ["../tests"]
-}
-"""
-
-
-class SainSmart8ChannelUsbRelayBoard(UsbRelayBoardBase):
-    def set(self, relay_position, value):
-        """Returns the current status of the passed in relay.
-
-        Note that this board acts in reverse of normal relays.
-        EG: NO = NC and NC = NO
-
-        Args:
-            relay_position: Relay position.
-            value: Turn_on or Turn_off the relay for the given relay_position.
-        """
-        with BitBangDevice(self.device) as bb:
-            if value == RelayState.NO:
-                bb.port &= ~(self.address[relay_position])
-            else:
-                bb.port |= self.address[relay_position]
-        self.status_dict[relay_position] = value
diff --git a/src/antlion/controllers/relay_lib/sain_smart_board.py b/src/antlion/controllers/relay_lib/sain_smart_board.py
deleted file mode 100644
index b5bc310..0000000
--- a/src/antlion/controllers/relay_lib/sain_smart_board.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-from urllib.request import urlopen
-
-from antlion.controllers.relay_lib.errors import RelayDeviceConnectionError
-from antlion.controllers.relay_lib.helpers import validate_key
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.relay_board import RelayBoard
-
-BASE_URL = 'http://192.168.1.4/30000/'
-
-
-class SainSmartBoard(RelayBoard):
-    """Controls and queries SainSmart Web Relay Board.
-
-    Controls and queries SainSmart Web Relay Board, found here:
-    http://www.sainsmart.com/sainsmart-rj45-tcp-ip-remote-controller-board-with-8-channels-relay-integrated.html
-    this uses a web interface to toggle relays.
-
-    There is an unmentioned hidden status page that can be found at <root>/99/.
-    """
-
-    # No longer used. Here for debugging purposes.
-    #
-    # Old status pages. Used before base_url/99 was found.
-    # STATUS_1 = '40'
-    # STATUS_2 = '43'
-    #
-    # This is the regex used to parse the old status pages:
-    # r'y-\d(?P<relay>\d).+?> (?:&nbsp)?(?P<status>.*?)&'
-    #
-    # Pages that will turn all switches on or off, even the ghost switches.
-    # ALL_RELAY_OFF = '44'
-    # ALL_RELAY_ON = '45'
-
-    HIDDEN_STATUS_PAGE = '99'
-
-    VALID_RELAY_POSITIONS = [0, 1, 2, 3, 4, 5, 6, 7]
-    NUM_RELAYS = 8
-
-    def __init__(self, config):
-        # This will be lazy loaded
-        self.status_dict = None
-        self.base_url = validate_key('base_url', config, str, 'config')
-        if not self.base_url.endswith('/'):
-            self.base_url += '/'
-        super(SainSmartBoard, self).__init__(config)
-
-    def get_relay_position_list(self):
-        return self.VALID_RELAY_POSITIONS
-
-    def _load_page(self, relative_url):
-        """Loads a web page at self.base_url + relative_url.
-
-        Properly opens and closes the web page.
-
-        Args:
-            relative_url: The string appended to the base_url.
-
-        Returns:
-            the contents of the web page.
-
-        Raises:
-            A RelayDeviceConnectionError is raised if the page cannot be loaded.
-
-        """
-        try:
-            page = urlopen(self.base_url + relative_url)
-            result = page.read().decode('utf-8')
-            page.close()
-        except IOError:
-            raise RelayDeviceConnectionError(
-                'Unable to connect to board "{}" through {}'.format(
-                    self.name, self.base_url + relative_url))
-        return result
-
-    def _sync_status_dict(self):
-        """Returns a dictionary of relays and there current state."""
-        result = self._load_page(self.HIDDEN_STATUS_PAGE)
-        if 'TUX' not in result:
-            raise RelayDeviceConnectionError(
-                'Sainsmart board with URL %s has not completed initialization '
-                'after its IP was set, and must be power-cycled to prevent '
-                'random disconnections. After power-cycling, make sure %s/%s '
-                'has TUX appear in its output.' %
-                (self.base_url, self.base_url, self.HIDDEN_STATUS_PAGE))
-        status_string = re.search(r'">([01]*)TUX', result).group(1)
-
-        self.status_dict = {}
-        for index, char in enumerate(status_string):
-            self.status_dict[index] = (
-                RelayState.NC if char == '1' else RelayState.NO)
-
-    def _print_status(self):
-        """Prints out the list of relays and their current state."""
-        for i in range(0, 8):
-            print('Relay {}: {}'.format(i, self.status_dict[i]))
-
-    def get_relay_status(self, relay_position):
-        """Returns the current status of the passed in relay."""
-        if self.status_dict is None:
-            self._sync_status_dict()
-        return self.status_dict[relay_position]
-
-    def set(self, relay_position, value):
-        """Sets the given relay to be either ON or OFF, indicated by value."""
-        if self.status_dict is None:
-            self._sync_status_dict()
-        self._load_page(self._get_relay_url_code(relay_position, value))
-        self.status_dict[relay_position] = value
-
-    @staticmethod
-    def _get_relay_url_code(relay_position, no_or_nc):
-        """Returns the two digit code corresponding to setting the relay."""
-        if no_or_nc == RelayState.NC:
-            on_modifier = 1
-        else:
-            on_modifier = 0
-        return '{:02d}'.format(relay_position * 2 + on_modifier)
diff --git a/src/antlion/controllers/relay_lib/skullcandy.py b/src/antlion/controllers/relay_lib/skullcandy.py
deleted file mode 100644
index 078bbfd..0000000
--- a/src/antlion/controllers/relay_lib/skullcandy.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-POWER_TOGGLE_WAIT_TIME = 1
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class Skullcandy(BluetoothRelayDevice):
-    """Skullcandy Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def turn_power_on_and_enter_pairing_mode(self):
-        self._hold_button(Buttons.POWER, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/sony_xb20_speaker.py b/src/antlion/controllers/relay_lib/sony_xb20_speaker.py
deleted file mode 100644
index 942a812..0000000
--- a/src/antlion/controllers/relay_lib/sony_xb20_speaker.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 6
-POWER_TOGGLE_WAIT_TIME = 1
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-
-
-class SonyXB20Speaker(BluetoothRelayDevice):
-    """Sony XB20 Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_on(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_TOGGLE_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        self._hold_button(Buttons.POWER, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/sony_xb2_speaker.py b/src/antlion/controllers/relay_lib/sony_xb2_speaker.py
deleted file mode 100644
index 9c97c35..0000000
--- a/src/antlion/controllers/relay_lib/sony_xb2_speaker.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-PAIRING_MODE_WAIT_TIME = 5
-POWER_ON_WAIT_TIME = 2
-POWER_OFF_WAIT_TIME = 6
-
-
-class Buttons(enum.Enum):
-    POWER = 'Power'
-    PAIR = 'Pair'
-
-
-class SonyXB2Speaker(BluetoothRelayDevice):
-    """Sony XB2 Bluetooth Speaker model
-
-    Wraps the button presses, as well as the special features like pairing.
-    """
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def _hold_button(self, button, seconds):
-        self.hold_down(button.value)
-        time.sleep(seconds)
-        self.release(button.value)
-
-    def power_on(self):
-        self._hold_button(Buttons.POWER, POWER_ON_WAIT_TIME)
-
-    def power_off(self):
-        self._hold_button(Buttons.POWER, POWER_OFF_WAIT_TIME)
-
-    def enter_pairing_mode(self):
-        self._hold_button(Buttons.PAIR, PAIRING_MODE_WAIT_TIME)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def clean_up(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.clean_up(self)
diff --git a/src/antlion/controllers/relay_lib/tao_tronics_headset.py b/src/antlion/controllers/relay_lib/tao_tronics_headset.py
deleted file mode 100644
index 88bb61f..0000000
--- a/src/antlion/controllers/relay_lib/tao_tronics_headset.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-import time
-
-from antlion.controllers.relay_lib.devices.bluetooth_relay_device import BluetoothRelayDevice
-
-WAIT_TIME = 0.05
-
-
-class Buttons(enum.Enum):
-    NEXT = 'Next'
-    PREVIOUS = "Previous"
-    PLAY_PAUSE = 'Play_pause'
-    VOLUME_UP = "Volume_up"
-    VOLUME_DOWN = "Volume_down"
-
-
-class TaoTronicsCarkit(BluetoothRelayDevice):
-
-    def __init__(self, config, relay_rig):
-        BluetoothRelayDevice.__init__(self, config, relay_rig)
-        self._ensure_config_contains_relays(button.value for button in Buttons)
-
-    def setup(self):
-        """Sets all relays to their default state (off)."""
-        BluetoothRelayDevice.setup(self)
-
-    def press_play_pause(self):
-        """
-        Sets relay to
-            Play state : if there is no A2DP_streaming.
-            Pause state : if there is A2DP_streaming.
-        """
-        self.relays[Buttons.PLAY_PAUSE.value].set_no_for(WAIT_TIME)
-
-    def press_next(self):
-        """Skips to next song from relay_device."""
-        self.relays[Buttons.NEXT.value].set_no_for(WAIT_TIME)
-
-    def press_previous(self):
-        """Skips to previous song from relay_device."""
-        self.relays[Buttons.PREVIOUS.value].set_no_for(WAIT_TIME)
-
-    def press_volume_up(self):
-        """Increases volume from relay_device."""
-        self.relays[Buttons.VOLUME_UP.value].set_no_for(WAIT_TIME)
-
-    def press_volume_down(self):
-        """Decreases volume from relay_device."""
-        self.relays[Buttons.VOLUME_DOWN.value].set_no_for(WAIT_TIME)
-
-    def press_initiate_call(self):
-        """Initiate call from relay device."""
-        for i in range(0, 2):
-            self.press(Buttons.PLAY_PAUSE.value)
-            time.sleep(0.2)
-        return True
-
-    def press_accept_call(self):
-        """Accepts call from relay device."""
-        self.press(Buttons.PLAY_PAUSE.value)
-        return True
diff --git a/src/antlion/controllers/relay_lib/usb_relay_board_base.py b/src/antlion/controllers/relay_lib/usb_relay_board_base.py
deleted file mode 100644
index 45422eb..0000000
--- a/src/antlion/controllers/relay_lib/usb_relay_board_base.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.relay_board import RelayBoard
-from pylibftdi import BitBangDevice
-
-
-class UsbRelayBoardBase(RelayBoard):
-
-    VALID_RELAY_POSITIONS = [1, 2, 3, 4, 5, 6, 7, 8]
-    NUM_RELAYS = 8
-
-    def __init__(self, config):
-        self.status_dict = dict()
-        self.device = config["device"]
-        super(UsbRelayBoardBase, self).__init__(config)
-        self.address = {
-            1: 0x1,
-            2: 0x2,
-            3: 0x4,
-            4: 0x8,
-            5: 0x10,
-            6: 0x20,
-            7: 0x40,
-            8: 0x80,
-            "select_all": 0xFF
-        }
-
-    def get_relay_position_list(self):
-        return self.VALID_RELAY_POSITIONS
-
-    def test_bit(self, int_type, offset):
-        """Function to get status for the given relay position.
-
-        Args:
-            int_type: Port value for given relay.
-            offset: offset for given Relay_position.
-
-        Returns:
-            returns current status for given relay_position.
-        """
-        mask = 1 << offset
-        return (int_type & mask)
-
-    def _get_relay_state(self, data, relay):
-        """Function to get status for the given relay position.
-
-        Args:
-            data: Port value for given relay.
-            relay: Relay_position.
-
-        Returns:
-            returns current status for given relay_position.
-        """
-        if relay == 1:
-            return self.test_bit(data, 1)
-        if relay == 2:
-            return self.test_bit(data, 3)
-        if relay == 3:
-            return self.test_bit(data, 5)
-        if relay == 4:
-            return self.test_bit(data, 7)
-        if relay == 5:
-            return self.test_bit(data, 2)
-        if relay == 6:
-            return self.test_bit(data, 4)
-        if relay == 7:
-            return self.test_bit(data, 6)
-        if relay == 8:
-            return self.test_bit(data, 8)
-
-    def get_relay_status(self, relay_position):
-        """Get relay status for the given relay position.
-
-        Args:
-            relay_position: Status for given Relay position.
-
-        Returns:
-            returns current status for given relay_position.
-        """
-        with BitBangDevice(self.device) as bb:
-            self.status_dict[relay_position] = self._get_relay_state(
-                bb.port, relay_position)
-        return self.status_dict[relay_position]
-
-    def set(self, relay_position, value):
-        """Returns the current status of the passed in relay.
-
-        Args:
-            relay_position: Relay position.
-            value: Turn_on or Turn_off the relay for the given relay_position.
-        """
-        raise NotImplementedError
diff --git a/src/antlion/controllers/rohdeschwarz_lib/OWNERS b/src/antlion/controllers/rohdeschwarz_lib/OWNERS
deleted file mode 100644
index e4010df..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-iguarna@google.com
-chaoyangf@google.com
-yixiang@google.com
-codycaldwell@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/rohdeschwarz_lib/__init__.py b/src/antlion/controllers/rohdeschwarz_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmw500.py b/src/antlion/controllers/rohdeschwarz_lib/cmw500.py
deleted file mode 100644
index 978600d..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmw500.py
+++ /dev/null
@@ -1,1167 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from enum import Enum
-
-from antlion.controllers import abstract_inst
-
-LTE_ATTACH_RESP = 'ATT'
-LTE_CONN_RESP = 'CONN'
-LTE_IDLE_RESP = 'IDLE'
-LTE_PSWITCHED_ON_RESP = 'ON'
-LTE_PSWITCHED_OFF_RESP = 'OFF'
-
-STATE_CHANGE_TIMEOUT = 20
-
-
-class LteState(Enum):
-    """LTE ON and OFF"""
-    LTE_ON = 'ON'
-    LTE_OFF = 'OFF'
-
-
-class BtsNumber(Enum):
-    """Base station Identifiers."""
-    BTS1 = 'PCC'
-    BTS2 = 'SCC1'
-    BTS3 = 'SCC2'
-    BTS4 = 'SCC3'
-    BTS5 = 'SCC4'
-    BTS6 = 'SCC6'
-    BTS7 = 'SCC7'
-
-
-class LteBandwidth(Enum):
-    """Supported LTE bandwidths."""
-    BANDWIDTH_1MHz = 'B014'
-    BANDWIDTH_3MHz = 'B030'
-    BANDWIDTH_5MHz = 'B050'
-    BANDWIDTH_10MHz = 'B100'
-    BANDWIDTH_15MHz = 'B150'
-    BANDWIDTH_20MHz = 'B200'
-
-
-class DuplexMode(Enum):
-    """Duplex Modes"""
-    FDD = 'FDD'
-    TDD = 'TDD'
-
-
-class SchedulingMode(Enum):
-    """Supported scheduling modes."""
-    RMC = 'RMC'
-    USERDEFINEDCH = 'UDCHannels'
-
-
-class TransmissionModes(Enum):
-    """Supported transmission modes."""
-    TM1 = 'TM1'
-    TM2 = 'TM2'
-    TM3 = 'TM3'
-    TM4 = 'TM4'
-    TM7 = 'TM7'
-    TM8 = 'TM8'
-    TM9 = 'TM9'
-
-
-class UseCarrierSpecific(Enum):
-    """Enable or disable carrier specific."""
-    UCS_ON = 'ON'
-    UCS_OFF = 'OFF'
-
-
-class RbPosition(Enum):
-    """Supported RB positions."""
-    LOW = 'LOW'
-    HIGH = 'HIGH'
-    P5 = 'P5'
-    P10 = 'P10'
-    P23 = 'P23'
-    P35 = 'P35'
-    P48 = 'P48'
-
-
-class ModulationType(Enum):
-    """Supported Modulation Types."""
-    QPSK = 'QPSK'
-    Q16 = 'Q16'
-    Q64 = 'Q64'
-    Q256 = 'Q256'
-
-
-class DciFormat(Enum):
-    """Support DCI Formats for MIMOs"""
-    D1 = 'D1'
-    D1A = 'D1A'
-    D1B = 'D1B'
-    D2 = 'D2'
-    D2A = 'D2A'
-    D2B = 'D2B'
-    D2C = 'D2C'
-
-
-class MimoModes(Enum):
-    """MIMO Modes dl antennas"""
-    MIMO1x1 = 'ONE'
-    MIMO2x2 = 'TWO'
-    MIMO4x4 = 'FOUR'
-
-
-class MimoScenario(Enum):
-    """Supported mimo scenarios"""
-    SCEN1x1 = 'SCELl:FLEXible SUA1,RF1C,RX1,RF1C,TX1'
-    SCEN2x2 = 'TRO:FLEXible SUA1,RF1C,RX1,RF1C,TX1,RF3C,TX2'
-    SCEN4x4 = 'FRO FLEXible SUA1,RF1C,RX1,RF1C,TX1,RF3C,TX2,RF2C,TX3,RF4C,TX4'
-
-
-class RrcState(Enum):
-    """States to enable/disable rrc."""
-    RRC_ON = 'ON'
-    RRC_OFF = 'OFF'
-
-
-class MacPadding(Enum):
-    """Enables/Disables Mac Padding."""
-    ON = 'ON'
-    OFF = 'OFF'
-
-
-class ConnectionType(Enum):
-    """Supported Connection Types."""
-    TEST = 'TESTmode'
-    DAU = 'DAPPlication'
-
-
-class RepetitionMode(Enum):
-    """Specifies LTE Measurement Repetition Mode."""
-    SINGLESHOT = 'SINGleshot'
-    CONTINUOUS = 'CONTinuous'
-
-
-class TpcPowerControl(Enum):
-    """Specifies Up Link power control types."""
-    MIN_POWER = 'MINPower'
-    MAX_POWER = 'MAXPower'
-    CONSTANT = 'CONStant'
-    SINGLE = 'SINGle'
-    UDSINGLE = 'UDSingle'
-    UDCONTINUOUS = 'UDContinuous'
-    ALTERNATE = 'ALT0'
-    CLOSED_LOOP = 'CLOop'
-    RP_CONTROL = 'RPControl'
-    FLEX_POWER = 'FULPower'
-
-
-class ReducedPdcch(Enum):
-    """Enables/disables the reduction of PDCCH resources."""
-    ON = 'ON'
-    OFF = 'OFF'
-
-
-class Cmw500(abstract_inst.SocketInstrument):
-
-    def __init__(self, ip_addr, port):
-        """Init method to setup variables for controllers.
-
-        Args:
-              ip_addr: Controller's ip address.
-              port: Port
-        """
-        super(Cmw500, self).__init__(ip_addr, port)
-        self._connect_socket()
-        self._send('*CLS')
-        self._send('*ESE 0;*SRE 0')
-        self._send('*CLS')
-        self._send('*ESE 1;*SRE 4')
-        self._send('SYST:DISP:UPD ON')
-
-    def switch_lte_signalling(self, state):
-        """ Turns LTE signalling ON/OFF.
-
-        Args:
-              state: an instance of LteState indicating the state to which LTE
-                signal has to be set.
-        """
-        if not isinstance(state, LteState):
-            raise ValueError('state should be the instance of LteState.')
-
-        state = state.value
-
-        cmd = 'SOURce:LTE:SIGN:CELL:STATe {}'.format(state)
-        self.send_and_recv(cmd)
-
-        time_elapsed = 0
-        while time_elapsed < STATE_CHANGE_TIMEOUT:
-            response = self.send_and_recv('SOURce:LTE:SIGN:CELL:STATe:ALL?')
-
-            if response == state + ',ADJ':
-                self._logger.info('LTE signalling is now {}.'.format(state))
-                break
-
-            # Wait for a second and increase time count by one
-            time.sleep(1)
-            time_elapsed += 1
-        else:
-            raise CmwError('Failed to turn {} LTE signalling.'.format(state))
-
-    def enable_packet_switching(self):
-        """Enable packet switching in call box."""
-        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion CONNect')
-        self.wait_for_pswitched_state()
-
-    def disable_packet_switching(self):
-        """Disable packet switching in call box."""
-        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion DISConnect')
-        self.wait_for_pswitched_state()
-
-    @property
-    def use_carrier_specific(self):
-        """Gets current status of carrier specific duplex configuration."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:DMODe:UCSPECific?')
-
-    @use_carrier_specific.setter
-    def use_carrier_specific(self, state):
-        """Sets the carrier specific duplex configuration.
-
-        Args:
-            state: ON/OFF UCS configuration.
-        """
-        cmd = 'CONFigure:LTE:SIGN:DMODe:UCSPECific {}'.format(state)
-        self.send_and_recv(cmd)
-
-    def send_and_recv(self, cmd):
-        """Send and recv the status of the command.
-
-        Args:
-            cmd: Command to send.
-
-        Returns:
-            status: returns the status of the command sent.
-        """
-
-        self._send(cmd)
-        if '?' in cmd:
-            status = self._recv()
-            return status
-
-    def configure_mimo_settings(self, mimo):
-        """Sets the mimo scenario for the test.
-
-        Args:
-            mimo: mimo scenario to set.
-        """
-        cmd = 'ROUTe:LTE:SIGN:SCENario:{}'.format(mimo.value)
-        self.send_and_recv(cmd)
-
-    def wait_for_pswitched_state(self, timeout=10):
-        """Wait until pswitched state.
-
-        Args:
-            timeout: timeout for lte pswitched state.
-
-        Raises:
-            CmwError on timeout.
-        """
-        while timeout > 0:
-            state = self.send_and_recv('FETCh:LTE:SIGN:PSWitched:STATe?')
-            if state == LTE_PSWITCHED_ON_RESP:
-                self._logger.debug('Connection to setup initiated.')
-                break
-            elif state == LTE_PSWITCHED_OFF_RESP:
-                self._logger.debug('Connection to setup detached.')
-                break
-
-            # Wait for a second and decrease count by one
-            time.sleep(1)
-            timeout -= 1
-        else:
-            raise CmwError('Failure in setting up/detaching connection')
-
-    def wait_for_attached_state(self, timeout=120):
-        """Attach the controller with device.
-
-        Args:
-            timeout: timeout for phone to get attached.
-
-        Raises:
-            CmwError on time out.
-        """
-        while timeout > 0:
-            state = self.send_and_recv('FETCh:LTE:SIGN:PSWitched:STATe?')
-
-            if state == LTE_ATTACH_RESP:
-                self._logger.debug('Call box attached with device')
-                break
-
-            # Wait for a second and decrease count by one
-            time.sleep(1)
-            timeout -= 1
-        else:
-            raise CmwError('Device could not be attached')
-
-    def wait_for_rrc_state(self, state, timeout=120):
-        """ Waits until a certain RRC state is set.
-
-        Args:
-            state: the RRC state that is being waited for.
-            timeout: timeout for phone to be in connected state.
-
-        Raises:
-            CmwError on time out.
-        """
-        if state not in [LTE_CONN_RESP, LTE_IDLE_RESP]:
-            raise ValueError(
-                'The allowed values for state are {} and {}.'.format(
-                    LTE_CONN_RESP, LTE_IDLE_RESP))
-
-        while timeout > 0:
-            new_state = self.send_and_recv('SENSe:LTE:SIGN:RRCState?')
-
-            if new_state == state:
-                self._logger.debug('The RRC state is {}.'.format(new_state))
-                break
-
-            # Wait for a second and decrease count by one
-            time.sleep(1)
-            timeout -= 1
-        else:
-            raise CmwError('Timeout before RRC state was {}.'.format(state))
-
-    def reset(self):
-        """System level reset"""
-        self.send_and_recv('*RST; *OPC')
-
-    @property
-    def get_instrument_id(self):
-        """Gets instrument identification number"""
-        return self.send_and_recv('*IDN?')
-
-    def disconnect(self):
-        """Disconnect controller from device and switch to local mode."""
-        self.switch_lte_signalling(LteState.LTE_OFF)
-        self.close_remote_mode()
-        self._close_socket()
-
-    def close_remote_mode(self):
-        """Exits remote mode to local mode."""
-        self.send_and_recv('&GTL')
-
-    def detach(self):
-        """Detach callbox and controller."""
-        self.send_and_recv('CALL:LTE:SIGN:PSWitched:ACTion DETach')
-
-    @property
-    def rrc_connection(self):
-        """Gets the RRC connection state."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:KRRC?')
-
-    @rrc_connection.setter
-    def rrc_connection(self, state):
-        """Selects whether the RRC connection is kept or released after attach.
-
-        Args:
-            mode: RRC State ON/OFF.
-        """
-        if not isinstance(state, RrcState):
-            raise ValueError('state should be the instance of RrcState.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:KRRC {}'.format(state.value)
-        self.send_and_recv(cmd)
-
-    @property
-    def rrc_connection_timer(self):
-        """Gets the inactivity timeout for disabled rrc connection."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:RITimer?')
-
-    @rrc_connection_timer.setter
-    def rrc_connection_timer(self, time_in_secs):
-        """Sets the inactivity timeout for disabled rrc connection. By default
-        the timeout is set to 5.
-
-        Args:
-            time_in_secs: timeout of inactivity in rrc connection.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:RITimer {}'.format(time_in_secs)
-        self.send_and_recv(cmd)
-
-    @property
-    def dl_mac_padding(self):
-        """Gets the state of mac padding."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:DLPadding?')
-
-    @dl_mac_padding.setter
-    def dl_mac_padding(self, state):
-        """Enables/Disables downlink padding at the mac layer.
-
-        Args:
-            state: ON/OFF
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:DLPadding {}'.format(state.value)
-        self.send_and_recv(cmd)
-
-    @property
-    def connection_type(self):
-        """Gets the connection type applied in callbox."""
-        return self.send_and_recv('CONFigure:LTE:SIGN:CONNection:CTYPe?')
-
-    @connection_type.setter
-    def connection_type(self, ctype):
-        """Sets the connection type to be applied.
-
-        Args:
-            ctype: Connection type.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:CTYPe {}'.format(ctype.value)
-        self.send_and_recv(cmd)
-
-    def get_base_station(self, bts_num=BtsNumber.BTS1):
-        """Gets the base station object based on bts num. By default
-        bts_num set to PCC
-
-        Args:
-            bts_num: base station identifier
-
-        Returns:
-            base station object.
-        """
-        return BaseStation(self, bts_num)
-
-    def init_lte_measurement(self):
-        """Gets the class object for lte measurement which can be used to
-        initiate measurements.
-
-        Returns:
-            lte measurement object.
-        """
-        return LteMeasurement(self)
-
-
-class BaseStation(object):
-    """Class to interact with different base stations"""
-
-    def __init__(self, cmw, bts_num):
-        if not isinstance(bts_num, BtsNumber):
-            raise ValueError('bts_num should be an instance of BtsNumber.')
-        self._bts = bts_num.value
-        self._cmw = cmw
-
-    @property
-    def duplex_mode(self):
-        """Gets current duplex of cell."""
-        cmd = 'CONFigure:LTE:SIGN:{}:DMODe?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @duplex_mode.setter
-    def duplex_mode(self, mode):
-        """Sets the Duplex mode of cell.
-
-        Args:
-            mode: String indicating FDD or TDD.
-        """
-        if not isinstance(mode, DuplexMode):
-            raise ValueError('mode should be an instance of DuplexMode.')
-
-        cmd = 'CONFigure:LTE:SIGN:{}:DMODe {}'.format(self._bts, mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def band(self):
-        """Gets the current band of cell."""
-        cmd = 'CONFigure:LTE:SIGN:{}:BAND?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @band.setter
-    def band(self, band):
-        """Sets the Band of cell.
-
-        Args:
-            band: band of cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:{}:BAND {}'.format(self._bts, band)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dl_channel(self):
-        """Gets the downlink channel of cell."""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dl_channel.setter
-    def dl_channel(self, channel):
-        """Sets the downlink channel number of cell.
-
-        Args:
-            channel: downlink channel number of cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL {}'.format(
-            self._bts, channel)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def ul_channel(self):
-        """Gets the uplink channel of cell."""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @ul_channel.setter
-    def ul_channel(self, channel):
-        """Sets the up link channel number of cell.
-
-        Args:
-            channel: up link channel number of cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL {}'.format(
-            self._bts, channel)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def bandwidth(self):
-        """Get the channel bandwidth of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:CELL:BANDwidth:{}:DL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @bandwidth.setter
-    def bandwidth(self, bandwidth):
-        """Sets the channel bandwidth of the cell.
-
-        Args:
-            bandwidth: channel bandwidth of cell.
-        """
-        if not isinstance(bandwidth, LteBandwidth):
-            raise ValueError('bandwidth should be an instance of '
-                             'LteBandwidth.')
-        cmd = 'CONFigure:LTE:SIGN:CELL:BANDwidth:{}:DL {}'.format(
-            self._bts, bandwidth.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def ul_frequency(self):
-        """Get the uplink frequency of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL? MHZ'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @ul_frequency.setter
-    def ul_frequency(self, freq):
-        """Get the uplink frequency of the cell.
-
-        Args:
-            freq: uplink frequency of the cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:UL {} MHZ'.format(
-            self._bts, freq)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dl_frequency(self):
-        """Get the downlink frequency of the cell"""
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL? MHZ'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dl_frequency.setter
-    def dl_frequency(self, freq):
-        """Get the downlink frequency of the cell.
-
-        Args:
-            freq: downlink frequency of the cell.
-        """
-        cmd = 'CONFigure:LTE:SIGN:RFSettings:{}:CHANnel:DL {} MHZ'.format(
-            self._bts, freq)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def transmode(self):
-        """Gets the TM of cell."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:TRANsmission?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @transmode.setter
-    def transmode(self, tm_mode):
-        """Sets the TM of cell.
-
-        Args:
-            tm_mode: TM of cell.
-        """
-        if not isinstance(tm_mode, TransmissionModes):
-            raise ValueError('tm_mode should be an instance of '
-                             'Transmission modes.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:TRANsmission {}'.format(
-            self._bts, tm_mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def downlink_power_level(self):
-        """Gets RSPRE level."""
-        cmd = 'CONFigure:LTE:SIGN:DL:{}:RSEPre:LEVel?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @downlink_power_level.setter
-    def downlink_power_level(self, pwlevel):
-        """Modifies RSPRE level.
-
-        Args:
-            pwlevel: power level in dBm.
-        """
-        cmd = 'CONFigure:LTE:SIGN:DL:{}:RSEPre:LEVel {}'.format(
-            self._bts, pwlevel)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def uplink_power_control(self):
-        """Gets open loop nominal power directly."""
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:OLNPower?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @uplink_power_control.setter
-    def uplink_power_control(self, ul_power):
-        """Sets open loop nominal power directly.
-
-        Args:
-            ul_power: uplink power level.
-        """
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:OLNPower {}'.format(
-            self._bts, ul_power)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def uldl_configuration(self):
-        """Gets uldl configuration of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:ULDL?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @uldl_configuration.setter
-    def uldl_configuration(self, uldl):
-        """Sets the ul-dl configuration.
-
-        Args:
-            uldl: Configuration value ranging from 0 to 6.
-        """
-        if uldl not in range(0, 7):
-            raise ValueError('uldl configuration value should be between'
-                             ' 0 and 6 inclusive.')
-
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:ULDL {}'.format(self._bts, uldl)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def tdd_special_subframe(self):
-        """Gets special subframe of the cell."""
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:SSUBframe?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @tdd_special_subframe.setter
-    def tdd_special_subframe(self, sframe):
-        """Sets the tdd special subframe of the cell.
-
-        Args:
-            sframe: Integer value ranging from 1 to 9.
-        """
-        if sframe not in range(0, 10):
-            raise ValueError('tdd special subframe should be between 0 and 9'
-                             ' inclusive.')
-
-        cmd = 'CONFigure:LTE:SIGN:CELL:{}:SSUBframe {}'.format(
-            self._bts, sframe)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def scheduling_mode(self):
-        """Gets the current scheduling mode."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:STYPe?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @scheduling_mode.setter
-    def scheduling_mode(self, mode):
-        """Sets the scheduling type for the cell.
-
-        Args:
-            mode: Selects the channel mode to be scheduled.
-        """
-        if not isinstance(mode, SchedulingMode):
-            raise ValueError('mode should be the instance of scheduling mode.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:STYPe {}'.format(
-            self._bts, mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def rb_configuration_dl(self):
-        """Gets rmc's rb configuration for down link. This function returns
-        Number of Resource blocks, Resource block position and Modulation type.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:{}:DL?'.format(
-            self._bts, self.scheduling_mode)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_configuration_dl.setter
-    def rb_configuration_dl(self, rb_config):
-        """Sets the rb configuration for down link for scheduling type.
-
-        Args:
-            rb_config: Tuple containing Number of resource blocks, resource
-            block position and modulation type.
-
-        Raises:
-            ValueError: If tuple unpacking fails.
-        """
-        if self.scheduling_mode == 'RMC':
-            rb, rb_pos, modulation = rb_config
-
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:RMC:DL {},{},'
-                   '{}'.format(self._bts, rb, rb_pos, modulation))
-            self._cmw.send_and_recv(cmd)
-
-        elif self.scheduling_mode == 'UDCH':
-            rb, start_rb, modulation, tbs = rb_config
-
-            self.validate_rb(rb)
-
-            if not isinstance(modulation, ModulationType):
-                raise ValueError('Modulation should be of type '
-                                 'ModulationType.')
-
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:UDCHannels:DL {},{},'
-                   '{},{}'.format(self._bts, rb, start_rb, modulation.value,
-                                  tbs))
-            self._cmw.send_and_recv(cmd)
-
-    @property
-    def rb_configuration_ul(self):
-        """Gets rb configuration for up link. This function returns
-        Number of Resource blocks, Resource block position and Modulation type.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:{}:UL?'.format(
-            self._bts, self.scheduling_mode)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_configuration_ul.setter
-    def rb_configuration_ul(self, rb_config):
-        """Sets the rb configuration for down link for scheduling mode.
-
-        Args:
-            rb_config: Tuple containing Number of resource blocks, resource
-            block position and modulation type.
-
-        Raises:
-            ValueError: If tuple unpacking fails.
-        """
-        if self.scheduling_mode == 'RMC':
-            rb, rb_pos, modulation = rb_config
-
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:RMC:UL {},{},'
-                   '{}'.format(self._bts, rb, rb_pos, modulation))
-            self._cmw.send_and_recv(cmd)
-
-        elif self.scheduling_mode == 'UDCH':
-            rb, start_rb, modulation, tbs = rb_config
-
-            self.validate_rb(rb)
-
-            if not isinstance(modulation, ModulationType):
-                raise ValueError('Modulation should be of type '
-                                 'ModulationType.')
-            cmd = ('CONFigure:LTE:SIGN:CONNection:{}:UDCHannels:UL {},{},'
-                   '{},{}'.format(self._bts, rb, start_rb, modulation.value,
-                                  tbs))
-            self._cmw.send_and_recv(cmd)
-
-    def validate_rb(self, rb):
-        """Validates if rb is within the limits for bandwidth set.
-
-        Args:
-            rb: No. of resource blocks.
-
-        Raises:
-            ValueError if rb out of range.
-        """
-        bandwidth = self.bandwidth
-
-        if bandwidth == LteBandwidth.BANDWIDTH_1MHz.value:
-            if not 0 <= rb <= 6:
-                raise ValueError('RB should be between 0 to 6 inclusive'
-                                 ' for 1.4Mhz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_3MHz.value:
-            if not 0 <= rb <= 10:
-                raise ValueError('RB should be between 0 to 10 inclusive'
-                                 ' for 3 Mhz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_5MHz.value:
-            if not 0 <= rb <= 25:
-                raise ValueError('RB should be between 0 to 25 inclusive'
-                                 ' for 5 Mhz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_10MHz.value:
-            if not 0 <= rb <= 50:
-                raise ValueError('RB should be between 0 to 50 inclusive'
-                                 ' for 10 Mhz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_15MHz.value:
-            if not 0 <= rb <= 75:
-                raise ValueError('RB should be between 0 to 75 inclusive'
-                                 ' for 15 Mhz.')
-        elif bandwidth == LteBandwidth.BANDWIDTH_20MHz.value:
-            if not 0 <= rb <= 100:
-                raise ValueError('RB should be between 0 to 100 inclusive'
-                                 ' for 20 Mhz.')
-
-    @property
-    def rb_position_dl(self):
-        """Gets the position of the allocated down link resource blocks within
-        the channel band-width.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:DL?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_position_dl.setter
-    def rb_position_dl(self, rbpos):
-        """Selects the position of the allocated down link resource blocks
-        within the channel band-width
-
-        Args:
-            rbpos: position of resource blocks.
-        """
-        if not isinstance(rbpos, RbPosition):
-            raise ValueError('rbpos should be the instance of RbPosition.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:DL {}'.format(
-            self._bts, rbpos.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def rb_position_ul(self):
-        """Gets the position of the allocated up link resource blocks within
-        the channel band-width.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:UL?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @rb_position_ul.setter
-    def rb_position_ul(self, rbpos):
-        """Selects the position of the allocated up link resource blocks
-        within the channel band-width.
-
-        Args:
-            rbpos: position of resource blocks.
-        """
-        if not isinstance(rbpos, RbPosition):
-            raise ValueError('rbpos should be the instance of RbPosition.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:RMC:RBPosition:UL {}'.format(
-            self._bts, rbpos.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dci_format(self):
-        """Gets the downlink control information (DCI) format."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:DCIFormat?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dci_format.setter
-    def dci_format(self, dci_format):
-        """Selects the downlink control information (DCI) format.
-
-        Args:
-            dci_format: supported dci.
-        """
-        if not isinstance(dci_format, DciFormat):
-            raise ValueError('dci_format should be the instance of DciFormat.')
-
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:DCIFormat {}'.format(
-            self._bts, dci_format)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def dl_antenna(self):
-        """Gets dl antenna count of cell."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:NENBantennas?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @dl_antenna.setter
-    def dl_antenna(self, num_antenna):
-        """Sets the dl antenna count of cell.
-
-        Args:
-            num_antenna: Count of number of dl antennas to use.
-        """
-        if not isinstance(num_antenna, MimoModes):
-            raise ValueError('num_antenna should be an instance of MimoModes.')
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:NENBantennas {}'.format(
-            self._bts, num_antenna)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def reduced_pdcch(self):
-        """Gets the reduction of PDCCH resources state."""
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:PDCCh:RPDCch?'.format(
-            self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @reduced_pdcch.setter
-    def reduced_pdcch(self, state):
-        """Sets the reduction of PDCCH resources state.
-
-        Args:
-            state: ON/OFF.
-        """
-        cmd = 'CONFigure:LTE:SIGN:CONNection:{}:PDCCh:RPDCch {}'.format(
-            self._bts, state.value)
-        self._cmw.send_and_recv(cmd)
-
-    def tpc_power_control(self, set_type):
-        """Set and execute the Up Link Power Control via TPC.
-
-        Args:
-            set_type: Type of tpc power control.
-        """
-
-        if not isinstance(set_type, TpcPowerControl):
-            raise ValueError('set_type should be the instance of '
-                             'TpCPowerControl.')
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:SET {}'.format(
-            self._bts, set_type.value)
-        self._cmw.send_and_recv(cmd)
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:PEXecute'.format(self._bts)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def tpc_closed_loop_target_power(self):
-        """Gets the target powers for power control with the TPC setup."""
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:CLTPower?'.format(self._bts)
-        return self._cmw.send_and_recv(cmd)
-
-    @tpc_closed_loop_target_power.setter
-    def tpc_closed_loop_target_power(self, cltpower):
-        """Sets the target powers for power control with the TPC setup.
-
-        Args:
-            tpower: Target power.
-        """
-        cmd = 'CONFigure:LTE:SIGN:UL:{}:PUSCh:TPC:CLTPower {}'.format(
-            self._bts, cltpower)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def drx_connected_mode(self):
-        """ Gets the Connected DRX LTE cell parameter
-
-        Args:
-            None
-
-        Returns:
-            DRX connected mode (OFF, AUTO, MANUAL)
-        """
-        raise NotImplementedError()
-
-    @drx_connected_mode.setter
-    def drx_connected_mode(self, mode):
-        """  Sets the Connected DRX LTE cell parameter
-
-        Args:
-            mode: DRX Connected mode
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_on_duration_timer(self):
-        """ Gets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            None
-
-        Returns:
-            DRX mode duration timer
-        """
-        raise NotImplementedError()
-
-    @drx_on_duration_timer.setter
-    def drx_on_duration_timer(self, time):
-        """ Sets the amount of PDCCH subframes to wait for data after
-            waking up from a DRX cycle
-
-        Args:
-            timer: Length of interval to wait for user data to be transmitted
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_inactivity_timer(self):
-        """ Gets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            None
-
-        Returns:
-            DRX mode inactivity timer
-        """
-        raise NotImplementedError()
-
-    @drx_inactivity_timer.setter
-    def drx_inactivity_timer(self, time):
-        """ Sets the number of PDCCH subframes to wait before entering DRX mode
-
-        Args:
-            timer: Length of the interval to wait
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_retransmission_timer(self):
-        """ Gets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            None
-
-        Returns:
-            Number of PDCCH subframes to wait for retransmission
-        """
-        raise NotImplementedError()
-
-    @drx_retransmission_timer.setter
-    def drx_retransmission_timer(self, time):
-        """ Sets the number of consecutive PDCCH subframes to wait
-        for retransmission
-
-        Args:
-            time: Number of PDCCH subframes to wait
-            for retransmission
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_long_cycle(self):
-        """ Gets the amount of subframes representing a DRX long cycle
-
-        Args:
-            None
-
-        Returns:
-            The amount of subframes representing one long DRX cycle.
-            One cycle consists of DRX sleep + DRX on duration
-        """
-        raise NotImplementedError()
-
-    @drx_long_cycle.setter
-    def drx_long_cycle(self, time):
-        """ Sets the amount of subframes representing a DRX long cycle
-
-        Args:
-            long_cycle: The amount of subframes representing one long DRX cycle.
-                One cycle consists of DRX sleep + DRX on duration
-
-        Returns:
-            None
-        """
-        raise NotImplementedError()
-
-    @property
-    def drx_long_cycle_offset(self):
-        """ Gets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            None
-
-        Returns:
-            Long cycle offset
-        """
-        raise NotImplementedError()
-
-    @drx_long_cycle_offset.setter
-    def drx_long_cycle_offset(self, offset):
-        """ Sets the offset used to determine long cycle starting
-        subframe
-
-        Args:
-            offset: Number in range 0...(long cycle - 1)
-        """
-        raise NotImplementedError()
-
-
-
-class LteMeasurement(object):
-
-    def __init__(self, cmw):
-        self._cmw = cmw
-
-    def intitilize_measurement(self):
-        """Initialize measurement modules."""
-        self._cmw.send_and_recv('INIT:LTE:MEAS:MEValuation')
-
-    @property
-    def measurement_repetition(self):
-        """Returns the measurement repetition mode that has been set."""
-        return self._cmw.send_and_recv(
-            'CONFigure:LTE:MEAS:MEValuation:REPetition?')
-
-    @measurement_repetition.setter
-    def measurement_repetition(self, mode):
-        """Sets the mode for measuring power levels.
-
-        Args:
-            mode: Single shot/continuous.
-        """
-        if not isinstance(mode, RepetitionMode):
-            raise ValueError('mode should be the instance of Repetition Mode')
-
-        cmd = 'CONFigure:LTE:MEAS:MEValuation:REPetition {}'.format(mode.value)
-        self._cmw.send_and_recv(cmd)
-
-    @property
-    def query_measurement_state(self):
-        """Returns the states and sub states of measurement."""
-        return self._cmw.send_and_recv('FETCh:LTE:MEAS:MEValuation:STATe:ALL?')
-
-    @property
-    def measure_tx_power(self):
-        """Return the current Tx power measurement."""
-        return self._cmw.send_and_recv(
-            'FETCh:LTE:MEAS:MEValuation:PMONitor:AVERage?')
-
-    def stop_measurement(self):
-        """Stops the on-going measurement.
-        This function call does not free up resources allocated for
-        measurement. Instead it moves from RUN to RDY state.
-        """
-        self._cmw.send_and_recv('STOP:LTE:MEAS:MEValuation')
-
-    def abort_measurement(self):
-        """Aborts the measurement abruptly.
-        This function call will free up the resources allocated for
-        measurement and all the results will be wiped off.
-        """
-        self._cmw.send_and_recv('ABORt:LTE:MEAS:MEValuation')
-
-
-class CmwError(Exception):
-    """Class to raise exceptions related to cmw."""
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmw500_cellular_simulator.py b/src/antlion/controllers/rohdeschwarz_lib/cmw500_cellular_simulator.py
deleted file mode 100644
index a65042d..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmw500_cellular_simulator.py
+++ /dev/null
@@ -1,579 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import time
-
-from antlion.controllers.rohdeschwarz_lib import cmw500
-from antlion.controllers import cellular_simulator as cc
-from antlion.controllers.cellular_lib import LteSimulation
-
-CMW_TM_MAPPING = {
-    LteSimulation.TransmissionMode.TM1: cmw500.TransmissionModes.TM1,
-    LteSimulation.TransmissionMode.TM2: cmw500.TransmissionModes.TM2,
-    LteSimulation.TransmissionMode.TM3: cmw500.TransmissionModes.TM3,
-    LteSimulation.TransmissionMode.TM4: cmw500.TransmissionModes.TM4,
-    LteSimulation.TransmissionMode.TM7: cmw500.TransmissionModes.TM7,
-    LteSimulation.TransmissionMode.TM8: cmw500.TransmissionModes.TM8,
-    LteSimulation.TransmissionMode.TM9: cmw500.TransmissionModes.TM9
-}
-
-CMW_SCH_MAPPING = {
-    LteSimulation.SchedulingMode.STATIC: cmw500.SchedulingMode.USERDEFINEDCH
-}
-
-CMW_MIMO_MAPPING = {
-    LteSimulation.MimoMode.MIMO_1x1: cmw500.MimoModes.MIMO1x1,
-    LteSimulation.MimoMode.MIMO_2x2: cmw500.MimoModes.MIMO2x2,
-    LteSimulation.MimoMode.MIMO_4x4: cmw500.MimoModes.MIMO4x4
-}
-
-# get mcs vs tbsi map with 256-qam disabled(downlink)
-get_mcs_tbsi_map_dl = {
-    cmw500.ModulationType.QPSK: {
-        0: 0,
-        1: 1,
-        2: 2,
-        3: 3,
-        4: 4,
-        5: 5,
-        6: 6,
-        7: 7,
-        8: 8,
-        9: 9
-    },
-    cmw500.ModulationType.Q16: {
-        10: 9,
-        11: 10,
-        12: 11,
-        13: 12,
-        14: 13,
-        15: 14,
-        16: 15
-    },
-    cmw500.ModulationType.Q64: {
-        17: 15,
-        18: 16,
-        19: 17,
-        20: 18,
-        21: 19,
-        22: 20,
-        23: 21,
-        24: 22,
-        25: 23,
-        26: 24,
-        27: 25,
-        28: 26
-    }
-}
-
-# get mcs vs tbsi map with 256-qam enabled(downlink)
-get_mcs_tbsi_map_for_256qam_dl = {
-    cmw500.ModulationType.QPSK: {
-        0: 0,
-        1: 2,
-        2: 4,
-        3: 6,
-        4: 8,
-    },
-    cmw500.ModulationType.Q16: {
-        5: 10,
-        6: 11,
-        7: 12,
-        8: 13,
-        9: 14,
-        10: 15
-    },
-    cmw500.ModulationType.Q64: {
-        11: 16,
-        12: 17,
-        13: 18,
-        14: 19,
-        15: 20,
-        16: 21,
-        17: 22,
-        18: 23,
-        19: 24
-    },
-    cmw500.ModulationType.Q256: {
-        20: 25,
-        21: 27,
-        22: 28,
-        23: 29,
-        24: 30,
-        25: 31,
-        26: 32,
-        27: 33
-    }
-}
-
-# get mcs vs tbsi map (uplink)
-get_mcs_tbsi_map_ul = {
-    cmw500.ModulationType.QPSK: {
-        0: 0,
-        1: 1,
-        2: 2,
-        3: 3,
-        4: 4,
-        5: 5,
-        6: 6,
-        7: 7,
-        8: 8,
-        9: 9
-    },
-    cmw500.ModulationType.Q16: {
-        10: 10,
-        11: 10,
-        12: 11,
-        13: 12,
-        14: 13,
-        15: 14,
-        16: 15,
-        17: 16,
-        18: 17,
-        19: 18,
-        20: 19,
-        21: 19,
-        22: 20,
-        23: 21,
-        24: 22,
-        25: 23,
-        26: 24,
-        27: 25,
-        28: 26
-    }
-}
-
-
-class CMW500CellularSimulator(cc.AbstractCellularSimulator):
-    """ A cellular simulator for telephony simulations based on the CMW 500
-    controller. """
-
-    # The maximum number of carriers that this simulator can support for LTE
-    LTE_MAX_CARRIERS = 1
-
-    def __init__(self, ip_address, port):
-        """ Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of the CMW500
-            port: the port number for the CMW500 controller
-        """
-        super().__init__()
-
-        try:
-            self.cmw = cmw500.Cmw500(ip_address, port)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('Could not connect to CMW500.')
-
-        self.bts = None
-        self.dl_modulation = None
-        self.ul_modulation = None
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        self.cmw.disconnect()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        self.cmw.connection_type = cmw500.ConnectionType.DAU
-        self.bts = [self.cmw.get_base_station()]
-        self.cmw.switch_lte_signalling(cmw500.LteState.LTE_ON)
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated band combination.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        self.num_carriers = len(bands)
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        if enabled:
-            self.cmw.rrc_connection = cmw500.RrcState.RRC_OFF
-            self.cmw.rrc_connection_timer = time
-        else:
-            self.cmw.rrc_connection = cmw500.RrcState.RRC_ON
-
-    def set_band(self, bts_index, band):
-        """ Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            band: the new band
-        """
-        bts = self.bts[bts_index]
-        bts.duplex_mode = self.get_duplex_mode(band)
-        band = 'OB' + band
-        bts.band = band
-        self.log.debug('Band set to {}'.format(band))
-
-    def get_duplex_mode(self, band):
-        """ Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number
-
-        Returns:
-            an variable of class DuplexMode indicating if band is FDD or TDD
-        """
-        if 33 <= int(band) <= 46:
-            return cmw500.DuplexMode.TDD
-        else:
-            return cmw500.DuplexMode.FDD
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        bts = self.bts[bts_index]
-        if input_power > 23:
-            self.log.warning('Open loop supports-50dBm to 23 dBm. '
-                             'Setting it to max power 23 dBm')
-            input_power = 23
-        bts.uplink_power_control = input_power
-        bts.tpc_power_control = cmw500.TpcPowerControl.CLOSED_LOOP
-        bts.tpc_closed_loop_target_power = input_power
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        bts = self.bts[bts_index]
-        bts.downlink_power_level = output_power
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """ Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tdd_config: the new tdd configuration number
-        """
-        self.bts[bts_index].uldl_configuration = tdd_config
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number
-        """
-        if not 0 <= ssf_config <= 9:
-            raise ValueError('The Special Sub-Frame configuration has to be a '
-                             'number between 0 and 9.')
-
-        self.bts[bts_index].tdd_special_subframe = ssf_config
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth
-        """
-        bts = self.bts[bts_index]
-
-        if bandwidth == 20:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_20MHz
-        elif bandwidth == 15:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_15MHz
-        elif bandwidth == 10:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_10MHz
-        elif bandwidth == 5:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_5MHz
-        elif bandwidth == 3:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_3MHz
-        elif bandwidth == 1.4:
-            bts.bandwidth = cmw500.LteBandwidth.BANDWIDTH_1MHz
-        else:
-            msg = 'Bandwidth {} MHz is not valid for LTE'.format(bandwidth)
-            raise ValueError(msg)
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number
-        """
-        bts = self.bts[bts_index]
-        bts.dl_channel = channel_number
-        self.log.debug('Downlink Channel set to {}'.format(bts.dl_channel))
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """ Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        bts = self.bts[bts_index]
-        mimo_mode = CMW_MIMO_MAPPING[mimo_mode]
-        if mimo_mode == cmw500.MimoModes.MIMO1x1:
-            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN1x1)
-            bts.dl_antenna = cmw500.MimoModes.MIMO1x1
-
-        elif mimo_mode == cmw500.MimoModes.MIMO2x2:
-            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN2x2)
-            bts.dl_antenna = cmw500.MimoModes.MIMO2x2
-
-        elif mimo_mode == cmw500.MimoModes.MIMO4x4:
-            self.cmw.configure_mimo_settings(cmw500.MimoScenario.SCEN4x4)
-            bts.dl_antenna = cmw500.MimoModes.MIMO4x4
-        else:
-            raise RuntimeError('The requested MIMO mode is not supported.')
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """ Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tmode: the new transmission mode
-        """
-        bts = self.bts[bts_index]
-
-        tmode = CMW_TM_MAPPING[tmode]
-
-        if (tmode in [
-                cmw500.TransmissionModes.TM1, cmw500.TransmissionModes.TM7
-        ] and bts.dl_antenna == cmw500.MimoModes.MIMO1x1.value):
-            bts.transmode = tmode
-        elif (tmode.value in cmw500.TransmissionModes.__members__
-              and bts.dl_antenna == cmw500.MimoModes.MIMO2x2.value):
-            bts.transmode = tmode
-        elif (tmode in [
-                cmw500.TransmissionModes.TM2, cmw500.TransmissionModes.TM3,
-                cmw500.TransmissionModes.TM4, cmw500.TransmissionModes.TM9
-        ] and bts.dl_antenna == cmw500.MimoModes.MIMO4x4.value):
-            bts.transmode = tmode
-
-        else:
-            raise ValueError('Transmission modes should support the current '
-                             'mimo mode')
-
-    def set_scheduling_mode(self,
-                            bts_index,
-                            scheduling,
-                            mcs_dl=None,
-                            mcs_ul=None,
-                            nrb_dl=None,
-                            nrb_ul=None):
-        """ Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            scheduling: the new scheduling mode.
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        bts = self.bts[bts_index]
-        bts.reduced_pdcch = cmw500.ReducedPdcch.ON
-
-        scheduling = CMW_SCH_MAPPING[scheduling]
-        bts.scheduling_mode = scheduling
-
-        if not (self.ul_modulation and self.dl_modulation):
-            raise ValueError('Modulation should be set prior to scheduling '
-                             'call')
-
-        if scheduling == cmw500.SchedulingMode.RMC:
-
-            if not nrb_ul and nrb_dl:
-                raise ValueError('nrb_ul and nrb dl should not be none')
-
-            bts.rb_configuration_ul = (nrb_ul, self.ul_modulation, 'KEEP')
-            self.log.info('ul rb configurations set to {}'.format(
-                bts.rb_configuration_ul))
-
-            time.sleep(1)
-
-            self.log.debug('Setting rb configurations for down link')
-            bts.rb_configuration_dl = (nrb_dl, self.dl_modulation, 'KEEP')
-            self.log.info('dl rb configurations set to {}'.format(
-                bts.rb_configuration_ul))
-
-        elif scheduling == cmw500.SchedulingMode.USERDEFINEDCH:
-
-            if not all([nrb_ul, nrb_dl, mcs_dl, mcs_ul]):
-                raise ValueError('All parameters are mandatory.')
-
-            tbs = get_mcs_tbsi_map_ul[self.ul_modulation][mcs_ul]
-
-            bts.rb_configuration_ul = (nrb_ul, 0, self.ul_modulation, tbs)
-            self.log.info('ul rb configurations set to {}'.format(
-                bts.rb_configuration_ul))
-
-            time.sleep(1)
-
-            if self.dl_256_qam_enabled:
-                tbs = get_mcs_tbsi_map_for_256qam_dl[
-                    self.dl_modulation][mcs_dl]
-            else:
-                tbs = get_mcs_tbsi_map_dl[self.dl_modulation][mcs_dl]
-
-            bts.rb_configuration_dl = (nrb_dl, 0, self.dl_modulation, tbs)
-            self.log.info('dl rb configurations set to {}'.format(
-                bts.rb_configuration_dl))
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        self.log.info('Set 256 QAM DL MCS enabled: ' + str(enabled))
-        self.dl_modulation = cmw500.ModulationType.Q256 if enabled \
-            else cmw500.ModulationType.Q64
-        self.dl_256_qam_enabled = enabled
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        self.log.info('Set 64 QAM UL MCS enabled: ' + str(enabled))
-        self.ul_modulation = cmw500.ModulationType.Q64 if enabled \
-            else cmw500.ModulationType.Q16
-        self.ul_64_qam_enabled = enabled
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        # TODO (b/143918664): CMW500 doesn't have an equivalent setting.
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        # TODO (b/143497738): implement.
-        self.log.error('Setting CFI is not yet implemented in the CMW500 '
-                       'controller.')
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        # TODO (b/146068532): implement.
-        self.log.error('Setting the paging cycle duration is not yet '
-                       'implemented in the CMW500 controller.')
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        self.log.error('Configuring the PHICH resource setting is not yet '
-                       'implemented in the CMW500 controller.')
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-        raise NotImplementedError()
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.cmw.wait_for_attached_state(timeout=timeout)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'attached state before '
-                                            'the timeout period ended.')
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.cmw.wait_for_rrc_state(cmw500.LTE_CONN_RESP, timeout=timeout)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'Communication state before '
-                                            'the timeout period ended.')
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        try:
-            self.cmw.wait_for_rrc_state(cmw500.LTE_IDLE_RESP, timeout=timeout)
-        except cmw500.CmwError:
-            raise cc.CellularSimulatorError('The phone was not in '
-                                            'Idle state before '
-                                            'the timeout period ended.')
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        self.cmw.detach()
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        raise NotImplementedError()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        raise NotImplementedError()
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmx500.py b/src/antlion/controllers/rohdeschwarz_lib/cmx500.py
deleted file mode 100644
index ebdc9f9..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmx500.py
+++ /dev/null
@@ -1,1067 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-import sys
-
-from enum import Enum
-from os import path
-from antlion.controllers import abstract_inst
-
-DEFAULT_XLAPI_PATH = '/home/mobileharness/Rohde-Schwarz/XLAPI/latest/venv/lib/python3.7/site-packages'
-DEFAULT_LTE_STATE_CHANGE_TIMER = 10
-DEFAULT_CELL_SWITCH_ON_TIMER = 60
-DEFAULT_ENDC_TIMER = 300
-
-logger = logging.getLogger('Xlapi_cmx500')
-
-LTE_CELL_PROPERTIES = [
-    'band',
-    'bandwidth',
-    'dl_earfcn',
-    'ul_earfcn',
-    'total_dl_power',
-    'p_b',
-    'dl_epre',
-    'ref_signal_power',
-    'm',
-    'beamforming_antenna_ports',
-    'p0_nominal_pusch',
-]
-
-LTE_MHZ_UPPER_BOUND_TO_RB = [
-    (1.5, 6),
-    (4.0, 15),
-    (7.5, 25),
-    (12.5, 50),
-    (17.5, 75),
-]
-
-class DciFormat(Enum):
-    """Support DCI Formats for MIMOs."""
-    DCI_FORMAT_0 = 1
-    DCI_FORMAT_1 = 2
-    DCI_FORMAT_1A = 3
-    DCI_FORMAT_1B = 4
-    DCI_FORMAT_1C = 5
-    DCI_FORMAT_2 = 6
-    DCI_FORMAT_2A = 7
-    DCI_FORMAT_2B = 8
-    DCI_FORMAT_2C = 9
-    DCI_FORMAT_2D = 10
-
-
-class DuplexMode(Enum):
-    """Duplex Modes."""
-    FDD = 'FDD'
-    TDD = 'TDD'
-    DL_ONLY = 'DL_ONLY'
-
-
-class LteBandwidth(Enum):
-    """Supported LTE bandwidths."""
-    BANDWIDTH_1MHz = 6 # MHZ_1 is RB_6
-    BANDWIDTH_3MHz = 15 # MHZ_3 is RB_15
-    BANDWIDTH_5MHz = 25 # MHZ_5 is RB_25
-    BANDWIDTH_10MHz = 50 # MHZ_10 is RB_50
-    BANDWIDTH_15MHz = 75 # MHZ_15 is RB_75
-    BANDWIDTH_20MHz = 100 # MHZ_20 is RB_100
-
-
-class LteState(Enum):
-    """LTE ON and OFF."""
-    LTE_ON = 'ON'
-    LTE_OFF = 'OFF'
-
-
-class MimoModes(Enum):
-    """MIMO Modes dl antennas."""
-    MIMO1x1 = 1
-    MIMO2x2 = 2
-    MIMO4x4 = 4
-
-
-class ModulationType(Enum):
-    """Supported Modulation Types."""
-    Q16 = 0
-    Q64 = 1
-    Q256 = 2
-
-
-class NasState(Enum):
-    """NAS state between callbox and dut."""
-    DEREGISTERED = 'OFF'
-    EMM_REGISTERED = 'EMM'
-    MM5G_REGISTERED = 'NR'
-
-
-class RrcState(Enum):
-    """States to enable/disable rrc."""
-    RRC_ON = 'ON'
-    RRC_OFF = 'OFF'
-
-
-class RrcConnectionState(Enum):
-    """RRC Connection states, describes possible DUT RRC connection states."""
-    IDLE = 1
-    IDLE_PAGING = 2
-    IDLE_CONNECTION_ESTABLISHMENT = 3
-    CONNECTED = 4
-    CONNECTED_CONNECTION_REESTABLISHMENT = 5
-    CONNECTED_SCG_FAILURE = 6
-    CONNECTED_HANDOVER = 7
-    CONNECTED_CONNECTION_RELEASE = 8
-
-
-class SchedulingMode(Enum):
-    """Supported scheduling modes."""
-    USERDEFINEDCH = 'UDCHannels'
-
-
-class TransmissionModes(Enum):
-    """Supported transmission modes."""
-    TM1 = 1
-    TM2 = 2
-    TM3 = 3
-    TM4 = 4
-    TM7 = 7
-    TM8 = 8
-    TM9 = 9
-
-
-# For mimo 1x1, also set_num_crs_antenna_ports to 1
-MIMO_MAX_LAYER_MAPPING = {
-    MimoModes.MIMO1x1: 2,
-    MimoModes.MIMO2x2: 2,
-    MimoModes.MIMO4x4: 4,
-}
-
-
-class Cmx500(abstract_inst.SocketInstrument):
-
-    def __init__(self, ip_addr, port, xlapi_path=DEFAULT_XLAPI_PATH):
-        """Init method to setup variables for the controller.
-
-        Args:
-              ip_addr: Controller's ip address.
-              port: Port.
-        """
-
-        # keeps the socket connection for debug purpose for now
-        super().__init__(ip_addr, port)
-        if not xlapi_path in sys.path:
-            sys.path.insert(0, xlapi_path)
-        self._initial_xlapi()
-        self._settings.system.set_instrument_address(ip_addr)
-        logger.info('The instrument address is {}'.format(
-                self._settings.system.get_instrument_address()))
-
-        self.bts = []
-
-        # Stops all active cells if there is any
-        self.disconnect()
-
-        # loads cell default settings from parameter file if there is one
-        default_setup_path = 'default_cell_setup.rsxp'
-        if path.exists(default_setup_path):
-            self._settings.session.set_test_param_files(default_setup_path)
-
-        self.dut = self._network.get_dut()
-        self.lte_cell = self._network.create_lte_cell('ltecell0')
-        self.nr_cell = self._network.create_nr_cell('nrcell0')
-        self._config_antenna_ports()
-        self.lte_rrc_state_change_timer = DEFAULT_LTE_STATE_CHANGE_TIMER
-        self.rrc_state_change_time_enable = False
-        self.cell_switch_on_timer = DEFAULT_CELL_SWITCH_ON_TIMER
-
-    # _config_antenna_ports for the special RF connection with cmw500 + cmx500.
-    def _config_antenna_ports(self):
-        from rs_mrt.testenvironment.signaling.sri.rat.common import CsiRsAntennaPorts
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import CrsAntennaPorts
-
-        max_csi_rs_ports = CsiRsAntennaPorts.NUMBER_CSI_RS_ANTENNA_PORTS_FOUR
-        max_crs_ports = CrsAntennaPorts.NUMBER_CRS_ANTENNA_PORTS_FOUR
-
-        lte_cell_max_config = self.lte_cell.stub.GetMaximumConfiguration()
-        lte_cell_max_config.csi_rs_antenna_ports = max_csi_rs_ports
-        lte_cell_max_config.crs_antenna_ports = max_crs_ports
-        self.lte_cell.stub.SetMaximumConfiguration(lte_cell_max_config)
-
-        nr_cell_max_config = self.nr_cell.stub.GetMaximumConfiguration()
-        nr_cell_max_config.csi_rs_antenna_ports = max_csi_rs_ports
-        self.nr_cell.stub.SetMaximumConfiguration(nr_cell_max_config)
-
-    def _initial_xlapi(self):
-        import xlapi
-        import mrtype
-        from xlapi import network
-        from xlapi import settings
-
-        self._xlapi = xlapi
-        self._network = network
-        self._settings = settings
-
-    def configure_mimo_settings(self, mimo, bts_index=0):
-        """Sets the mimo scenario for the test.
-
-        Args:
-            mimo: mimo scenario to set.
-        """
-        self.bts[bts_index].set_mimo_mode(mimo)
-
-    @property
-    def connection_type(self):
-        """Gets the connection type applied in callbox."""
-        state = self.dut.state.rrc_connection_state
-        return RrcConnectionState(state.value)
-
-    def create_base_station(self, cell):
-        """Creates the base station object with cell and current object.
-
-        Args:
-            cell: the XLAPI cell.
-
-        Returns:
-            base station object.
-        Raise:
-            CmxError if the cell is neither LTE nor NR.
-        """
-        from xlapi.lte_cell import LteCell
-        from xlapi.nr_cell import NrCell
-        if isinstance(cell, LteCell):
-            return LteBaseStation(self, cell)
-        elif isinstance(cell, NrCell):
-            return NrBaseStation(self, cell)
-        else:
-            raise CmxError('The cell type is neither LTE nor NR')
-
-    def detach(self):
-        """Detach callbox and controller."""
-        for bts in self.bts:
-            bts.stop()
-
-    def disable_packet_switching(self):
-        """Disable packet switching in call box."""
-        raise NotImplementedError()
-
-    def disconnect(self):
-        """Disconnect controller from device and switch to local mode."""
-
-        # Stops all lte and nr_cell
-        for cell in self._network.get_all_lte_cells():
-            if cell.is_on():
-                cell.stop()
-
-        for cell in self._network.get_all_nr_cells():
-            if cell.is_on():
-                cell.stop()
-        self.bts.clear()
-        self._network.reset()
-
-    def enable_packet_switching(self):
-        """Enable packet switching in call box."""
-        raise NotImplementedError()
-
-    def get_base_station(self, bts_index=0):
-        """Gets the base station object based on bts num. By default
-        bts_index set to 0 (PCC).
-
-        Args:
-            bts_num: base station identifier
-
-        Returns:
-            base station object.
-        """
-        return self.bts[bts_index]
-
-    def get_network(self):
-        """ Gets the network object from cmx500 object."""
-        return self._network
-
-    def init_lte_measurement(self):
-        """Gets the class object for lte measurement which can be used to
-        initiate measurements.
-
-        Returns:
-            lte measurement object.
-        """
-        raise NotImplementedError()
-
-    def reset(self):
-        """System level reset."""
-
-        self.disconnect()
-
-    @property
-    def rrc_connection(self):
-        """Gets the RRC connection state."""
-        return self.dut.state.rrc.is_connected
-
-    def set_timer(self, timeout):
-        """Sets timer for the Cmx500 class."""
-        self.rrc_state_change_time_enable = True
-        self.lte_rrc_state_change_timer = timeout
-
-    def switch_lte_signalling(self, state):
-        """ Turns LTE signalling ON/OFF.
-
-        Args:
-            state: an instance of LteState indicating the state to which LTE
-                   signal has to be set.
-        """
-        if not isinstance(state, LteState):
-            raise ValueError('state should be the instance of LteState.')
-
-        if self.bts:
-            self.disconnect()
-        self.bts.append(LteBaseStation(self, self.lte_cell))
-        # Switch on the primary Lte cell for on state and switch all lte cells
-        # if the state is off state
-        if state.value == 'ON':
-            self.bts[0].start()
-            cell_status = self.bts[0].wait_cell_on(self.cell_switch_on_timer)
-            if cell_status:
-                logger.info('The LTE pcell status is on')
-            else:
-                raise CmxError('The LTE pcell cannot be switched on')
-        else:
-            for bts in self.bts:
-                if isinstance(bts, LteBaseStation):
-                    bts.stop()
-                logger.info(
-                    'The LTE cell status is {} after stop'.format(bts.is_on()))
-
-    def switch_on_nsa_signalling(self):
-        if self.bts:
-            self.disconnect()
-        logger.info('Switches on NSA signalling')
-        self.bts.append(LteBaseStation(self, self.lte_cell))
-        self.bts.append(NrBaseStation(self, self.nr_cell))
-        self.bts[0].start()
-        lte_cell_status = self.bts[0].wait_cell_on(self.cell_switch_on_timer)
-        if lte_cell_status:
-            logger.info('The LTE pcell status is on')
-        else:
-            raise CmxError('The LTE pcell cannot be switched on')
-
-        self.bts[1].start()
-        nr_cell_status = self.bts[1].wait_cell_on(self.cell_switch_on_timer)
-        if nr_cell_status:
-            logger.info('The NR cell status is on')
-        else:
-            raise CmxError('The NR cell cannot be switched on')
-
-    def update_lte_cell_config(self, config):
-        """Updates lte cell settings with config."""
-        set_counts = 0
-        for property in LTE_CELL_PROPERTIES:
-            if property in config:
-                setter_name = 'set_' + property
-                setter = getattr(self.lte_cell, setter_name)
-                setter(config[property])
-                set_counts += 1
-        if set_counts < len(config):
-            logger.warning('Not all configs were set in update_cell_config')
-
-    @property
-    def use_carrier_specific(self):
-        """Gets current status of carrier specific duplex configuration."""
-        raise NotImplementedError()
-
-    @use_carrier_specific.setter
-    def use_carrier_specific(self, state):
-        """Sets the carrier specific duplex configuration.
-
-        Args:
-            state: ON/OFF UCS configuration.
-        """
-        raise NotImplementedError()
-
-    def wait_for_rrc_state(self, state, timeout=120):
-        """ Waits until a certain RRC state is set.
-
-        Args:
-            state: the RRC state that is being waited for.
-            timeout: timeout for phone to be in connected state.
-
-        Raises:
-            CmxError on time out.
-        """
-        is_idle = (state.value == 'OFF')
-        for idx in range(timeout):
-            time.sleep(1)
-            if self.dut.state.rrc.is_idle == is_idle:
-                logger.info('{} reached at {} s'.format(state.value, idx))
-                return True
-        error_message = 'Waiting for {} state timeout after {}'.format(
-                state.value, timeout)
-        logger.error(error_message)
-        raise CmxError(error_message)
-
-    def wait_until_attached(self, timeout=120):
-        """Waits until Lte attached.
-
-        Args:
-            timeout: timeout for phone to get attached.
-
-        Raises:
-            CmxError on time out.
-        """
-        try:
-            self.dut.signaling.wait_for_lte_attach(self.lte_cell, timeout)
-        except:
-            raise CmxError(
-                    'wait_until_attached timeout after {}'.format(timeout))
-
-
-class BaseStation(object):
-    """Class to interact with different the base stations."""
-
-    def __init__(self, cmx, cell):
-        """Init method to setup variables for base station.
-
-        Args:
-            cmx: Controller (Cmx500) object.
-            cell: The cell for the base station.
-        """
-
-        self._cell = cell
-        self._cmx = cmx
-        self._cc = cmx.dut.cc(cell)
-        self._network = cmx.get_network()
-
-    @property
-    def band(self):
-        """Gets the current band of cell.
-
-        Return:
-            the band number in int.
-        """
-        cell_band = self._cell.get_band()
-        return int(cell_band)
-
-    @property
-    def dl_power(self):
-        """Gets RSPRE level.
-
-        Return:
-            the power level in dbm.
-        """
-        return self._cell.get_total_dl_power().in_dBm()
-
-    @property
-    def duplex_mode(self):
-        """Gets current duplex of cell."""
-        band = self._cell.get_band()
-        if band.is_fdd():
-            return DuplexMode.FDD
-        if band.is_tdd():
-            return DuplexMode.TDD
-        if band.is_dl_only():
-            return DuplexMode.DL_ONLY
-
-    def is_on(self):
-        """Verifies if the cell is turned on.
-
-            Return:
-                boolean (if the cell is on).
-        """
-        return self._cell.is_on()
-
-    def set_band(self, band):
-        """Sets the Band of cell.
-
-        Args:
-            band: band of cell.
-        """
-        self._cell.set_band(band)
-        logger.info('The band is set to {} and is {} after setting'.format(
-                band, self.band))
-
-    def set_dl_mac_padding(self, state):
-        """Enables/Disables downlink padding at the mac layer.
-
-        Args:
-            state: a boolean
-        """
-        self._cc.set_dl_mac_padding(state)
-
-    def set_dl_power(self, pwlevel):
-        """Modifies RSPRE level.
-
-        Args:
-            pwlevel: power level in dBm.
-        """
-        self._cell.set_total_dl_power(pwlevel)
-
-    def set_ul_power(self, ul_power):
-        """Sets ul power
-
-        Args:
-            ul_power: the uplink power in dbm
-        """
-        self._cc.set_target_ul_power(ul_power)
-
-    def start(self):
-        """Starts the cell."""
-        self._cell.start()
-
-    def stop(self):
-        """Stops the cell."""
-        self._cell.stop()
-
-    def wait_cell_on(self, timeout):
-        """Waits the cell on.
-
-        Args:
-            timeout: the time for waiting the cell on.
-
-        Raises:
-            CmxError on time out.
-        """
-        waiting_time = 0
-        while waiting_time < timeout:
-            if self._cell.is_on():
-                return True
-            waiting_time += 1
-            time.sleep(1)
-        return self._cell.is_on()
-
-
-class LteBaseStation(BaseStation):
-    """ LTE base station."""
-
-    def __init__(self, cmx, cell):
-        """Init method to setup variables for the LTE base station.
-
-        Args:
-            cmx: Controller (Cmx500) object.
-            cell: The cell for the LTE base station.
-        """
-        from xlapi.lte_cell import LteCell
-        if not isinstance(cell, LteCell):
-            raise CmxError('The cell is not a LTE cell, LTE base station  fails'
-                           ' to create.')
-        super().__init__(cmx, cell)
-
-    def _config_scheduler(self, dl_mcs=None, dl_rb_alloc=None, dl_dci_ncce=None,
-        dl_dci_format=None, dl_tm=None, dl_num_layers=None, dl_mcs_table=None,
-        ul_mcs=None, ul_rb_alloc=None, ul_dci_ncce=None):
-
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import DciFormat
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import DlTransmissionMode
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import MaxLayersMIMO
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import McsTable
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import PdcchFormat
-
-        log_list = []
-        if dl_mcs:
-            log_list.append('dl_mcs: {}'.format(dl_mcs))
-        if ul_mcs:
-            log_list.append('ul_mcs: {}'.format(ul_mcs))
-        if dl_rb_alloc:
-            log_list.append('dl_rb_alloc: {}'.format(dl_rb_alloc))
-        if ul_rb_alloc:
-            log_list.append('ul_rb_alloc: {}'.format(ul_rb_alloc))
-        if dl_dci_ncce:
-            dl_dci_ncce = PdcchFormat(dl_dci_ncce)
-            log_list.append('dl_dci_ncce: {}'.format(dl_dci_ncce))
-        if ul_dci_ncce:
-            ul_dci_ncce = PdcchFormat(ul_dci_ncce)
-            log_list.append('ul_dci_ncce: {}'.format(ul_dci_ncce))
-        if dl_dci_format:
-            dl_dci_format = DciFormat(dl_dci_format)
-            log_list.append('dl_dci_format: {}'.format(dl_dci_format))
-        if dl_tm:
-            dl_tm = DlTransmissionMode(dl_tm.value)
-            log_list.append('dl_tm: {}'.format(dl_tm))
-        if dl_num_layers:
-            dl_num_layers = MaxLayersMIMO(dl_num_layers)
-            log_list.append('dl_num_layers: {}'.format(dl_num_layers))
-        if dl_mcs_table:
-            dl_mcs_table = McsTable(dl_mcs_table)
-            log_list.append('dl_mcs_table: {}'.format(dl_mcs_table))
-
-        is_on = self._cell.is_on()
-        num_crs_antenna_ports = self._cell.get_num_crs_antenna_ports()
-
-        # Sets num of crs antenna ports to 4 for configuring
-        if is_on:
-            self._cell.stop()
-            time.sleep(1)
-        self._cell.set_num_crs_antenna_ports(4)
-        scheduler = self._cmx.dut.get_scheduler(self._cell)
-        logger.info('configure scheduler for {}'.format(','.join(log_list)))
-        scheduler.configure_scheduler(
-                dl_mcs=dl_mcs, dl_rb_alloc=dl_rb_alloc, dl_dci_ncce=dl_dci_ncce,
-                dl_dci_format=dl_dci_format, dl_tm=dl_tm,
-                dl_num_layers=dl_num_layers, dl_mcs_table=dl_mcs_table,
-                ul_mcs=ul_mcs, ul_rb_alloc=ul_rb_alloc, ul_dci_ncce=ul_dci_ncce)
-        logger.info('Configure scheduler succeeds')
-
-        # Sets num of crs antenna ports back to previous value
-        self._cell.set_num_crs_antenna_ports(num_crs_antenna_ports)
-        self._network.apply_changes()
-
-        if is_on:
-            self._cell.start()
-
-    @property
-    def bandwidth(self):
-        """Get the channel bandwidth of the cell.
-
-        Return:
-            the number rb of the bandwidth.
-        """
-        return self._cell.get_bandwidth().num_rb
-
-    @property
-    def dl_channel(self):
-        """Gets the downlink channel of cell.
-
-        Return:
-            the downlink channel (earfcn) in int.
-        """
-        return int(self._cell.get_dl_earfcn())
-
-    @property
-    def dl_frequency(self):
-        """Get the downlink frequency of the cell."""
-        from mrtype.frequency import Frequency
-        return self._cell.get_dl_earfcn().to_freq().in_units(
-                Frequency.Units.GHz)
-
-    def _to_rb_bandwidth(self, bandwidth):
-        for idx in range(5):
-            if bandwidth < LTE_MHZ_UPPER_BOUND_TO_RB[idx][0]:
-                return LTE_MHZ_UPPER_BOUND_TO_RB[idx][1]
-        return 100
-
-    def set_bandwidth(self, bandwidth):
-        """Sets the channel bandwidth of the cell.
-
-        Args:
-            bandwidth: channel bandwidth of cell in MHz.
-        """
-        self._cell.set_bandwidth(self._to_rb_bandwidth(bandwidth))
-
-    def set_cell_frequency_band(self, tdd_cfg=None, ssf_cfg=None):
-        """Sets cell frequency band with tdd and ssf config.
-
-        Args:
-            tdd_cfg: the tdd subframe assignment config in number (from 0-6).
-            ssf_cfg: the special subframe pattern config in number (from 1-9).
-        """
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import SpecialSubframePattern
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import SubFrameAssignment
-        from rs_mrt.testenvironment.signaling.sri.rat.lte.config import CellFrequencyBand
-        from rs_mrt.testenvironment.signaling.sri.rat.lte.config import Tdd
-        tdd_subframe = None
-        ssf_pattern = None
-        if tdd_cfg:
-            tdd_subframe = SubFrameAssignment(tdd_cfg + 1)
-        if ssf_cfg:
-            ssf_pattern = SpecialSubframePattern(ssf_cfg)
-        tdd = Tdd(tdd_config=Tdd.TddConfigSignaling(
-                subframe_assignment=tdd_subframe,
-                special_subframe_pattern=ssf_pattern))
-        self._cell.stub.SetCellFrequencyBand(CellFrequencyBand(tdd=tdd))
-        self._network.apply_changes()
-
-    def set_cfi(self, cfi):
-        """Sets number of pdcch symbols (cfi).
-
-        Args:
-            cfi: the value of NumberOfPdcchSymbols
-        """
-        from rs_mrt.testenvironment.signaling.sri.rat.lte import NumberOfPdcchSymbols
-        from rs_mrt.testenvironment.signaling.sri.rat.lte.config import PdcchRegionReq
-
-        logger.info('The cfi enum to set is {}'.format(
-                NumberOfPdcchSymbols(cfi)))
-        req = PdcchRegionReq()
-        req.num_pdcch_symbols = NumberOfPdcchSymbols(cfi)
-        self._cell.stub.SetPdcchControlRegion(req)
-
-    def set_dci_format(self, dci_format):
-        """Selects the downlink control information (DCI) format.
-
-        Args:
-            dci_format: supported dci.
-        """
-        if not isinstance(dci_format, DciFormat):
-            raise CmxError('Wrong type for dci_format')
-        self._config_scheduler(dl_dci_format=dci_format.value)
-
-    def set_dl_channel(self, channel):
-        """Sets the downlink channel number of cell.
-
-        Args:
-            channel: downlink channel number of cell.
-        """
-        if self.dl_channel == channel:
-            logger.info('The dl_channel was at {}'.format(self.dl_channel))
-            return
-        self._cell.set_earfcn(channel)
-        logger.info('The dl_channel was set to {}'.format(self.dl_channel))
-
-    def set_dl_modulation_table(self, modulation):
-        """Sets down link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('The modulation is not the type of Modulation')
-        self._config_scheduler(dl_mcs_table=modulation.value)
-
-    def set_mimo_mode(self, mimo):
-        """Sets mimo mode for Lte scenario.
-
-        Args:
-            mimo: the mimo mode.
-        """
-        if not isinstance(mimo, MimoModes):
-            raise CmxError("Wrong type of mimo mode")
-
-        is_on = self._cell.is_on()
-        if is_on:
-            self._cell.stop()
-        self._cell.set_num_crs_antenna_ports(mimo.value)
-        self._config_scheduler(dl_num_layers=MIMO_MAX_LAYER_MAPPING[mimo])
-        if is_on:
-            self._cell.start()
-
-    def set_scheduling_mode(
-        self, mcs_dl=None, mcs_ul=None, nrb_dl=None, nrb_ul=None):
-        """Sets scheduling mode.
-
-        Args:
-            scheduling: the new scheduling mode.
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        self._config_scheduler(dl_mcs=mcs_dl, ul_mcs=mcs_ul, dl_rb_alloc=nrb_dl,
-                ul_rb_alloc=nrb_ul)
-
-    def set_ssf_config(self, ssf_config):
-        """Sets ssf subframe assignment with tdd_config.
-
-        Args:
-            ssf_config: the special subframe pattern config (from 1-9).
-        """
-        self.set_cell_frequency_band(ssf_cfg=ssf_config)
-
-    def set_tdd_config(self, tdd_config):
-        """Sets tdd subframe assignment with tdd_config.
-
-        Args:
-            tdd_config: the subframe assignemnt config (from 0-6).
-        """
-        self.set_cell_frequency_band(tdd_cfg=tdd_config)
-
-    def set_transmission_mode(self, transmission_mode):
-        """Sets transmission mode with schedular.
-
-        Args:
-            transmission_mode: the download link transmission mode.
-        """
-        if not isinstance(transmission_mode, TransmissionModes):
-            raise CmxError('Wrong type of the trasmission mode')
-        self._config_scheduler(dl_tm=transmission_mode)
-
-    def set_ul_channel(self, channel):
-        """Sets the up link channel number of cell.
-
-        Args:
-            channel: up link channel number of cell.
-        """
-        if self.ul_channel == channel:
-            logger.info('The ul_channel is at {}'.format(self.ul_channel))
-            return
-        self._cell.set_earfcn(channel)
-        logger.info('The dl_channel was set to {}'.format(self.ul_channel))
-
-    @property
-    def ul_channel(self):
-        """Gets the uplink channel of cell.
-
-        Return:
-            the uplink channel (earfcn) in int
-        """
-        return int(self._cell.get_ul_earfcn())
-
-    @property
-    def ul_frequency(self):
-        """Get the uplink frequency of the cell.
-
-        Return:
-            The uplink frequency in GHz.
-        """
-        from mrtype.frequency import Frequency
-        return self._cell.get_ul_earfcn().to_freq().in_units(
-                Frequency.Units.GHz)
-
-    def set_ul_modulation_table(self, modulation):
-        """Sets up link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('The modulation is not the type of Modulation')
-        if modulation == ModulationType.Q16:
-            self._cell.stub.SetPuschCommonConfig(False)
-        else:
-            self._cell.stub.SetPuschCommonConfig(True)
-
-
-class NrBaseStation(BaseStation):
-    """ NR base station."""
-
-    def __init__(self, cmx, cell):
-        """Init method to setup variables for the NR base station.
-
-        Args:
-            cmx: Controller (Cmx500) object.
-            cell: The cell for the NR base station.
-        """
-        from xlapi.nr_cell import NrCell
-        if not isinstance(cell, NrCell):
-            raise CmxError('the cell is not a NR cell, NR base station  fails'
-                           ' to creat.')
-
-        super().__init__(cmx, cell)
-
-    def _config_scheduler(self, dl_mcs=None, dl_mcs_table=None,
-                          dl_rb_alloc=None, dl_mimo_mode=None,
-                          ul_mcs=None, ul_mcs_table=None, ul_rb_alloc=None,
-                          ul_mimo_mode=None):
-
-        from rs_mrt.testenvironment.signaling.sri.rat.nr import McsTable
-
-        log_list = []
-        if dl_mcs:
-            log_list.append('dl_mcs: {}'.format(dl_mcs))
-        if ul_mcs:
-            log_list.append('ul_mcs: {}'.format(ul_mcs))
-
-        # If rb alloc is not a tuple, add 0 as start RBs for XLAPI NR scheduler
-        if dl_rb_alloc:
-            if not isinstance(dl_rb_alloc, tuple):
-                dl_rb_alloc = (0, dl_rb_alloc)
-            log_list.append('dl_rb_alloc: {}'.format(dl_rb_alloc))
-        if ul_rb_alloc:
-            if not isinstance(ul_rb_alloc, tuple):
-                ul_rb_alloc = (0, ul_rb_alloc)
-            log_list.append('ul_rb_alloc: {}'.format(ul_rb_alloc))
-        if dl_mcs_table:
-            dl_mcs_table = McsTable(dl_mcs_table)
-            log_list.append('dl_mcs_table: {}'.format(dl_mcs_table))
-        if ul_mcs_table:
-            ul_mcs_table = McsTable(ul_mcs_table)
-            log_list.append('ul_mcs_table: {}'.format(ul_mcs_table))
-        if dl_mimo_mode:
-            log_list.append('dl_mimo_mode: {}'.format(dl_mimo_mode))
-        if ul_mimo_mode:
-            log_list.append('ul_mimo_mode: {}'.format(ul_mimo_mode))
-
-        is_on = self._cell.is_on()
-        if is_on:
-            self._cell.stop()
-            time.sleep(1)
-        scheduler = self._cmx.dut.get_scheduler(self._cell)
-        logger.info('configure scheduler for {}'.format(','.join(log_list)))
-
-        scheduler.configure_ue_scheduler(
-                dl_mcs=dl_mcs, dl_mcs_table=dl_mcs_table,
-                dl_rb_alloc=dl_rb_alloc, dl_mimo_mode=dl_mimo_mode,
-                ul_mcs=ul_mcs, ul_mcs_table=ul_mcs_table,
-                ul_rb_alloc=ul_rb_alloc, ul_mimo_mode=ul_mimo_mode)
-        logger.info('Configure scheduler succeeds')
-        self._network.apply_changes()
-
-        if is_on:
-            self._cell.start()
-
-    def attach_as_secondary_cell(self, endc_timer=DEFAULT_ENDC_TIMER):
-        """Enable endc mode for NR cell.
-
-        Args:
-            endc_timer: timeout for endc state
-        """
-        logger.info('enable endc mode for nsa dual connection')
-        self._cmx.dut.signaling.nsa_dual_connect(self._cell)
-        time_count = 0
-        while time_count < endc_timer:
-            if str(self._cmx.dut.state.radio_connectivity) == \
-                    'RadioConnectivityMode.EPS_LTE_NR':
-                logger.info('enter endc mode')
-                return
-            time.sleep(1)
-            time_count += 1
-            if time_count % 30 == 0:
-                logger.info('did not reach endc at {} s'.format(time_count))
-        raise CmxError('Cannot reach endc after {} s'.format(endc_timer))
-
-    @property
-    def dl_channel(self):
-        """Gets the downlink channel of cell.
-
-        Return:
-            the downlink channel (nr_arfcn) in int.
-        """
-        return int(self._cell.get_dl_ref_a())
-
-    def _bandwidth_to_carrier_bandwidth(self, bandwidth):
-        """Converts bandwidth in MHz to CarrierBandwidth.
-            CarrierBandwidth Enum in XLAPI:
-                MHZ_5 = 0
-                MHZ_10 = 1
-                MHZ_15 = 2
-                MHZ_20 = 3
-                MHZ_25 = 4
-                MHZ_30 = 5
-                MHZ_40 = 6
-                MHZ_50 = 7
-                MHZ_60 = 8
-                MHZ_70 = 9
-                MHZ_80 = 10
-                MHZ_90 = 11
-                MHZ_100 = 12
-                MHZ_200 = 13
-                MHZ_400 = 14
-        Args:
-            bandwidth: channel bandwidth in MHz.
-
-        Return:
-            the corresponding NR Carrier Bandwidth.
-        """
-        from mrtype.nr.frequency import CarrierBandwidth
-        if bandwidth > 100:
-            return CarrierBandwidth(12 + bandwidth // 200)
-        elif bandwidth > 30:
-            return CarrierBandwidth(2 + bandwidth // 10)
-        else:
-            return CarrierBandwidth(bandwidth // 5 - 1)
-
-    def set_bandwidth(self, bandwidth, scs=None):
-        """Sets the channel bandwidth of the cell.
-
-        Args:
-            bandwidth: channel bandwidth of cell.
-            scs: subcarrier spacing (SCS) of resource grid 0
-        """
-        if not scs:
-            scs = self._cell.get_scs()
-        self._cell.set_carrier_bandwidth_and_scs(
-                self._bandwidth_to_carrier_bandwidth(bandwidth), scs)
-        logger.info('The bandwidth in MHz is {}. After setting, the value is {}'
-                    .format(bandwidth, str(self._cell.get_carrier_bandwidth())))
-
-    def set_dl_channel(self, channel):
-        """Sets the downlink channel number of cell.
-
-        Args:
-            channel: downlink channel number of cell.
-        """
-        from mrtype.nr.frequency import NrArfcn
-        if self.dl_channel == channel:
-            logger.info('The dl_channel was at {}'.format(self.dl_channel))
-            return
-        self._cell.set_dl_ref_a_offset(self.band, NrArfcn(channel))
-        logger.info('The dl_channel was set to {}'.format(self.dl_channel))
-
-    def set_dl_modulation_table(self, modulation):
-        """Sets down link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('The modulation is not the type of Modulation')
-        self._config_scheduler(dl_mcs_table=modulation.value)
-
-    def set_mimo_mode(self, mimo):
-        """Sets mimo mode for NR nsa scenario.
-
-        Args:
-            mimo: the mimo mode.
-        """
-        from rs_mrt.testenvironment.signaling.sri.rat.nr import DownlinkMimoMode
-        if not isinstance(mimo, MimoModes):
-            raise CmxError("Wrong type of mimo mode")
-
-        is_on = self._cell.is_on()
-        if is_on:
-            self._cell.stop()
-        self._config_scheduler(dl_mimo_mode=DownlinkMimoMode.Enum(mimo.value))
-        if is_on:
-            self._cell.start()
-
-    def set_scheduling_mode(
-        self, mcs_dl=None, mcs_ul=None, nrb_dl=None, nrb_ul=None):
-        """Sets scheduling mode.
-
-        Args:
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        self._config_scheduler(dl_mcs=mcs_dl, ul_mcs=mcs_ul, dl_rb_alloc=nrb_dl,
-                ul_rb_alloc=nrb_ul)
-
-    def set_ssf_config(self, ssf_config):
-        """Sets ssf subframe assignment with tdd_config.
-
-        Args:
-            ssf_config: the special subframe pattern config (from 1-9).
-        """
-        raise CmxError('the set ssf config for nr did not implemente yet')
-
-    def set_tdd_config(self, tdd_config):
-        """Sets tdd subframe assignment with tdd_config.
-
-        Args:
-            tdd_config: the subframe assignemnt config (from 0-6).
-        """
-        raise CmxError('the set tdd config for nr did not implemente yet')
-
-    def set_transmission_mode(self, transmission_mode):
-        """Sets transmission mode with schedular.
-
-        Args:
-            transmission_mode: the download link transmission mode.
-        """
-        logger.info('The set transmission mode for nr is set by mimo mode')
-
-    def set_ul_modulation_table(self, modulation):
-        """Sets down link modulation table.
-
-        Args:
-            modulation: modulation table setting (ModulationType).
-        """
-        if not isinstance(modulation, ModulationType):
-            raise CmxError('The modulation is not the type of Modulation')
-        self._config_scheduler(ul_mcs_table=modulation.value)
-
-
-class CmxError(Exception):
-    """Class to raise exceptions related to cmx."""
diff --git a/src/antlion/controllers/rohdeschwarz_lib/cmx500_cellular_simulator.py b/src/antlion/controllers/rohdeschwarz_lib/cmx500_cellular_simulator.py
deleted file mode 100644
index e8a7871..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/cmx500_cellular_simulator.py
+++ /dev/null
@@ -1,389 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.rohdeschwarz_lib import cmx500
-from antlion.controllers.rohdeschwarz_lib.cmx500 import LteBandwidth
-from antlion.controllers.rohdeschwarz_lib.cmx500 import LteState
-from antlion.controllers import cellular_simulator as cc
-from antlion.controllers.cellular_lib import LteSimulation
-
-CMX_TM_MAPPING = {
-    LteSimulation.TransmissionMode.TM1: cmx500.TransmissionModes.TM1,
-    LteSimulation.TransmissionMode.TM2: cmx500.TransmissionModes.TM2,
-    LteSimulation.TransmissionMode.TM3: cmx500.TransmissionModes.TM3,
-    LteSimulation.TransmissionMode.TM4: cmx500.TransmissionModes.TM4,
-    LteSimulation.TransmissionMode.TM7: cmx500.TransmissionModes.TM7,
-    LteSimulation.TransmissionMode.TM8: cmx500.TransmissionModes.TM8,
-    LteSimulation.TransmissionMode.TM9: cmx500.TransmissionModes.TM9,
-}
-
-CMX_SCH_MAPPING = {
-    LteSimulation.SchedulingMode.STATIC: cmx500.SchedulingMode.USERDEFINEDCH
-}
-
-CMX_MIMO_MAPPING = {
-    LteSimulation.MimoMode.MIMO_1x1: cmx500.MimoModes.MIMO1x1,
-    LteSimulation.MimoMode.MIMO_2x2: cmx500.MimoModes.MIMO2x2,
-    LteSimulation.MimoMode.MIMO_4x4: cmx500.MimoModes.MIMO4x4,
-}
-
-
-class CMX500CellularSimulator(cc.AbstractCellularSimulator):
-    """ A cellular simulator for telephony simulations based on the CMX 500
-    controller. """
-
-    def __init__(self, ip_address, port='5025'):
-        """ Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of the CMX500
-            port: the port number for the CMX500 controller
-        """
-        super().__init__()
-        try:
-            self.cmx = cmx500.Cmx500(ip_address, port)
-        except:
-            raise cc.CellularSimulatorError('Error when Initializes CMX500.')
-
-        self.bts = self.cmx.bts
-
-    def destroy(self):
-        """ Sends finalization commands to the cellular equipment and closes
-        the connection. """
-        self.log.info('destroy the cmx500 simulator')
-        self.cmx.disconnect()
-
-    def setup_lte_scenario(self):
-        """ Configures the equipment for an LTE simulation. """
-        self.log.info('setup lte scenario')
-        self.cmx.switch_lte_signalling(cmx500.LteState.LTE_ON)
-
-    def setup_nr_sa_scenario(self):
-        """ Configures the equipment for an NR stand alone simulation. """
-        raise NotImplementedError()
-
-    def setup_nr_nsa_scenario(self):
-        """ Configures the equipment for an NR non stand alone simulation. """
-        self.log.info('setup nsa scenario (start lte cell and nr cell')
-        self.cmx.switch_on_nsa_signalling()
-
-    def set_band_combination(self, bands):
-        """ Prepares the test equipment for the indicated band combination.
-
-        Args:
-            bands: a list of bands represented as ints or strings
-        """
-        self.num_carriers = len(bands)
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """ Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire
-        """
-        self.log.info('set timer enabled to {} and the time to {}'.format(
-            enabled, time))
-        self.cmx.rrc_state_change_time_enable = enabled
-        self.cmx.lte_rrc_state_change_timer = time
-
-    def set_band(self, bts_index, band):
-        """ Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            band: the new band
-        """
-        self.log.info('set band to {}'.format(band))
-        self.bts[bts_index].set_band(int(band))
-
-    def get_duplex_mode(self, band):
-        """ Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number
-
-        Returns:
-            an variable of class DuplexMode indicating if band is FDD or TDD
-        """
-        if 33 <= int(band) <= 46:
-            return cmx500.DuplexMode.TDD
-        else:
-            return cmx500.DuplexMode.FDD
-
-    def set_input_power(self, bts_index, input_power):
-        """ Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            input_power: the new input power
-        """
-        if input_power > 23:
-            self.log.warning('Open loop supports -50dBm to 23 dBm. '
-                             'Setting it to max power 23 dBm')
-            input_power = 23
-        self.log.info('set input power to {}'.format(input_power))
-        self.bts[bts_index].set_ul_power(input_power)
-
-    def set_output_power(self, bts_index, output_power):
-        """ Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            output_power: the new output power
-        """
-        self.log.info('set output power to {}'.format(output_power))
-        self.bts[bts_index].set_dl_power(output_power)
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """ Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tdd_config: the new tdd configuration number (from 0 to 6)
-        """
-        self.log.info('set tdd config to {}'.format(tdd_config))
-        self.bts[bts_index].set_tdd_config(tdd_config)
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """ Sets the Special Sub-Frame config number for the indicated
-        base station.
-
-        Args:
-            bts_index: the base station number
-            ssf_config: the new ssf config number (from 0 to 9)
-        """
-        self.log.info('set ssf config to {}'.format(ssf_config))
-        self.bts[bts_index].set_ssf_config(ssf_config)
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """ Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth in MHz
-        """
-        self.log.info('set bandwidth of bts {} to {}'.format(
-            bts_index, bandwidth))
-        self.bts[bts_index].set_bandwidth(int(bandwidth))
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """ Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            channel_number: the new channel number (earfcn)
-        """
-        self.log.info(
-            'Sets the downlink channel number to {}'.format(channel_number))
-        self.bts[bts_index].set_dl_channel(channel_number)
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """ Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        self.log.info('set mimo mode to {}'.format(mimo_mode))
-        mimo_mode = CMX_MIMO_MAPPING[mimo_mode]
-        self.bts[bts_index].set_mimo_mode(mimo_mode)
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """ Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            tmode: the new transmission mode
-        """
-        self.log.info('set TransmissionMode to {}'.format(tmode))
-        tmode = CMX_TM_MAPPING[tmode]
-        self.bts[bts_index].set_transmission_mode(tmode)
-
-    def set_scheduling_mode(self,
-                            bts_index,
-                            scheduling,
-                            mcs_dl=None,
-                            mcs_ul=None,
-                            nrb_dl=None,
-                            nrb_ul=None):
-        """ Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            scheduling: the new scheduling mode.
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        if scheduling not in CMX_SCH_MAPPING:
-            raise cc.CellularSimulatorError(
-                "This scheduling mode is not supported")
-        log_list = []
-        if mcs_dl:
-            log_list.append('mcs_dl: {}'.format(mcs_dl))
-        if mcs_ul:
-            log_list.append('mcs_ul: {}'.format(mcs_ul))
-        if nrb_dl:
-            log_list.append('nrb_dl: {}'.format(nrb_dl))
-        if nrb_ul:
-            log_list.append('nrb_ul: {}'.format(nrb_ul))
-
-        self.log.info('set scheduling mode to {}'.format(','.join(log_list)))
-        self.bts[bts_index].set_scheduling_mode(mcs_dl=mcs_dl,
-                                                mcs_ul=mcs_ul,
-                                                nrb_dl=nrb_dl,
-                                                nrb_ul=nrb_ul)
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the downlink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 256 QAM should be used
-        """
-        self.log.info('Set 256 QAM DL MCS enabled: ' + str(enabled))
-        self.bts[bts_index].set_dl_modulation_table(
-            cmx500.ModulationType.Q256 if enabled else cmx500.ModulationType.
-            Q64)
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """ Determines what MCS table should be used for the uplink.
-
-        Args:
-            bts_index: the base station number
-            enabled: whether 64 QAM should be used
-        """
-        self.log.info('Set 64 QAM UL MCS enabled: ' + str(enabled))
-        self.bts[bts_index].set_ul_modulation_table(
-            cmx500.ModulationType.Q64 if enabled else cmx500.ModulationType.Q16
-        )
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """ Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mac_padding: the new MAC padding setting
-        """
-        self.log.info('set mac pad on {}'.format(mac_padding))
-        self.bts[bts_index].set_dl_mac_padding(mac_padding)
-
-    def set_cfi(self, bts_index, cfi):
-        """ Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cfi: the new CFI setting
-        """
-        if cfi == 'BESTEFFORT':
-            self.log.info('The cfi is BESTEFFORT, use default value')
-            return
-        try:
-            index = int(cfi) + 1
-        except Exception as e:
-            index = 1
-        finally:
-            self.log.info('set the cfi and the cfi index is {}'.format(index))
-            self.bts[bts_index].set_cfi(index)
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """ Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            cycle_duration: the new paging cycle duration in milliseconds
-        """
-        self.log.warning('The set_paging_cycle method is not implememted, '
-                         'use default value')
-
-    def set_phich_resource(self, bts_index, phich):
-        """ Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            phich: the new PHICH resource setting
-        """
-        self.log.warning('The set_phich_resource method is not implememted, '
-                         'use default value')
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """ Activates the secondary carriers for CA. Requires the DUT to be
-        attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-        the UE before starting carrier aggregation.
-        """
-        self.wait_until_communication_state()
-        self.bts[1].attach_as_secondary_cell()
-
-    def wait_until_attached(self, timeout=120):
-        """ Waits until the DUT is attached to the primary carrier.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        self.log.info('wait until attached')
-        self.cmx.wait_until_attached(timeout)
-
-    def wait_until_communication_state(self, timeout=120):
-        """ Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        Return:
-            True if cmx reach rrc state within timeout
-        Raise:
-            CmxError if tiemout
-        """
-        self.log.info('wait for rrc on state')
-        return self.cmx.wait_for_rrc_state(cmx500.RrcState.RRC_ON, timeout)
-
-    def wait_until_idle_state(self, timeout=120):
-        """ Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        Return:
-            True if cmx reach rrc state within timeout
-        Raise:
-            CmxError if tiemout
-        """
-        self.log.info('wait for rrc off state')
-        return self.cmx.wait_for_rrc_state(cmx500.RrcState.RRC_OFF, timeout)
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        self.log.info('Bypass simulator detach step for now')
-
-    def stop(self):
-        """ Stops current simulation. After calling this method, the simulator
-        will need to be set up again. """
-        self.log.info('Stops current simulation and disconnect cmx500')
-        self.cmx.disconnect()
-
-    def start_data_traffic(self):
-        """ Starts transmitting data from the instrument to the DUT. """
-        self.log.warning('The start_data_traffic is not implemented yet')
-
-    def stop_data_traffic(self):
-        """ Stops transmitting data from the instrument to the DUT. """
-        self.log.warning('The stop_data_traffic is not implemented yet')
diff --git a/src/antlion/controllers/rohdeschwarz_lib/contest.py b/src/antlion/controllers/rohdeschwarz_lib/contest.py
deleted file mode 100644
index 39308e9..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/contest.py
+++ /dev/null
@@ -1,422 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from zeep import client
-from antlion.libs.proc import job
-from xml.etree import ElementTree
-import requests
-import asyncio
-import time
-import threading
-import re
-import os
-import logging
-
-
-class Contest(object):
-    """ Controller interface for Rohde Schwarz CONTEST sequencer software. """
-
-    # Remote Server parameter / operation names
-    TESTPLAN_PARAM = 'Testplan'
-    TESTPLAN_VERSION_PARAM = 'TestplanVersion'
-    KEEP_ALIVE_PARAM = 'KeepContestAlive'
-    START_TESTPLAN_OPERATION = 'StartTestplan'
-
-    # Results dictionary keys
-    POS_ERROR_KEY = 'pos_error'
-    TTFF_KEY = 'ttff'
-    SENSITIVITY_KEY = 'sensitivity'
-
-    # Waiting times
-    OUTPUT_WAITING_INTERVAL = 5
-
-    # Maximum number of times to retry if the Contest system is not responding
-    MAXIMUM_OUTPUT_READ_RETRIES = 25
-
-    # Root directory for the FTP server in the remote computer
-    FTP_ROOT = 'D:\\Logs\\'
-
-    def __init__(self, logger, remote_ip, remote_port, automation_listen_ip,
-                 automation_port, dut_on_func, dut_off_func, ftp_usr, ftp_pwd):
-        """
-        Initializes the Contest software controller.
-
-        Args:
-            logger: a logger handle.
-            remote_ip: the Remote Server's IP address.
-            remote_port: port number used by the Remote Server.
-            automation_listen_ip: local IP address in which to listen for
-                Automation Server connections.
-            automation_port: port used for Contest's DUT automation requests.
-            dut_on_func: function to turn the DUT on.
-            dut_off_func: function to turn the DUT off.
-            ftp_usr: username to login to the FTP server on the remote host
-            ftp_pwd: password to authenticate ftp_user in the ftp server
-        """
-        self.log = logger
-        self.ftp_user = ftp_usr
-        self.ftp_pass = ftp_pwd
-
-        self.remote_server_ip = remote_ip
-
-        server_url = 'http://{}:{}/RemoteServer'.format(remote_ip, remote_port)
-
-        # Initialize the SOAP client to interact with Contest's Remote Server
-        try:
-            self.soap_client = client.Client(server_url + '/RemoteServer?wsdl')
-        except requests.exceptions.ConnectionError:
-            self.log.error('Could not connect to the remote endpoint. Is '
-                           'Remote Server running on the Windows computer?')
-            raise
-
-        # Assign a value to asyncio_loop in case the automation server is not
-        # started
-        self.asyncio_loop = None
-
-        # Start the automation server if an IP and port number were passed
-        if automation_listen_ip and automation_port:
-            self.start_automation_server(automation_port, automation_listen_ip,
-                                         dut_on_func, dut_off_func)
-
-    def start_automation_server(self, automation_port, automation_listen_ip,
-                                dut_on_func, dut_off_func):
-        """ Starts the Automation server in a separate process.
-
-        Args:
-            automation_listen_ip: local IP address in which to listen for
-                Automation Server connections.
-            automation_port: port used for Contest's DUT automation requests.
-            dut_on_func: function to turn the DUT on.
-            dut_off_func: function to turn the DUT off.
-        """
-
-        # Start an asyncio event loop to run the automation server
-        self.asyncio_loop = asyncio.new_event_loop()
-
-        # Start listening for automation requests on a separate thread. This
-        # will start a new thread in which a socket will listen for incoming
-        # connections and react to Contest's automation commands
-
-        def start_automation_server(asyncio_loop):
-            AutomationServer(self.log, automation_port, automation_listen_ip,
-                             dut_on_func, dut_off_func, asyncio_loop)
-
-        automation_daemon = threading.Thread(
-            target=start_automation_server, args=[self.asyncio_loop])
-        automation_daemon.start()
-
-    def execute_testplan(self, testplan):
-        """ Executes a test plan with Contest's Remote Server sequencer.
-
-        Waits until and exit code is provided in the output. Logs the output with
-        the class logger and pulls the json report from the server if the test
-        succeeds.
-
-        Arg:
-            testplan: the test plan's name in the Contest system
-
-        Returns:
-            a dictionary with test results if the test finished successfully,
-            and None if it finished with an error exit code.
-        """
-
-        self.soap_client.service.DoSetParameterValue(self.TESTPLAN_PARAM,
-                                                     testplan)
-        self.soap_client.service.DoSetParameterValue(
-            self.TESTPLAN_VERSION_PARAM, 16)
-        self.soap_client.service.DoSetParameterValue(self.KEEP_ALIVE_PARAM,
-                                                     'true')
-
-        # Remote Server sometimes doesn't respond to the request immediately and
-        # frequently times out producing an exception. A shorter timeout will
-        # throw the exception earlier and allow the script to continue.
-        with self.soap_client.options(timeout=5):
-            try:
-                self.soap_client.service.DoStartOperation(
-                    self.START_TESTPLAN_OPERATION)
-            except requests.exceptions.ReadTimeout:
-                pass
-
-        self.log.info('Started testplan {} in Remote Server.'.format(testplan))
-
-        testplan_directory = None
-        read_retries = 0
-
-        while True:
-
-            time.sleep(self.OUTPUT_WAITING_INTERVAL)
-            output = self.soap_client.service.DoGetOutput()
-
-            # Output might be None while the instrument is busy.
-            if output:
-                self.log.debug(output)
-
-                # Obtain the path to the folder where reports generated by the
-                # test equipment will be stored in the remote computer
-                if not testplan_directory:
-                    prefix = re.escape('Testplan Directory: ' + self.FTP_ROOT)
-                    match = re.search('(?<={}).+(?=\\\\)'.format(prefix),
-                                      output)
-                    if match:
-                        testplan_directory = match.group(0)
-
-                # An exit code in the output indicates that the measurement is
-                # completed.
-                match = re.search('(?<=Exit code: )-?\d+', output)
-                if match:
-                    exit_code = int(match.group(0))
-                    break
-
-                # Reset the not-responding counter
-                read_retries = 0
-
-            else:
-                # If the output has been None for too many retries in a row,
-                # the testing instrument is assumed to be unresponsive.
-                read_retries += 1
-                if read_retries == self.MAXIMUM_OUTPUT_READ_RETRIES:
-                    raise RuntimeError('The Contest test sequencer is not '
-                                       'responding.')
-
-        self.log.info(
-            'Contest testplan finished with exit code {}.'.format(exit_code))
-
-        if exit_code in [0, 1]:
-            self.log.info('Testplan reports are stored in {}.'.format(
-                testplan_directory))
-
-            return self.pull_test_results(testplan_directory)
-
-    def pull_test_results(self, testplan_directory):
-        """ Downloads the test reports from the remote host and parses the test
-        summary to obtain the results.
-
-        Args:
-            testplan_directory: directory where to look for reports generated
-                by the test equipment in the remote computer
-
-        Returns:
-             a JSON object containing the test results
-        """
-
-        if not testplan_directory:
-            raise ValueError('Invalid testplan directory.')
-
-        # Download test reports from the remote host
-        job.run('wget -r --user={} --password={} -P {} ftp://{}/{}'.format(
-            self.ftp_user, self.ftp_pass, logging.log_path,
-            self.remote_server_ip, testplan_directory))
-
-        # Open the testplan directory
-        testplan_path = os.path.join(logging.log_path, self.remote_server_ip,
-                                     testplan_directory)
-
-        # Find the report.json file in the testcase folder
-        dir_list = os.listdir(testplan_path)
-        xml_path = None
-
-        for dir in dir_list:
-            if 'TestCaseName' in dir:
-                xml_path = os.path.join(testplan_path, dir,
-                                        'SummaryReport.xml')
-                break
-
-        if not xml_path:
-            raise RuntimeError('Could not find testcase directory.')
-
-        # Return the obtained report as a dictionary
-        xml_tree = ElementTree.ElementTree()
-        xml_tree.parse(source=xml_path)
-
-        results_dictionary = {}
-
-        col_iterator = xml_tree.iter('column')
-        for col in col_iterator:
-            # Look in the text of the first child for the required metrics
-            if col.text == '2D position error [m]':
-                results_dictionary[self.POS_ERROR_KEY] = {
-                    'min': float(next(col_iterator).text),
-                    'med': float(next(col_iterator).text),
-                    'avg': float(next(col_iterator).text),
-                    'max': float(next(col_iterator).text)
-                }
-            elif col.text == 'Time to first fix [s]':
-                results_dictionary[self.TTFF_KEY] = {
-                    'min': float(next(col_iterator).text),
-                    'med': float(next(col_iterator).text),
-                    'avg': float(next(col_iterator).text),
-                    'max': float(next(col_iterator).text)
-                }
-
-        message_iterator = xml_tree.iter('message')
-        for message in message_iterator:
-            # Look for the line showing sensitivity
-            if message.text:
-                # The typo in 'successfull' is intended as it is present in the
-                # test logs generated by the Contest system.
-                match = re.search('(?<=Margin search completed, the lowest '
-                                  'successfull output power is )-?\d+.?\d+'
-                                  '(?= dBm)', message.text)
-                if match:
-                    results_dictionary[self.SENSITIVITY_KEY] = float(
-                        match.group(0))
-                    break
-
-        return results_dictionary
-
-    def destroy(self):
-        """ Closes all open connections and kills running threads. """
-        if self.asyncio_loop:
-            # Stopping the asyncio loop will let the Automation Server exit
-            self.asyncio_loop.call_soon_threadsafe(self.asyncio_loop.stop)
-
-
-class AutomationServer:
-    """ Server object that handles DUT automation requests from Contest's Remote
-    Server.
-    """
-
-    def __init__(self, logger, port, listen_ip, dut_on_func, dut_off_func,
-                 asyncio_loop):
-        """ Initializes the Automation Server.
-
-        Opens a listening socket using a asyncio and waits for incoming
-        connections.
-
-        Args:
-            logger: a logger handle
-            port: port used for Contest's DUT automation requests
-            listen_ip: local IP in which to listen for connections
-            dut_on_func: function to turn the DUT on
-            dut_off_func: function to turn the DUT off
-            asyncio_loop: asyncio event loop to listen and process incoming
-                data asynchronously
-        """
-
-        self.log = logger
-
-        # Define a protocol factory that will provide new Protocol
-        # objects to the server created by asyncio. This Protocol
-        # objects will handle incoming commands
-        def aut_protocol_factory():
-            return self.AutomationProtocol(logger, dut_on_func, dut_off_func)
-
-        # Each client connection will create a new protocol instance
-        coro = asyncio_loop.create_server(aut_protocol_factory, listen_ip,
-                                          port)
-
-        self.server = asyncio_loop.run_until_complete(coro)
-
-        # Serve requests until Ctrl+C is pressed
-        self.log.info('Automation Server listening on {}'.format(
-            self.server.sockets[0].getsockname()))
-        asyncio_loop.run_forever()
-
-    class AutomationProtocol(asyncio.Protocol):
-        """ Defines the protocol for communication with Contest's Automation
-        client. """
-
-        AUTOMATION_DUT_ON = 'DUT_SWITCH_ON'
-        AUTOMATION_DUT_OFF = 'DUT_SWITCH_OFF'
-        AUTOMATION_OK = 'OK'
-
-        NOTIFICATION_TESTPLAN_START = 'AtTestplanStart'
-        NOTIFICATION_TESTCASE_START = 'AtTestcaseStart'
-        NOTIFICATION_TESCASE_END = 'AfterTestcase'
-        NOTIFICATION_TESTPLAN_END = 'AfterTestplan'
-
-        def __init__(self, logger, dut_on_func, dut_off_func):
-            """ Keeps the function handles to be used upon incoming requests.
-
-            Args:
-                logger: a logger handle
-                dut_on_func: function to turn the DUT on
-                dut_off_func: function to turn the DUT off
-            """
-
-            self.log = logger
-            self.dut_on_func = dut_on_func
-            self.dut_off_func = dut_off_func
-
-        def connection_made(self, transport):
-            """ Called when a connection has been established.
-
-            Args:
-                transport: represents the socket connection.
-            """
-
-            # Keep a reference to the transport as it will allow to write
-            # data to the socket later.
-            self.transport = transport
-
-            peername = transport.get_extra_info('peername')
-            self.log.info('Connection from {}'.format(peername))
-
-        def data_received(self, data):
-            """ Called when some data is received.
-
-            Args:
-                 data: non-empty bytes object containing the incoming data
-             """
-            command = data.decode()
-
-            # Remove the line break and newline characters at the end
-            command = re.sub('\r?\n$', '', command)
-
-            self.log.info("Command received from Contest's Automation "
-                          "client: {}".format(command))
-
-            if command == self.AUTOMATION_DUT_ON:
-                self.log.info("Contest's Automation client requested to set "
-                              "DUT to on state.")
-                self.send_ok()
-                self.dut_on_func()
-                return
-            elif command == self.AUTOMATION_DUT_OFF:
-                self.log.info("Contest's Automation client requested to set "
-                              "DUT to off state.")
-                self.dut_off_func()
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESTPLAN_START):
-                self.log.info('Test plan is starting.')
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESTCASE_START):
-                self.log.info('Test case is starting.')
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESCASE_END):
-                self.log.info('Test case finished.')
-                self.send_ok()
-            elif command.startswith(self.NOTIFICATION_TESTPLAN_END):
-                self.log.info('Test plan finished.')
-                self.send_ok()
-            else:
-                self.log.error('Unhandled automation command: ' + command)
-                raise ValueError()
-
-        def send_ok(self):
-            """ Sends an OK message to the Automation server. """
-            self.log.info("Sending OK response to Contest's Automation client")
-            self.transport.write(
-                bytearray(
-                    self.AUTOMATION_OK + '\n',
-                    encoding='utf-8',
-                    ))
-
-        def eof_received(self):
-            """ Called when the other end signals it won’t send any more
-            data.
-            """
-            self.log.info('Received EOF from Contest Automation client.')
diff --git a/src/antlion/controllers/rohdeschwarz_lib/smbv100.py b/src/antlion/controllers/rohdeschwarz_lib/smbv100.py
deleted file mode 100644
index 10ec98c..0000000
--- a/src/antlion/controllers/rohdeschwarz_lib/smbv100.py
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python module for Rohde & Schwarz SMBV100 Vector Signal Generator."""
-
-import numbers
-from antlion.controllers import abstract_inst
-
-
-class SMBV100Error(abstract_inst.SocketInstrumentError):
-    """SMBV100 Instrument Error Class."""
-
-
-class SMBV100(abstract_inst.SocketInstrument):
-    """SMBV100 Class, inherted from abstract_inst SocketInstrument."""
-
-    def __init__(self, ip_addr, ip_port):
-        """Init method for SMBV100.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        super(SMBV100, self).__init__(ip_addr, ip_port)
-
-        self.idn = ''
-
-    def connect(self):
-        """Init and Connect to SMBV100."""
-        self._connect_socket()
-
-        self.get_idn()
-
-        infmsg = 'Connected to SMBV100, with ID: {}'.format(self.idn)
-        self._logger.debug(infmsg)
-
-    def close(self):
-        """Close SMBV100."""
-        self._close_socket()
-
-        self._logger.debug('Closed connection to SMBV100')
-
-    def get_idn(self):
-        """Get the Idenification of SMBV100.
-
-        Returns:
-            SMBV100 Identifier
-        """
-        self.idn = self._query('*IDN?')
-
-        return self.idn
-
-    def preset(self):
-        """Preset SMBV100 to default status."""
-        self._send('*RST')
-
-        self._logger.debug('Preset SMBV100')
-
-    def set_rfout_state(self, state):
-        """set SMBV100 RF output state.
-
-        Args:
-            state: RF output state.
-                Type, str. Option, ON/OFF.
-
-        Raises:
-            SMBV100Error: raise when state is not ON/OFF.
-        """
-
-        if state not in ['ON', 'OFF']:
-            raise SMBV100Error(error='"state" input must be "ON" or "OFF"',
-                               command='set_rfout')
-
-        self._send(':OUTP ' + state)
-
-        infmsg = 'set SMBV100 RF output to "{}"'.format(state)
-        self._logger.debug(infmsg)
-
-    def set_rfout_freq(self, freq):
-        """set SMBV100 RF output frequency.
-
-        Args:
-            freq: RF output frequency.
-                Type, num.
-
-        Raises:
-            SMBV100Error: raise when 'freq' is not numerical value.
-        """
-
-        if not isinstance(freq, numbers.Number):
-            raise SMBV100Error(error='"freq" input must be numerical value',
-                               command='set_rfoutfreq')
-
-        self._send(':SOUR:FREQ:CW ' + str(freq))
-
-        infmsg = 'set SMBV100 RF output frequency to {} Hz'.format(freq)
-        self._logger.debug(infmsg)
-
-    def get_rfout_freq(self):
-        """get SMBV100 RF output frequency.
-
-        Return:
-            freq: RF output frequency.
-                Type, num.
-        """
-        resp = self._query(':SOUR:FREQ:CW?')
-
-        freq = float(resp.split(';')[0])
-
-        infmsg = 'get SMBV100 RF output frequency as {} Hz'.format(freq)
-        self._logger.debug(infmsg)
-
-        return freq
-
-    def set_rfout_level(self, level):
-        """set SMBV100 RF output level.
-
-        Args:
-            level: RF Level.
-                Type, num.
-
-        Raises:
-            SMBV100Error: raise when 'level' is not numerical value.
-        """
-
-        if not isinstance(level, numbers.Number):
-            raise SMBV100Error(error='"level" input must be numerical value',
-                               command='set_rflevel')
-
-        self._send(':SOUR:POW:LEV:IMM:AMPL ' + str(level))
-
-        infmsg = 'set SMBV100 RF level to {} dBm'.format(level)
-        self._logger.debug(infmsg)
-
-    def get_rfout_level(self):
-        """get SMBV100 RF out level.
-
-        Return:
-            level: RF Level.
-                Type, num.
-        """
-        resp = self._query(':SOUR:POW:LEV:IMM:AMPL?')
-
-        level = float(resp.split(';')[0])
-
-        infmsg = 'get SMBV100 RF level as {} dBm'.format(level)
-        self._logger.debug(infmsg)
-
-        return level
diff --git a/src/antlion/controllers/sl4a_lib/error_reporter.py b/src/antlion/controllers/sl4a_lib/error_reporter.py
deleted file mode 100644
index c36593b..0000000
--- a/src/antlion/controllers/sl4a_lib/error_reporter.py
+++ /dev/null
@@ -1,224 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import re
-import threading
-import time
-
-from antlion import utils
-
-
-class ErrorLogger(logging.LoggerAdapter):
-    """A logger for a given error report."""
-
-    def __init__(self, label):
-        self.label = label
-        super(ErrorLogger, self).__init__(logging.getLogger(), {})
-
-    def process(self, msg, kwargs):
-        """Transforms a log message to be in a given format."""
-        return '[Error Report|%s] %s' % (self.label, msg), kwargs
-
-
-class ErrorReporter(object):
-    """A class that reports errors and diagnoses possible points of failure.
-
-    Attributes:
-        max_reports: The maximum number of reports that should be reported.
-            Defaulted to 1 to prevent multiple reports from reporting at the
-            same time over one another.
-        name: The name of the report to be used in the error logs.
-    """
-
-    def __init__(self, name, max_reports=1):
-        """Creates an error report.
-
-        Args:
-            name: The name of the error report.
-            max_reports: Sets the maximum number of reports to this value.
-        """
-        self.name = name
-        self.max_reports = max_reports
-        self._ticket_number = 0
-        self._ticket_lock = threading.Lock()
-        self._current_request_count = 0
-        self._accept_requests = True
-
-    def create_error_report(self, sl4a_manager, sl4a_session, rpc_connection):
-        """Creates an error report, if possible.
-
-        Returns:
-            False iff a report cannot be created.
-        """
-        if not self._accept_requests:
-            return False
-
-        self._current_request_count += 1
-
-        try:
-            ticket = self._get_report_ticket()
-            if not ticket:
-                return False
-
-            report = ErrorLogger('%s|%s' % (self.name, ticket))
-            report.info('Creating error report.')
-
-            (self.report_on_adb(sl4a_manager.adb, report)
-             and self.report_device_processes(sl4a_manager.adb, report) and
-             self.report_sl4a_state(rpc_connection, sl4a_manager.adb, report)
-             and self.report_sl4a_session(sl4a_manager, sl4a_session, report))
-
-            return True
-        finally:
-            self._current_request_count -= 1
-
-    def report_on_adb(self, adb, report):
-        """Creates an error report for ADB. Returns false if ADB has failed."""
-        adb_uptime = utils.get_command_uptime('"adb .* server"')
-        if adb_uptime:
-            report.info('The adb daemon has an uptime of %s '
-                        '([[dd-]hh:]mm:ss).' % adb_uptime)
-        else:
-            report.warning('The adb daemon (on the host machine) is not '
-                           'running. All forwarded ports have been removed.')
-            return False
-
-        devices_output = adb.devices()
-        if adb.serial not in devices_output:
-            report.warning(
-                'This device cannot be found by ADB. The device may have shut '
-                'down or disconnected.')
-            return False
-        elif re.findall(r'%s\s+offline' % adb.serial, devices_output):
-            report.warning(
-                'The device is marked as offline in ADB. We are no longer able '
-                'to access the device.')
-            return False
-        else:
-            report.info(
-                'The device is online and accessible through ADB calls.')
-        return True
-
-    def report_device_processes(self, adb, report):
-        """Creates an error report for the device's required processes.
-
-        Returns:
-            False iff user-apks cannot be communicated with over tcp.
-        """
-        zygote_uptime = utils.get_device_process_uptime(adb, 'zygote')
-        if zygote_uptime:
-            report.info(
-                'Zygote has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is low, the phone may have recently crashed.' %
-                zygote_uptime)
-        else:
-            report.warning(
-                'Zygote has been killed. It is likely the Android Runtime has '
-                'crashed. Check the bugreport/logcat for more information.')
-            return False
-
-        netd_uptime = utils.get_device_process_uptime(adb, 'netd')
-        if netd_uptime:
-            report.info(
-                'Netd has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is low, the phone may have recently crashed.' %
-                zygote_uptime)
-        else:
-            report.warning(
-                'Netd has been killed. The Android Runtime may have crashed. '
-                'Check the bugreport/logcat for more information.')
-            return False
-
-        adbd_uptime = utils.get_device_process_uptime(adb, 'adbd')
-        if netd_uptime:
-            report.info(
-                'Adbd has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is low, the phone may have recently crashed.' %
-                adbd_uptime)
-        else:
-            report.warning('Adbd is not running.')
-            return False
-        return True
-
-    def report_sl4a_state(self, rpc_connection, adb, report):
-        """Creates an error report for the state of SL4A."""
-        report.info(
-            'Diagnosing Failure over connection %s.' % rpc_connection.ports)
-
-        ports = rpc_connection.ports
-        forwarded_ports_output = adb.forward('--list')
-
-        expected_output = '%s tcp:%s tcp:%s' % (
-            adb.serial, ports.forwarded_port, ports.server_port)
-        if expected_output not in forwarded_ports_output:
-            formatted_output = re.sub(
-                '^', '    ', forwarded_ports_output, flags=re.MULTILINE)
-            report.warning(
-                'The forwarded port for the failed RpcConnection is missing.\n'
-                'Expected:\n    %s\nBut found:\n%s' % (expected_output,
-                                                       formatted_output))
-            return False
-        else:
-            report.info('The connection port has been properly forwarded to '
-                        'the device.')
-
-        sl4a_uptime = utils.get_device_process_uptime(
-            adb, 'com.googlecode.android_scripting')
-        if sl4a_uptime:
-            report.info(
-                'SL4A has been running for %s ([[dd-]hh:]mm:ss). If this '
-                'value is lower than the test case, it must have been '
-                'restarted during the test.' % sl4a_uptime)
-        else:
-            report.warning(
-                'The SL4A scripting service is not running. SL4A may have '
-                'crashed, or have been terminated by the Android Runtime.')
-            return False
-        return True
-
-    def report_sl4a_session(self, sl4a_manager, session, report):
-        """Reports the state of an SL4A session."""
-        if session.server_port not in sl4a_manager.sl4a_ports_in_use:
-            report.warning('SL4A server port %s not found in set of open '
-                           'ports %s' % (session.server_port,
-                                         sl4a_manager.sl4a_ports_in_use))
-            return False
-
-        if session not in sl4a_manager.sessions.values():
-            report.warning('SL4A session %s over port %s is not managed by '
-                           'the SL4A Manager. This session is already dead.' %
-                           (session.uid, session.server_port))
-            return False
-        return True
-
-    def finalize_reports(self):
-        self._accept_requests = False
-        while self._current_request_count > 0:
-            # Wait for other threads to finish.
-            time.sleep(.1)
-
-    def _get_report_ticket(self):
-        """Returns the next ticket, or none if all tickets have been used."""
-        logging.debug('Getting ticket for SL4A error report.')
-        with self._ticket_lock:
-            self._ticket_number += 1
-            ticket_number = self._ticket_number
-
-        if ticket_number <= self.max_reports:
-            return ticket_number
-        else:
-            return None
diff --git a/src/antlion/controllers/sl4a_lib/event_dispatcher.py b/src/antlion/controllers/sl4a_lib/event_dispatcher.py
deleted file mode 100644
index 4cdce8f..0000000
--- a/src/antlion/controllers/sl4a_lib/event_dispatcher.py
+++ /dev/null
@@ -1,478 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from concurrent.futures import ThreadPoolExecutor
-import queue
-import re
-import threading
-import time
-
-from antlion import logger
-from antlion.controllers.sl4a_lib import rpc_client
-
-
-class EventDispatcherError(Exception):
-    """The base class for all EventDispatcher exceptions."""
-
-
-class IllegalStateError(EventDispatcherError):
-    """Raise when user tries to put event_dispatcher into an illegal state."""
-
-
-class DuplicateError(EventDispatcherError):
-    """Raise when two event handlers have been assigned to an event name."""
-
-
-class EventDispatcher:
-    """A class for managing the events for an SL4A Session.
-
-    Attributes:
-        _serial: The serial of the device.
-        _rpc_client: The rpc client for that session.
-        _started: A bool that holds whether or not the event dispatcher is
-                  running.
-        _executor: The thread pool executor for running event handlers and
-                   polling.
-        _event_dict: A dictionary of str eventName = Queue<Event> eventQueue
-        _handlers: A dictionary of str eventName => (lambda, args) handler
-        _lock: A lock that prevents multiple reads/writes to the event queues.
-        log: The EventDispatcher's logger.
-    """
-
-    DEFAULT_TIMEOUT = 60
-
-    def __init__(self, serial, rpc_client):
-        self._serial = serial
-        self._rpc_client = rpc_client
-        self._started = False
-        self._executor = None
-        self._event_dict = {}
-        self._handlers = {}
-        self._lock = threading.RLock()
-
-        def _log_formatter(message):
-            """Defines the formatting used in the logger."""
-            return '[E Dispatcher|%s|%s] %s' % (self._serial,
-                                                self._rpc_client.uid, message)
-
-        self.log = logger.create_logger(_log_formatter)
-
-    def poll_events(self):
-        """Continuously polls all types of events from sl4a.
-
-        Events are sorted by name and store in separate queues.
-        If there are registered handlers, the handlers will be called with
-        corresponding event immediately upon event discovery, and the event
-        won't be stored. If exceptions occur, stop the dispatcher and return
-        """
-        while self._started:
-            try:
-                # 60000 in ms, timeout in second
-                event_obj = self._rpc_client.eventWait(60000, timeout=120)
-            except rpc_client.Sl4aConnectionError as e:
-                if self._rpc_client.is_alive:
-                    self.log.warning('Closing due to closed session.')
-                    break
-                else:
-                    self.log.warning('Closing due to error: %s.' % e)
-                    self.close()
-                    raise e
-            if not event_obj:
-                continue
-            elif 'name' not in event_obj:
-                self.log.error('Received Malformed event {}'.format(event_obj))
-                continue
-            else:
-                event_name = event_obj['name']
-            # if handler registered, process event
-            if event_name == 'EventDispatcherShutdown':
-                self.log.debug('Received shutdown signal.')
-                # closeSl4aSession has been called, which closes the event
-                # dispatcher. Stop execution on this polling thread.
-                return
-            if event_name in self._handlers:
-                self.log.debug(
-                    'Using handler %s for event: %r' %
-                    (self._handlers[event_name].__name__, event_obj))
-                self.handle_subscribed_event(event_obj, event_name)
-            else:
-                self.log.debug('Queuing event: %r' % event_obj)
-                self._lock.acquire()
-                if event_name in self._event_dict:  # otherwise, cache event
-                    self._event_dict[event_name].put(event_obj)
-                else:
-                    q = queue.Queue()
-                    q.put(event_obj)
-                    self._event_dict[event_name] = q
-                self._lock.release()
-
-    def register_handler(self, handler, event_name, args):
-        """Registers an event handler.
-
-        One type of event can only have one event handler associated with it.
-
-        Args:
-            handler: The event handler function to be registered.
-            event_name: Name of the event the handler is for.
-            args: User arguments to be passed to the handler when it's called.
-
-        Raises:
-            IllegalStateError: Raised if attempts to register a handler after
-                the dispatcher starts running.
-            DuplicateError: Raised if attempts to register more than one
-                handler for one type of event.
-        """
-        if self._started:
-            raise IllegalStateError('Cannot register service after polling is '
-                                    'started.')
-        self._lock.acquire()
-        try:
-            if event_name in self._handlers:
-                raise DuplicateError(
-                    'A handler for {} already exists'.format(event_name))
-            self._handlers[event_name] = (handler, args)
-        finally:
-            self._lock.release()
-
-    def start(self):
-        """Starts the event dispatcher.
-
-        Initiates executor and start polling events.
-
-        Raises:
-            IllegalStateError: Can't start a dispatcher again when it's already
-                running.
-        """
-        if not self._started:
-            self._started = True
-            self._executor = ThreadPoolExecutor(max_workers=32)
-            self._executor.submit(self.poll_events)
-        else:
-            raise IllegalStateError("Dispatcher is already started.")
-
-    def close(self):
-        """Clean up and release resources.
-
-        This function should only be called after a
-        rpc_client.closeSl4aSession() call.
-        """
-        if not self._started:
-            return
-        self._started = False
-        self._executor.shutdown(wait=True)
-        self.clear_all_events()
-
-    def pop_event(self, event_name, timeout=DEFAULT_TIMEOUT):
-        """Pop an event from its queue.
-
-        Return and remove the oldest entry of an event.
-        Block until an event of specified name is available or
-        times out if timeout is set.
-
-        Args:
-            event_name: Name of the event to be popped.
-            timeout: Number of seconds to wait when event is not present.
-                Never times out if None.
-
-        Returns:
-            event: The oldest entry of the specified event. None if timed out.
-
-        Raises:
-            IllegalStateError: Raised if pop is called before the dispatcher
-                starts polling.
-        """
-        if not self._started:
-            raise IllegalStateError(
-                'Dispatcher needs to be started before popping.')
-
-        e_queue = self.get_event_q(event_name)
-
-        if not e_queue:
-            raise IllegalStateError(
-                'Failed to get an event queue for {}'.format(event_name))
-
-        try:
-            # Block for timeout
-            if timeout:
-                return e_queue.get(True, timeout)
-            # Non-blocking poll for event
-            elif timeout == 0:
-                return e_queue.get(False)
-            else:
-                # Block forever on event wait
-                return e_queue.get(True)
-        except queue.Empty:
-            msg = 'Timeout after {}s waiting for event: {}'.format(
-                timeout, event_name)
-            self.log.info(msg)
-            raise queue.Empty(msg)
-
-    def wait_for_event(self,
-                       event_name,
-                       predicate,
-                       timeout=DEFAULT_TIMEOUT,
-                       *args,
-                       **kwargs):
-        """Wait for an event that satisfies a predicate to appear.
-
-        Continuously pop events of a particular name and check against the
-        predicate until an event that satisfies the predicate is popped or
-        timed out. Note this will remove all the events of the same name that
-        do not satisfy the predicate in the process.
-
-        Args:
-            event_name: Name of the event to be popped.
-            predicate: A function that takes an event and returns True if the
-                predicate is satisfied, False otherwise.
-            timeout: Number of seconds to wait.
-            *args: Optional positional args passed to predicate().
-            **kwargs: Optional keyword args passed to predicate().
-                consume_ignored_events: Whether or not to consume events while
-                    searching for the desired event. Defaults to True if unset.
-
-        Returns:
-            The event that satisfies the predicate.
-
-        Raises:
-            queue.Empty: Raised if no event that satisfies the predicate was
-                found before time out.
-        """
-        deadline = time.time() + timeout
-        ignored_events = []
-        consume_events = kwargs.pop('consume_ignored_events', True)
-        while True:
-            event = None
-            try:
-                event = self.pop_event(event_name, 1)
-                if consume_events:
-                    self.log.debug('Consuming event: %r' % event)
-                else:
-                    self.log.debug('Peeking at event: %r' % event)
-                    ignored_events.append(event)
-            except queue.Empty:
-                pass
-
-            if event and predicate(event, *args, **kwargs):
-                for ignored_event in ignored_events:
-                    self.get_event_q(event_name).put(ignored_event)
-                self.log.debug('Matched event: %r with %s' %
-                               (event, predicate.__name__))
-                return event
-
-            if time.time() > deadline:
-                for ignored_event in ignored_events:
-                    self.get_event_q(event_name).put(ignored_event)
-                msg = 'Timeout after {}s waiting for event: {}'.format(
-                    timeout, event_name)
-                self.log.info(msg)
-                raise queue.Empty(msg)
-
-    def pop_events(self, regex_pattern, timeout, freq=1):
-        """Pop events whose names match a regex pattern.
-
-        If such event(s) exist, pop one event from each event queue that
-        satisfies the condition. Otherwise, wait for an event that satisfies
-        the condition to occur, with timeout.
-
-        Results are sorted by timestamp in ascending order.
-
-        Args:
-            regex_pattern: The regular expression pattern that an event name
-                should match in order to be popped.
-            timeout: Number of seconds to wait for events in case no event
-                matching the condition exits when the function is called.
-
-        Returns:
-            results: Pop events whose names match a regex pattern.
-                Empty if none exist and the wait timed out.
-
-        Raises:
-            IllegalStateError: Raised if pop is called before the dispatcher
-                starts polling.
-            queue.Empty: Raised if no event was found before time out.
-        """
-        if not self._started:
-            raise IllegalStateError(
-                "Dispatcher needs to be started before popping.")
-        deadline = time.time() + timeout
-        while True:
-            # TODO: fix the sleep loop
-            results = self._match_and_pop(regex_pattern)
-            if len(results) != 0 or time.time() > deadline:
-                break
-            time.sleep(freq)
-        if len(results) == 0:
-            msg = 'Timeout after {}s waiting for event: {}'.format(
-                timeout, regex_pattern)
-            self.log.error(msg)
-            raise queue.Empty(msg)
-
-        return sorted(results, key=lambda event: event['time'])
-
-    def _match_and_pop(self, regex_pattern):
-        """Pop one event from each of the event queues whose names
-        match (in a sense of regular expression) regex_pattern.
-        """
-        results = []
-        self._lock.acquire()
-        for name in self._event_dict.keys():
-            if re.match(regex_pattern, name):
-                q = self._event_dict[name]
-                if q:
-                    try:
-                        results.append(q.get(False))
-                    except queue.Empty:
-                        pass
-        self._lock.release()
-        return results
-
-    def get_event_q(self, event_name):
-        """Obtain the queue storing events of the specified name.
-
-        If no event of this name has been polled, wait for one to.
-
-        Returns: A queue storing all the events of the specified name.
-        """
-        self._lock.acquire()
-        if (event_name not in self._event_dict
-                or self._event_dict[event_name] is None):
-            self._event_dict[event_name] = queue.Queue()
-        self._lock.release()
-
-        event_queue = self._event_dict[event_name]
-        return event_queue
-
-    def handle_subscribed_event(self, event_obj, event_name):
-        """Execute the registered handler of an event.
-
-        Retrieve the handler and its arguments, and execute the handler in a
-            new thread.
-
-        Args:
-            event_obj: Json object of the event.
-            event_name: Name of the event to call handler for.
-        """
-        handler, args = self._handlers[event_name]
-        self._executor.submit(handler, event_obj, *args)
-
-    def _handle(self, event_handler, event_name, user_args, event_timeout,
-                cond, cond_timeout):
-        """Pop an event of specified type and calls its handler on it. If
-        condition is not None, block until condition is met or timeout.
-        """
-        if cond:
-            cond.wait(cond_timeout)
-        event = self.pop_event(event_name, event_timeout)
-        return event_handler(event, *user_args)
-
-    def handle_event(self,
-                     event_handler,
-                     event_name,
-                     user_args,
-                     event_timeout=None,
-                     cond=None,
-                     cond_timeout=None):
-        """Handle events that don't have registered handlers
-
-        In a new thread, poll one event of specified type from its queue and
-        execute its handler. If no such event exists, the thread waits until
-        one appears.
-
-        Args:
-            event_handler: Handler for the event, which should take at least
-                one argument - the event json object.
-            event_name: Name of the event to be handled.
-            user_args: User arguments for the handler; to be passed in after
-                the event json.
-            event_timeout: Number of seconds to wait for the event to come.
-            cond: A condition to wait on before executing the handler. Should
-                be a threading.Event object.
-            cond_timeout: Number of seconds to wait before the condition times
-                out. Never times out if None.
-
-        Returns:
-            worker: A concurrent.Future object associated with the handler.
-                If blocking call worker.result() is triggered, the handler
-                needs to return something to unblock.
-        """
-        worker = self._executor.submit(self._handle, event_handler, event_name,
-                                       user_args, event_timeout, cond,
-                                       cond_timeout)
-        return worker
-
-    def pop_all(self, event_name):
-        """Return and remove all stored events of a specified name.
-
-        Pops all events from their queue. May miss the latest ones.
-        If no event is available, return immediately.
-
-        Args:
-            event_name: Name of the events to be popped.
-
-        Returns:
-           results: List of the desired events.
-
-        Raises:
-            IllegalStateError: Raised if pop is called before the dispatcher
-                starts polling.
-        """
-        if not self._started:
-            raise IllegalStateError(("Dispatcher needs to be started before "
-                                     "popping."))
-        results = []
-        try:
-            self._lock.acquire()
-            while True:
-                e = self._event_dict[event_name].get(block=False)
-                results.append(e)
-        except (queue.Empty, KeyError):
-            return results
-        finally:
-            self._lock.release()
-
-    def clear_events(self, event_name):
-        """Clear all events of a particular name.
-
-        Args:
-            event_name: Name of the events to be popped.
-        """
-        self._lock.acquire()
-        try:
-            q = self.get_event_q(event_name)
-            q.queue.clear()
-        except queue.Empty:
-            return
-        finally:
-            self._lock.release()
-
-    def clear_all_events(self):
-        """Clear all event queues and their cached events."""
-        self._lock.acquire()
-        self._event_dict.clear()
-        self._lock.release()
-
-    def is_event_match(self, event, field, value):
-        return self.is_event_match_for_list(event, field, [value])
-
-    def is_event_match_for_list(self, event, field, value_list):
-        try:
-            value_in_event = event['data'][field]
-        except KeyError:
-            return False
-        for value in value_list:
-            if value_in_event == value:
-                return True
-        return False
diff --git a/src/antlion/controllers/sl4a_lib/rpc_client.py b/src/antlion/controllers/sl4a_lib/rpc_client.py
deleted file mode 100644
index fd45c04..0000000
--- a/src/antlion/controllers/sl4a_lib/rpc_client.py
+++ /dev/null
@@ -1,358 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import json
-import socket
-import threading
-import time
-from concurrent import futures
-
-from antlion import error
-from antlion import logger
-
-# The default timeout value when no timeout is set.
-SOCKET_TIMEOUT = 60
-
-# The Session UID when a UID has not been received yet.
-UNKNOWN_UID = -1
-
-class Sl4aException(error.ActsError):
-    """The base class for all SL4A exceptions."""
-
-
-class Sl4aStartError(Sl4aException):
-    """Raised when sl4a is not able to be started."""
-
-
-class Sl4aApiError(Sl4aException):
-    """Raised when remote API reports an error.
-
-    This error mirrors the JSON-RPC 2.0 spec for Error Response objects.
-
-    Attributes:
-        code: The error code returned by SL4A. Not to be confused with
-            ActsError's error_code.
-        message: The error message returned by SL4A.
-        data: The extra data, if any, returned by SL4A.
-    """
-
-    def __init__(self, message, code=-1, data=None, rpc_name=''):
-        super().__init__()
-        self.message = message
-        self.code = code
-        if data is None:
-            self.data = {}
-        else:
-            self.data = data
-        self.rpc_name = rpc_name
-
-    def __str__(self):
-        if self.data:
-            return 'Error in RPC %s %s:%s:%s' % (self.rpc_name, self.code,
-                                                 self.message, self.data)
-        else:
-            return 'Error in RPC %s %s:%s' % (self.rpc_name, self.code,
-                                              self.message)
-
-
-class Sl4aConnectionError(Sl4aException):
-    """An error raised upon failure to connect to SL4A."""
-
-
-class Sl4aProtocolError(Sl4aException):
-    """Raised when there an error in exchanging data with server on device."""
-    NO_RESPONSE_FROM_HANDSHAKE = 'No response from handshake.'
-    NO_RESPONSE_FROM_SERVER = 'No response from server.'
-    MISMATCHED_API_ID = 'Mismatched API id.'
-
-
-class Sl4aNotInstalledError(Sl4aException):
-    """An error raised when an Sl4aClient is created without SL4A installed."""
-
-
-class Sl4aRpcTimeoutError(Sl4aException):
-    """An error raised when an SL4A RPC has timed out."""
-
-
-class RpcClient(object):
-    """An RPC client capable of processing multiple RPCs concurrently.
-
-    Attributes:
-        _free_connections: A list of all idle RpcConnections.
-        _working_connections: A list of all working RpcConnections.
-        _lock: A lock used for accessing critical memory.
-        max_connections: The maximum number of RpcConnections at a time.
-            Increasing or decreasing the number of max connections does NOT
-            modify the thread pool size being used for self.future RPC calls.
-        _log: The logger for this RpcClient.
-    """
-    """The default value for the maximum amount of connections for a client."""
-    DEFAULT_MAX_CONNECTION = 15
-
-    class AsyncClient(object):
-        """An object that allows RPC calls to be called asynchronously.
-
-        Attributes:
-            _rpc_client: The RpcClient to use when making calls.
-            _executor: The ThreadPoolExecutor used to keep track of workers
-        """
-
-        def __init__(self, rpc_client):
-            self._rpc_client = rpc_client
-            self._executor = futures.ThreadPoolExecutor(
-                max_workers=max(rpc_client.max_connections - 2, 1))
-
-        def rpc(self, name, *args, **kwargs):
-            future = self._executor.submit(name, *args, **kwargs)
-            return future
-
-        def __getattr__(self, name):
-            """Wrapper for python magic to turn method calls into RPC calls."""
-
-            def rpc_call(*args, **kwargs):
-                future = self._executor.submit(
-                    self._rpc_client.__getattr__(name), *args, **kwargs)
-                return future
-
-            return rpc_call
-
-    def __init__(self,
-                 uid,
-                 serial,
-                 on_error_callback,
-                 _create_connection_func,
-                 max_connections=None):
-        """Creates a new RpcClient object.
-
-        Args:
-            uid: The session uid this client is a part of.
-            serial: The serial of the Android device. Used for logging.
-            on_error_callback: A callback for when a connection error is raised.
-            _create_connection_func: A reference to the function that creates a
-                new session.
-            max_connections: The maximum number of connections the RpcClient
-                can have.
-        """
-        self._serial = serial
-        self.on_error = on_error_callback
-        self._create_connection_func = _create_connection_func
-        self._free_connections = [self._create_connection_func(uid)]
-
-        self.uid = self._free_connections[0].uid
-        self._lock = threading.Lock()
-
-        def _log_formatter(message):
-            """Formats the message to be logged."""
-            return '[RPC Service|%s|%s] %s' % (self._serial, self.uid, message)
-
-        self._log = logger.create_logger(_log_formatter)
-
-        self._working_connections = []
-        if max_connections is None:
-            self.max_connections = RpcClient.DEFAULT_MAX_CONNECTION
-        else:
-            self.max_connections = max_connections
-
-        self._async_client = RpcClient.AsyncClient(self)
-        self.is_alive = True
-
-    def terminate(self):
-        """Terminates all connections to the SL4A server."""
-        if len(self._working_connections) > 0:
-            self._log.warning(
-                '%s connections are still active, and waiting on '
-                'responses.Closing these connections now.' % len(
-                    self._working_connections))
-        connections = self._free_connections + self._working_connections
-        for connection in connections:
-            self._log.debug(
-                'Closing connection over ports %s' % connection.ports)
-            connection.close()
-        self._free_connections = []
-        self._working_connections = []
-        self.is_alive = False
-
-    def _get_free_connection(self):
-        """Returns a free connection to be used for an RPC call.
-
-        This function also adds the client to the working set to prevent
-        multiple users from obtaining the same client.
-        """
-        while True:
-            if len(self._free_connections) > 0:
-                with self._lock:
-                    # Check if another thread grabbed the remaining connection.
-                    # while we were waiting for the lock.
-                    if len(self._free_connections) == 0:
-                        continue
-                    client = self._free_connections.pop()
-                    self._working_connections.append(client)
-                    return client
-
-            client_count = (len(self._free_connections) +
-                            len(self._working_connections))
-            if client_count < self.max_connections:
-                with self._lock:
-                    client_count = (len(self._free_connections) +
-                                    len(self._working_connections))
-                    if client_count < self.max_connections:
-                        client = self._create_connection_func(self.uid)
-                        self._working_connections.append(client)
-                        return client
-            time.sleep(.01)
-
-    def _release_working_connection(self, connection):
-        """Marks a working client as free.
-
-        Args:
-            connection: The client to mark as free.
-        Raises:
-            A ValueError if the client is not a known working connection.
-        """
-        # We need to keep this code atomic because the client count is based on
-        # the length of the free and working connection list lengths.
-        with self._lock:
-            self._working_connections.remove(connection)
-            self._free_connections.append(connection)
-
-    def rpc(self, method, *args, timeout=None, retries=3):
-        """Sends an rpc to sl4a.
-
-        Sends an rpc call to sl4a over this RpcClient's corresponding session.
-
-        Args:
-            method: str, The name of the method to execute.
-            args: any, The args to send to sl4a.
-            timeout: The amount of time to wait for a response.
-            retries: Misnomer, is actually the number of tries.
-
-        Returns:
-            The result of the rpc.
-
-        Raises:
-            Sl4aProtocolError: Something went wrong with the sl4a protocol.
-            Sl4aApiError: The rpc went through, however executed with errors.
-        """
-        connection = self._get_free_connection()
-        ticket = connection.get_new_ticket()
-        timed_out = False
-        if timeout:
-            connection.set_timeout(timeout)
-        data = {'id': ticket, 'method': method, 'params': args}
-        request = json.dumps(data)
-        response = ''
-        try:
-            for i in range(1, retries + 1):
-                connection.send_request(request)
-
-                response = connection.get_response()
-                if not response:
-                    if i < retries:
-                        self._log.warning(
-                            'No response for RPC method %s on iteration %s',
-                            method, i)
-                        continue
-                    else:
-                        self._log.exception(
-                            'No response for RPC method %s on iteration %s',
-                            method, i)
-                        self.on_error(connection)
-                        raise Sl4aProtocolError(
-                            Sl4aProtocolError.NO_RESPONSE_FROM_SERVER)
-                else:
-                    break
-        except BrokenPipeError as e:
-            if self.is_alive:
-                self._log.exception('The device disconnected during RPC call '
-                                    '%s. Please check the logcat for a crash '
-                                    'or disconnect.', method)
-                self.on_error(connection)
-            else:
-                self._log.warning('The connection was killed during cleanup:')
-                self._log.warning(e)
-            raise Sl4aConnectionError(e)
-        except socket.timeout as err:
-            # If a socket connection has timed out, the socket can no longer be
-            # used. Close it out and remove the socket from the connection pool.
-            timed_out = True
-            self._log.warning('RPC "%s" (id: %s) timed out after %s seconds.',
-                              method, ticket, timeout or SOCKET_TIMEOUT)
-            self._log.debug(
-                'Closing timed out connection over %s' % connection.ports)
-            connection.close()
-            self._working_connections.remove(connection)
-            # Re-raise the error as an SL4A Error so end users can process it.
-            raise Sl4aRpcTimeoutError(err)
-        finally:
-            if not timed_out:
-                if timeout:
-                    connection.set_timeout(SOCKET_TIMEOUT)
-                self._release_working_connection(connection)
-        result = json.loads(str(response, encoding='utf8'))
-
-        if result['error']:
-            error_object = result['error']
-            if isinstance(error_object, dict):
-                # Uses JSON-RPC 2.0 Format
-                sl4a_api_error = Sl4aApiError(error_object.get('message', None),
-                                              error_object.get('code', -1),
-                                              error_object.get('data', {}),
-                                              rpc_name=method)
-            else:
-                # Fallback on JSON-RPC 1.0 Format
-                sl4a_api_error = Sl4aApiError(error_object, rpc_name=method)
-            self._log.warning(sl4a_api_error)
-            raise sl4a_api_error
-        if result['id'] != ticket:
-            self._log.error('RPC method %s with mismatched api id %s', method,
-                            result['id'])
-            raise Sl4aProtocolError(Sl4aProtocolError.MISMATCHED_API_ID)
-        return result['result']
-
-    @property
-    def future(self):
-        """Returns a magic function that returns a future running an RPC call.
-
-        This function effectively allows the idiom:
-
-        >>> rpc_client = RpcClient(...)
-        >>> # returns after call finishes
-        >>> rpc_client.someRpcCall()
-        >>> # Immediately returns a reference to the RPC's future, running
-        >>> # the lengthy RPC call on another thread.
-        >>> future = rpc_client.future.someLengthyRpcCall()
-        >>> rpc_client.doOtherThings()
-        >>> ...
-        >>> # Wait for and get the returned value of the lengthy RPC.
-        >>> # Can specify a timeout as well.
-        >>> value = future.result()
-
-        The number of concurrent calls to this method is limited to
-        (max_connections - 2), to prevent future calls from exhausting all free
-        connections.
-        """
-        return self._async_client
-
-    def __getattr__(self, name):
-        """Wrapper for python magic to turn method calls into RPC calls."""
-
-        def rpc_call(*args, **kwargs):
-            return self.rpc(name, *args, **kwargs)
-
-        if not self.is_alive:
-            raise Sl4aStartError(
-                'This SL4A session has already been terminated. You must '
-                'create a new session to continue.')
-        return rpc_call
diff --git a/src/antlion/controllers/sl4a_lib/rpc_connection.py b/src/antlion/controllers/sl4a_lib/rpc_connection.py
deleted file mode 100644
index b8cb8cf..0000000
--- a/src/antlion/controllers/sl4a_lib/rpc_connection.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import json
-import socket
-import threading
-
-from antlion import logger
-from antlion.controllers.sl4a_lib import rpc_client
-
-# The Session UID when a UID has not been received yet.
-UNKNOWN_UID = -1
-
-
-class Sl4aConnectionCommand(object):
-    """Commands that can be invoked on the sl4a client.
-
-    INIT: Initializes a new sessions in sl4a.
-    CONTINUE: Creates a connection.
-    """
-    INIT = 'initiate'
-    CONTINUE = 'continue'
-
-
-class RpcConnection(object):
-    """A single RPC Connection thread.
-
-    Attributes:
-        _client_socket: The socket this connection uses.
-        _socket_file: The file created over the _client_socket.
-        _ticket_counter: The counter storing the current ticket number.
-        _ticket_lock: A lock on the ticket counter to prevent ticket collisions.
-        adb: A reference to the AdbProxy of the AndroidDevice. Used for logging.
-        log: The logger for this RPC Client.
-        ports: The Sl4aPorts object that stores the ports this connection uses.
-        uid: The SL4A session ID.
-    """
-
-    def __init__(self, adb, ports, client_socket, socket_fd, uid=UNKNOWN_UID):
-        self._client_socket = client_socket
-        self._socket_file = socket_fd
-        self._ticket_counter = 0
-        self._ticket_lock = threading.Lock()
-        self.adb = adb
-        self.uid = uid
-
-        def _log_formatter(message):
-            """Defines the formatting used in the logger."""
-            return '[SL4A Client|%s|%s|%s] %s' % (self.adb.serial,
-                                                  self.ports.client_port,
-                                                  self.uid, message)
-
-        self.log = logger.create_logger(_log_formatter)
-
-        self.ports = ports
-        self.set_timeout(rpc_client.SOCKET_TIMEOUT)
-
-    def open(self):
-        if self.uid != UNKNOWN_UID:
-            start_command = Sl4aConnectionCommand.CONTINUE
-        else:
-            start_command = Sl4aConnectionCommand.INIT
-
-        self._initiate_handshake(start_command)
-
-    def _initiate_handshake(self, start_command):
-        """Establishes a connection with the SL4A server.
-
-        Args:
-            start_command: The command to send. See Sl4aConnectionCommand.
-        """
-        try:
-            resp = self._cmd(start_command)
-        except socket.timeout as e:
-            self.log.error('Failed to open socket connection: %s', e)
-            raise
-        if not resp:
-            raise rpc_client.Sl4aProtocolError(
-                rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE)
-        result = json.loads(str(resp, encoding='utf8'))
-        if result['status']:
-            self.uid = result['uid']
-        else:
-            self.log.warning(
-                'UID not received for connection %s.' % self.ports)
-            self.uid = UNKNOWN_UID
-        self.log.debug('Created connection over: %s.' % self.ports)
-
-    def _cmd(self, command):
-        """Sends an session protocol command to SL4A to establish communication.
-
-        Args:
-            command: The name of the command to execute.
-
-        Returns:
-            The line that was written back.
-        """
-        self.send_request(json.dumps({'cmd': command, 'uid': self.uid}))
-        return self.get_response()
-
-    def get_new_ticket(self):
-        """Returns a ticket for a new request."""
-        with self._ticket_lock:
-            self._ticket_counter += 1
-            ticket = self._ticket_counter
-        return ticket
-
-    def set_timeout(self, timeout):
-        """Sets the socket's wait for response timeout."""
-        self._client_socket.settimeout(timeout)
-
-    def send_request(self, request):
-        """Sends a request over the connection."""
-        self._socket_file.write(request.encode('utf8') + b'\n')
-        self._socket_file.flush()
-        self.log.debug('Sent: ' + request)
-
-    def get_response(self):
-        """Returns the first response sent back to the client."""
-        data = self._socket_file.readline()
-        self.log.debug('Received: ' + data.decode('utf8', errors='replace'))
-        return data
-
-    def close(self):
-        """Closes the connection gracefully."""
-        self._client_socket.close()
-        self.adb.remove_tcp_forward(self.ports.forwarded_port)
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_manager.py b/src/antlion/controllers/sl4a_lib/sl4a_manager.py
deleted file mode 100644
index 8749702..0000000
--- a/src/antlion/controllers/sl4a_lib/sl4a_manager.py
+++ /dev/null
@@ -1,317 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import threading
-
-import time
-
-from antlion import logger
-from antlion.controllers.sl4a_lib import rpc_client
-from antlion.controllers.sl4a_lib import sl4a_session
-from antlion.controllers.sl4a_lib import error_reporter
-
-ATTEMPT_INTERVAL = .25
-MAX_WAIT_ON_SERVER_SECONDS = 5
-
-SL4A_PKG_NAME = 'com.googlecode.android_scripting'
-
-_SL4A_LAUNCH_SERVER_CMD = (
-    'am startservice -a com.googlecode.android_scripting.action.LAUNCH_SERVER '
-    '--ei com.googlecode.android_scripting.extra.USE_SERVICE_PORT %s '
-    'com.googlecode.android_scripting/.service.ScriptingLayerService')
-
-_SL4A_CLOSE_SERVER_CMD = (
-    'am startservice -a com.googlecode.android_scripting.action.KILL_PROCESS '
-    '--ei com.googlecode.android_scripting.extra.PROXY_PORT %s '
-    'com.googlecode.android_scripting/.service.ScriptingLayerService')
-
-# The command for finding SL4A's server port as root.
-_SL4A_ROOT_FIND_PORT_CMD = (
-    # Get all open, listening ports, and their process names
-    'ss -l -p -n | '
-    # Find all open TCP ports for SL4A
-    'grep "tcp.*droid_scripting" | '
-    # Shorten all whitespace to a single space character
-    'tr -s " " | '
-    # Grab the 5th column (which is server:port)
-    'cut -d " " -f 5 |'
-    # Only grab the port
-    'sed s/.*://g')
-
-# The command for finding SL4A's server port without root.
-_SL4A_USER_FIND_PORT_CMD = (
-    # Get all open, listening ports, and their process names
-    'ss -l -p -n | '
-    # Find all open ports exposed to the public. This can produce false
-    # positives since users cannot read the process associated with the port.
-    'grep -e "tcp.*::ffff:127\.0\.0\.1:" | '
-    # Shorten all whitespace to a single space character
-    'tr -s " " | '
-    # Grab the 5th column (which is server:port)
-    'cut -d " " -f 5 |'
-    # Only grab the port
-    'sed s/.*://g')
-
-# The command that begins the SL4A ScriptingLayerService.
-_SL4A_START_SERVICE_CMD = (
-    'am startservice '
-    'com.googlecode.android_scripting/.service.ScriptingLayerService')
-
-# Maps device serials to their SL4A Manager. This is done to prevent multiple
-# Sl4aManagers from existing for the same device.
-_all_sl4a_managers = {}
-
-
-def create_sl4a_manager(adb):
-    """Creates and returns an SL4AManager for the given device.
-
-    Args:
-        adb: A reference to the device's AdbProxy.
-    """
-    if adb.serial in _all_sl4a_managers:
-        _all_sl4a_managers[adb.serial].log.warning(
-            'Attempted to return multiple SL4AManagers on the same device. '
-            'Returning pre-existing SL4AManager instead.')
-        return _all_sl4a_managers[adb.serial]
-    else:
-        manager = Sl4aManager(adb)
-        _all_sl4a_managers[adb.serial] = manager
-        return manager
-
-
-class Sl4aManager(object):
-    """A manager for SL4A Clients to a given AndroidDevice.
-
-    SL4A is a single APK that can host multiple RPC servers at a time. This
-    class manages each server connection over ADB, and will gracefully
-    terminate the apk during cleanup.
-
-    Attributes:
-        _listen_for_port_lock: A lock for preventing multiple threads from
-            potentially mixing up requested ports.
-        _sl4a_ports: A set of all known SL4A server ports in use.
-        adb: A reference to the AndroidDevice's AdbProxy.
-        log: The logger for this object.
-        sessions: A dictionary of session_ids to sessions.
-    """
-
-    def __init__(self, adb):
-        self._listen_for_port_lock = threading.Lock()
-        self._sl4a_ports = set()
-        self.adb = adb
-        self.log = logger.create_logger(lambda msg: '[SL4A Manager|%s] %s' % (
-            adb.serial, msg))
-        self.sessions = {}
-        self._started = False
-        self.error_reporter = error_reporter.ErrorReporter('SL4A %s' %
-                                                           adb.serial)
-
-    @property
-    def sl4a_ports_in_use(self):
-        """Returns a list of all server ports used by SL4A servers."""
-        return set([session.server_port for session in self.sessions.values()])
-
-    def diagnose_failure(self, session, connection):
-        """Diagnoses all potential known reasons SL4A can fail.
-
-        Assumes the failure happened on an RPC call, which verifies the state
-        of ADB/device."""
-        self.error_reporter.create_error_report(self, session, connection)
-
-    def start_sl4a_server(self, device_port, try_interval=ATTEMPT_INTERVAL):
-        """Opens a server socket connection on SL4A.
-
-        Args:
-            device_port: The expected port for SL4A to open on. Note that in
-                many cases, this will be different than the port returned by
-                this method.
-            try_interval: The amount of seconds between attempts at finding an
-                opened port on the AndroidDevice.
-
-        Returns:
-            The port number on the device the SL4A server is open on.
-
-        Raises:
-            Sl4aConnectionError if SL4A's opened port cannot be found.
-        """
-        # Launch a server through SL4A.
-        self.adb.shell(_SL4A_LAUNCH_SERVER_CMD % device_port)
-
-        # There is a chance that the server has not come up yet by the time the
-        # launch command has finished. Try to read get the listening port again
-        # after a small amount of time.
-        time_left = MAX_WAIT_ON_SERVER_SECONDS
-        while time_left > 0:
-            port = self._get_open_listening_port()
-            if port is None:
-                time.sleep(try_interval)
-                time_left -= try_interval
-            else:
-                return port
-
-        raise rpc_client.Sl4aConnectionError(
-            'Unable to find a valid open port for a new server connection. '
-            'Expected port: %s. Open ports: %s' %
-            (device_port, self._sl4a_ports))
-
-    def _get_all_ports_command(self):
-        """Returns the list of all ports from the command to get ports."""
-        is_root = True
-        if not self.adb.is_root():
-            is_root = self.adb.ensure_root()
-
-        if is_root:
-            return _SL4A_ROOT_FIND_PORT_CMD
-        else:
-            # TODO(markdr): When root is unavailable, search logcat output for
-            #               the port the server has opened.
-            self.log.warning('Device cannot be put into root mode. SL4A '
-                             'server connections cannot be verified.')
-            return _SL4A_USER_FIND_PORT_CMD
-
-    def _get_all_ports(self):
-        return self.adb.shell(self._get_all_ports_command()).split()
-
-    def _get_open_listening_port(self):
-        """Returns any open, listening port found for SL4A.
-
-        Will return none if no port is found.
-        """
-        possible_ports = self._get_all_ports()
-        self.log.debug('SL4A Ports found: %s' % possible_ports)
-
-        # Acquire the lock. We lock this method because if multiple threads
-        # attempt to get a server at the same time, they can potentially find
-        # the same port as being open, and both attempt to connect to it.
-        with self._listen_for_port_lock:
-            for port in possible_ports:
-                if port not in self._sl4a_ports:
-                    self._sl4a_ports.add(port)
-                    return int(port)
-        return None
-
-    def is_sl4a_installed(self):
-        """Returns True if SL4A is installed on the AndroidDevice."""
-        return bool(
-            self.adb.shell('pm path %s' % SL4A_PKG_NAME, ignore_status=True))
-
-    def start_sl4a_service(self):
-        """Starts the SL4A Service on the device.
-
-        For starting an RPC server, use start_sl4a_server() instead.
-        """
-        # Verify SL4A is installed.
-        if not self._started:
-            self._started = True
-            if not self.is_sl4a_installed():
-                raise rpc_client.Sl4aNotInstalledError(
-                    'SL4A is not installed on device %s' % self.adb.serial)
-            if self.adb.shell('(ps | grep "S %s") || true' % SL4A_PKG_NAME):
-                # Close all SL4A servers not opened by this manager.
-                # TODO(markdr): revert back to closing all ports after
-                # b/76147680 is resolved.
-                self.adb.shell('kill -9 $(pidof %s)' % SL4A_PKG_NAME)
-            self.adb.shell(
-                'settings put global hidden_api_blacklist_exemptions "*"')
-            # Start the service if it is not up already.
-            self.adb.shell(_SL4A_START_SERVICE_CMD)
-
-    def obtain_sl4a_server(self, server_port):
-        """Obtain an SL4A server port.
-
-        If the port is open and valid, return it. Otherwise, open an new server
-        with the hinted server_port.
-        """
-        if server_port not in self.sl4a_ports_in_use:
-            return self.start_sl4a_server(server_port)
-        else:
-            return server_port
-
-    def create_session(self,
-                       max_connections=None,
-                       client_port=0,
-                       forwarded_port=0,
-                       server_port=None):
-        """Creates an SL4A server with the given ports if possible.
-
-        The ports are not guaranteed to be available for use. If the port
-        asked for is not available, this will be logged, and the port will
-        be randomized.
-
-        Args:
-            client_port: The client port on the host machine
-            forwarded_port: The server port on the host machine forwarded
-                            by adb from the Android device
-            server_port: The port on the Android device.
-            max_connections: The max number of client connections for the
-                session.
-
-        Returns:
-            A new Sl4aServer instance.
-        """
-        if server_port is None:
-            # If a session already exists, use the same server.
-            if len(self.sessions) > 0:
-                server_port = self.sessions[sorted(
-                    self.sessions.keys())[0]].server_port
-            # Otherwise, open a new server on a random port.
-            else:
-                server_port = 0
-        self.log.debug(
-            "Creating SL4A session client_port={}, forwarded_port={}, server_port={}"
-            .format(client_port, forwarded_port, server_port))
-        self.start_sl4a_service()
-        session = sl4a_session.Sl4aSession(self.adb,
-                                           client_port,
-                                           server_port,
-                                           self.obtain_sl4a_server,
-                                           self.diagnose_failure,
-                                           forwarded_port,
-                                           max_connections=max_connections)
-        self.sessions[session.uid] = session
-        return session
-
-    def stop_service(self):
-        """Stops The SL4A Service. Force-stops the SL4A apk."""
-        try:
-            self.adb.shell('am force-stop %s' % SL4A_PKG_NAME,
-                           ignore_status=True)
-        except Exception as e:
-            self.log.warning("Fail to stop package %s: %s", SL4A_PKG_NAME, e)
-        self._started = False
-
-    def terminate_all_sessions(self):
-        """Terminates all SL4A sessions gracefully."""
-        self.error_reporter.finalize_reports()
-        for _, session in self.sessions.items():
-            session.terminate()
-        self.sessions = {}
-        self._close_all_ports()
-
-    def _close_all_ports(self, try_interval=ATTEMPT_INTERVAL):
-        """Closes all ports opened on SL4A."""
-        ports = self._get_all_ports()
-        for port in set.union(self._sl4a_ports, ports):
-            self.adb.shell(_SL4A_CLOSE_SERVER_CMD % port)
-        time_left = MAX_WAIT_ON_SERVER_SECONDS
-        while time_left > 0 and self._get_open_listening_port():
-            time.sleep(try_interval)
-            time_left -= try_interval
-
-        if time_left <= 0:
-            self.log.warning(
-                'Unable to close all un-managed servers! Server ports that are '
-                'still open are %s' % self._get_open_listening_port())
-        self._sl4a_ports = set()
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_ports.py b/src/antlion/controllers/sl4a_lib/sl4a_ports.py
deleted file mode 100644
index d0172cc..0000000
--- a/src/antlion/controllers/sl4a_lib/sl4a_ports.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class Sl4aPorts(object):
-    """A container for the three ports needed for an SL4A connection.
-
-    Attributes:
-        client_port: The port on the host associated with the SL4A client
-        forwarded_port: The port forwarded to the Android device.
-        server_port: The port on the device associated with the SL4A server.
-    """
-
-    def __init__(self, client_port=0, forwarded_port=0, server_port=0):
-        self.client_port = client_port
-        self.forwarded_port = forwarded_port
-        self.server_port = server_port
-
-    def __str__(self):
-        return '(%s, %s, %s)' % (self.client_port, self.forwarded_port,
-                                 self.server_port)
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_session.py b/src/antlion/controllers/sl4a_lib/sl4a_session.py
deleted file mode 100644
index e84def2..0000000
--- a/src/antlion/controllers/sl4a_lib/sl4a_session.py
+++ /dev/null
@@ -1,259 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import socket
-import threading
-
-import errno
-
-from antlion import logger
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.sl4a_lib import event_dispatcher
-from antlion.controllers.sl4a_lib import rpc_connection
-from antlion.controllers.sl4a_lib import rpc_client
-from antlion.controllers.sl4a_lib import sl4a_ports
-from antlion.controllers.sl4a_lib.rpc_client import Sl4aStartError
-
-SOCKET_TIMEOUT = 60
-
-# The SL4A Session UID when a UID has not been received yet.
-UNKNOWN_UID = -1
-
-
-class Sl4aSession(object):
-    """An object that tracks the state of an SL4A Session.
-
-    Attributes:
-        _event_dispatcher: The EventDispatcher instance, if any, for this
-            session.
-        _terminate_lock: A lock that prevents race conditions for multiple
-            threads calling terminate()
-        _terminated: A bool that stores whether or not this session has been
-            terminated. Terminated sessions cannot be restarted.
-        adb: A reference to the AndroidDevice's AdbProxy.
-        log: The logger for this Sl4aSession
-        server_port: The SL4A server port this session is established on.
-        uid: The uid that corresponds the the SL4A Server's session id. This
-            value is only unique during the lifetime of the SL4A apk.
-    """
-
-    def __init__(self,
-                 adb,
-                 host_port,
-                 device_port,
-                 get_server_port_func,
-                 on_error_callback,
-                 forwarded_port=0,
-                 max_connections=None):
-        """Creates an SL4A Session.
-
-        Args:
-            adb: A reference to the adb proxy
-            get_server_port_func: A lambda (int) that returns the corrected
-                server port. The int passed in hints at which port to use, if
-                possible.
-            host_port: The port the host machine uses to connect to the SL4A
-                server for its first connection.
-            device_port: The SL4A server port to be used as a hint for which
-                SL4A server to connect to.
-            forwarded_port: The server port on host machine forwarded by adb
-                            from Android device to accept SL4A connection
-        """
-        self._event_dispatcher = None
-        self._terminate_lock = threading.Lock()
-        self._terminated = False
-        self.adb = adb
-
-        def _log_formatter(message):
-            return '[SL4A Session|%s|%s] %s' % (self.adb.serial, self.uid,
-                                                message)
-
-        self.log = logger.create_logger(_log_formatter)
-
-        self.forwarded_port = forwarded_port
-        self.server_port = device_port
-        self.uid = UNKNOWN_UID
-        self.obtain_server_port = get_server_port_func
-        self._on_error_callback = on_error_callback
-
-        connection_creator = self._rpc_connection_creator(host_port)
-        self.rpc_client = rpc_client.RpcClient(self.uid,
-                                               self.adb.serial,
-                                               self.diagnose_failure,
-                                               connection_creator,
-                                               max_connections=max_connections)
-
-    def _rpc_connection_creator(self, host_port):
-        def create_client(uid):
-            return self._create_rpc_connection(ports=sl4a_ports.Sl4aPorts(
-                host_port, self.forwarded_port, self.server_port),
-                                               uid=uid)
-
-        return create_client
-
-    @property
-    def is_alive(self):
-        return not self._terminated
-
-    def _create_forwarded_port(self, server_port, hinted_port=0):
-        """Creates a forwarded port to the specified server port.
-
-        Args:
-            server_port: (int) The port to forward to.
-            hinted_port: (int) The port to use for forwarding, if available.
-                         Otherwise, the chosen port will be random.
-        Returns:
-            The chosen forwarded port.
-
-        Raises AdbError if the version of ADB is too old, or the command fails.
-        """
-        if self.adb.get_version_number() < 37 and hinted_port == 0:
-            self.log.error(
-                'The current version of ADB does not automatically provide a '
-                'port to forward. Please upgrade ADB to version 1.0.37 or '
-                'higher.')
-            raise Sl4aStartError('Unable to forward a port to the device.')
-        else:
-            try:
-                return self.adb.tcp_forward(hinted_port, server_port)
-            except AdbError as e:
-                if 'cannot bind listener' in e.stderr:
-                    self.log.warning(
-                        'Unable to use %s to forward to device port %s due to: '
-                        '"%s". Attempting to choose a random port instead.' %
-                        (hinted_port, server_port, e.stderr))
-                    # Call this method again, but this time with no hinted port.
-                    return self._create_forwarded_port(server_port)
-                raise e
-
-    def _create_rpc_connection(self, ports=None, uid=UNKNOWN_UID):
-        """Creates an RPC Connection with the specified ports.
-
-        Args:
-            ports: A Sl4aPorts object or a tuple of (host/client_port,
-                   forwarded_port, device/server_port). If any of these are
-                   zero, the OS will determine their values during connection.
-
-                   Note that these ports are only suggestions. If they are not
-                   available, the a different port will be selected.
-            uid: The UID of the SL4A Session. To create a new session, use
-                 UNKNOWN_UID.
-        Returns:
-            An Sl4aClient.
-        """
-        if ports is None:
-            ports = sl4a_ports.Sl4aPorts(0, 0, 0)
-        # Open a new server if a server cannot be inferred.
-        ports.server_port = self.obtain_server_port(ports.server_port)
-        self.server_port = ports.server_port
-        # Forward the device port to the host.
-        ports.forwarded_port = self._create_forwarded_port(
-            ports.server_port, hinted_port=ports.forwarded_port)
-        client_socket, fd = self._create_client_side_connection(ports)
-        client = rpc_connection.RpcConnection(self.adb,
-                                              ports,
-                                              client_socket,
-                                              fd,
-                                              uid=uid)
-        client.open()
-        if uid == UNKNOWN_UID:
-            self.uid = client.uid
-        return client
-
-    def diagnose_failure(self, connection):
-        """Diagnoses any problems related to the SL4A session."""
-        self._on_error_callback(self, connection)
-
-    def get_event_dispatcher(self):
-        """Returns the EventDispatcher for this Sl4aSession."""
-        if self._event_dispatcher is None:
-            self._event_dispatcher = event_dispatcher.EventDispatcher(
-                self.adb.serial, self.rpc_client)
-        return self._event_dispatcher
-
-    def _create_client_side_connection(self, ports):
-        """Creates and connects the client socket to the forward device port.
-
-        Args:
-            ports: A Sl4aPorts object or a tuple of (host_port,
-            forwarded_port, device_port).
-
-        Returns:
-            A tuple of (socket, socket_file_descriptor).
-        """
-        client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        client_socket.settimeout(SOCKET_TIMEOUT)
-        client_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-        if ports.client_port != 0:
-            try:
-                client_socket.bind((socket.gethostname(), ports.client_port))
-            except OSError as e:
-                # If the port is in use, log and ask for any open port.
-                if e.errno == errno.EADDRINUSE:
-                    self.log.warning('Port %s is already in use on the host. '
-                                     'Generating a random port.' %
-                                     ports.client_port)
-                    ports.client_port = 0
-                    return self._create_client_side_connection(ports)
-                raise
-
-        # Verify and obtain the port opened by SL4A.
-        try:
-            # Connect to the port that has been forwarded to the device.
-            client_socket.connect(('127.0.0.1', ports.forwarded_port))
-        except socket.timeout:
-            raise rpc_client.Sl4aConnectionError(
-                'SL4A has not connected over the specified port within the '
-                'timeout of %s seconds.' % SOCKET_TIMEOUT)
-        except socket.error as e:
-            # In extreme, unlikely cases, a socket error with
-            # errno.EADDRNOTAVAIL can be raised when a desired host_port is
-            # taken by a separate program between the bind and connect calls.
-            # Note that if host_port is set to zero, there is no bind before
-            # the connection is made, so this error will never be thrown.
-            if e.errno == errno.EADDRNOTAVAIL:
-                ports.client_port = 0
-                return self._create_client_side_connection(ports)
-            raise
-        ports.client_port = client_socket.getsockname()[1]
-        return client_socket, client_socket.makefile(mode='brw')
-
-    def terminate(self):
-        """Terminates the session.
-
-        The return of process execution is blocked on completion of all events
-        being processed by handlers in the Event Dispatcher.
-        """
-        with self._terminate_lock:
-            if not self._terminated:
-                self.log.debug('Terminating Session.')
-                try:
-                    self.rpc_client.closeSl4aSession()
-                except Exception as e:
-                    if "SL4A session has already been terminated" not in str(
-                            e):
-                        self.log.warning(e)
-                # Must be set after closeSl4aSession so the rpc_client does not
-                # think the session has closed.
-                self._terminated = True
-                if self._event_dispatcher:
-                    try:
-                        self._event_dispatcher.close()
-                    except Exception as e:
-                        self.log.warning(e)
-                try:
-                    self.rpc_client.terminate()
-                except Exception as e:
-                    self.log.warning(e)
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_types.py b/src/antlion/controllers/sl4a_lib/sl4a_types.py
deleted file mode 100644
index 53ea0f0..0000000
--- a/src/antlion/controllers/sl4a_lib/sl4a_types.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.dict_object import DictObject
-
-
-class Sl4aEvent(DictObject):
-    """Event returned by sl4a calls to eventPoll() and eventWait()
-
-    The 'name' field uniquely identifies the contents of 'data'.
-
-    """
-
-    def __init__(self, name=None, time=None, data=None):
-        DictObject.__init__(self, name=name, time=time, data=data)
-
-
-class Sl4aNetworkInfo(DictObject):
-    """SL4A equivalent of an Android NetworkInfo Object"""
-
-    def __init__(self,
-                 isAvailable=None,
-                 isConnected=None,
-                 isFailover=None,
-                 isRoaming=None,
-                 ExtraInfo=None,
-                 FailedReason=None,
-                 TypeName=None,
-                 SubtypeName=None,
-                 State=None):
-        DictObject.__init__(
-            self,
-            isAvailable=isAvailable,
-            isConnected=isConnected,
-            isFailover=isFailover,
-            isRoaming=isRoaming,
-            ExtraInfo=ExtraInfo,
-            FailedReason=FailedReason,
-            TypeName=TypeName,
-            SubtypeName=SubtypeName,
-            State=State)
diff --git a/src/antlion/controllers/sniffer.py b/src/antlion/controllers/sniffer.py
deleted file mode 100644
index 88b43d9..0000000
--- a/src/antlion/controllers/sniffer.py
+++ /dev/null
@@ -1,292 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import importlib
-import logging
-
-MOBLY_CONTROLLER_CONFIG_NAME = "Sniffer"
-ACTS_CONTROLLER_REFERENCE_NAME = "sniffers"
-
-
-def create(configs):
-    """Initializes the sniffer structures based on the JSON configuration. The
-    expected keys are:
-
-    Type: A first-level type of sniffer. Planned to be 'local' for sniffers
-        running on the local machine, or 'remote' for sniffers running
-        remotely.
-    SubType: The specific sniffer type to be used.
-    Interface: The WLAN interface used to configure the sniffer.
-    BaseConfigs: A dictionary specifying baseline configurations of the
-        sniffer. Configurations can be overridden when starting a capture.
-        The keys must be one of the Sniffer.CONFIG_KEY_* values.
-    """
-    objs = []
-    for c in configs:
-        sniffer_type = c["Type"]
-        sniffer_subtype = c["SubType"]
-        interface = c["Interface"]
-        base_configs = c["BaseConfigs"]
-        module_name = "antlion.controllers.sniffer_lib.{}.{}".format(
-            sniffer_type, sniffer_subtype)
-        module = importlib.import_module(module_name)
-        objs.append(
-            module.Sniffer(interface,
-                           logging.getLogger(),
-                           base_configs=base_configs))
-    return objs
-
-
-def destroy(objs):
-    """Destroys the sniffers and terminates any ongoing capture sessions.
-    """
-    for sniffer in objs:
-        try:
-            sniffer.stop_capture()
-        except SnifferError:
-            pass
-
-
-class SnifferError(Exception):
-    """This is the Exception class defined for all errors generated by
-    Sniffer-related modules.
-    """
-
-
-class InvalidDataError(Exception):
-    """This exception is thrown when invalid configuration data is passed
-    to a method.
-    """
-
-
-class ExecutionError(SnifferError):
-    """This exception is thrown when trying to configure the capture device
-    or when trying to execute the capture operation.
-
-    When this exception is seen, it is possible that the sniffer module is run
-    without sudo (for local sniffers) or keys are out-of-date (for remote
-    sniffers).
-    """
-
-
-class InvalidOperationError(SnifferError):
-    """Certain methods may only be accessed when the instance upon which they
-    are invoked is in a certain state. This indicates that the object is not
-    in the correct state for a method to be called.
-    """
-
-
-class Sniffer(object):
-    """This class defines an object representing a sniffer.
-
-    The object defines the generic behavior of sniffers - irrespective of how
-    they are implemented, or where they are located: on the local machine or on
-    the remote machine.
-    """
-
-    CONFIG_KEY_CHANNEL = "channel"
-
-    def __init__(self, interface, logger, base_configs=None):
-        """The constructor for the Sniffer. It constructs a sniffer and
-        configures it to be ready for capture.
-
-        Args:
-            interface: A string specifying the interface used to configure the
-                sniffer.
-            logger: ACTS logger object.
-            base_configs: A dictionary containing baseline configurations of the
-                sniffer. These can be overridden when staring a capture. The
-                keys are specified by Sniffer.CONFIG_KEY_*.
-
-        Returns:
-            self: A configured sniffer.
-
-        Raises:
-            InvalidDataError: if the config_path is invalid.
-            NoPermissionError: if an error occurs while configuring the
-                sniffer.
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-    def get_descriptor(self):
-        """This function returns a string describing the sniffer. The specific
-        string (and its format) is up to each derived sniffer type.
-
-        Returns:
-            A string describing the sniffer.
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-    def get_type(self):
-        """This function returns the type of the sniffer.
-
-        Returns:
-            The type (string) of the sniffer. Corresponds to the 'Type' key of
-            the sniffer configuration.
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-    def get_subtype(self):
-        """This function returns the sub-type of the sniffer.
-
-        Returns:
-            The sub-type (string) of the sniffer. Corresponds to the 'SubType'
-            key of the sniffer configuration.
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-    def get_interface(self):
-        """This function returns The interface used to configure the sniffer,
-        e.g. 'wlan0'.
-
-        Returns:
-            The interface (string) used to configure the sniffer. Corresponds to
-            the 'Interface' key of the sniffer configuration.
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-    def get_capture_file(self):
-        """The sniffer places a capture in the logger directory. This function
-        enables the caller to obtain the path of that capture.
-
-        Returns:
-            The full path of the current or last capture.
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-    def start_capture(self,
-                      override_configs=None,
-                      additional_args=None,
-                      duration=None,
-                      packet_count=None):
-        """This function starts a capture which is saved to the specified file
-        path.
-
-        Depending on the type/subtype and configuration of the sniffer the
-        capture may terminate on its own or may require an explicit call to the
-        stop_capture() function.
-
-        This is a non-blocking function so a terminating function must be
-        called - either explicitly or implicitly:
-        - Explicitly: call either stop_capture() or wait_for_capture()
-        - Implicitly: use with a with clause. The wait_for_capture() function
-                      will be called if a duration is specified (i.e. is not
-                      None), otherwise a stop_capture() will be called.
-
-        The capture is saved to a file in the log path of the logger. Use
-        the get_capture_file() to get the full path to the current or most
-        recent capture.
-
-        Args:
-            override_configs: A dictionary which is combined with the
-                base_configs ("BaseConfigs" in the sniffer configuration). The
-                keys (specified by Sniffer.CONFIG_KEY_*) determine the
-                configuration of the sniffer for this specific capture.
-            additional_args: A string specifying additional raw
-                command-line arguments to pass to the underlying sniffer. The
-                interpretation of these flags is sniffer-dependent.
-            duration: An integer specifying the number of seconds over which to
-                capture packets. The sniffer will be terminated after this
-                duration. Used in implicit mode when using a 'with' clause. In
-                explicit control cases may have to be performed using a
-                sleep+stop or as the timeout argument to the wait function.
-            packet_count: An integer specifying the number of packets to capture
-                before terminating. Should be used with duration to guarantee
-                that capture terminates at some point (even if did not capture
-                the specified number of packets).
-
-        Returns:
-            An ActiveCaptureContext process which can be used with a 'with'
-            clause.
-
-        Raises:
-            InvalidDataError: for invalid configurations
-            NoPermissionError: if an error occurs while configuring and running
-                the sniffer.
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-    def stop_capture(self):
-        """This function stops a capture and guarantees that the capture is
-        saved to the capture file configured during the start_capture() method.
-        Depending on the type of the sniffer the file may previously contain
-        partial results (e.g. for a local sniffer) or may not exist until the
-        stop_capture() method is executed (e.g. for a remote sniffer).
-
-        Depending on the type/subtype and configuration of the sniffer the
-        capture may terminate on its own without requiring a call to this
-        function. In such a case it is still necessary to call either this
-        function or the wait_for_capture() function to make sure that the
-        capture file is moved to the correct location.
-
-        Raises:
-            NoPermissionError: No permission when trying to stop a capture
-                and save the capture file.
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-    def wait_for_capture(self, timeout=None):
-        """This function waits for a capture to terminate and guarantees that
-        the capture is saved to the capture file configured during the
-        start_capture() method. Depending on the type of the sniffer the file
-        may previously contain partial results (e.g. for a local sniffer) or
-        may not exist until the stop_capture() method is executed (e.g. for a
-        remote sniffer).
-
-        Depending on the type/subtype and configuration of the sniffer the
-        capture may terminate on its own without requiring a call to this
-        function. In such a case it is still necessary to call either this
-        function or the stop_capture() function to make sure that the capture
-        file is moved to the correct location.
-
-        Args:
-            timeout: An integer specifying the number of seconds to wait for
-                the capture to terminate on its own. On expiration of the
-                timeout the sniffer is stopped explicitly using the
-                stop_capture() function.
-
-        Raises:
-            NoPermissionError: No permission when trying to stop a capture and
-                save the capture file.
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-
-class ActiveCaptureContext(object):
-    """This class defines an object representing an active sniffer capture.
-
-    The object is returned by a Sniffer.start_capture() command and terminates
-    the capture when the 'with' clause exits. It is syntactic sugar for
-    try/finally.
-    """
-
-    _sniffer = None
-    _timeout = None
-
-    def __init__(self, sniffer, timeout=None):
-        self._sniffer = sniffer
-        self._timeout = timeout
-
-    def __enter__(self):
-        pass
-
-    def __exit__(self, type, value, traceback):
-        if self._sniffer is not None:
-            if self._timeout is None:
-                self._sniffer.stop_capture()
-            else:
-                self._sniffer.wait_for_capture(self._timeout)
-        self._sniffer = None
diff --git a/src/antlion/controllers/sniffer_lib/local/local_base.py b/src/antlion/controllers/sniffer_lib/local/local_base.py
deleted file mode 100644
index 5a6af09..0000000
--- a/src/antlion/controllers/sniffer_lib/local/local_base.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Class for Local sniffers - i.e. running on the local machine.
-
-This class provides configuration for local interfaces but leaves
-the actual capture (sniff) to sub-classes.
-"""
-
-import os
-import shutil
-import signal
-import subprocess
-import tempfile
-from antlion import logger
-from antlion import utils
-from antlion.controllers import sniffer
-
-class SnifferLocalBase(sniffer.Sniffer):
-    """This class defines the common behaviors of WLAN sniffers running on
-    WLAN interfaces of the local machine.
-
-    Specific mechanisms to capture packets over the local WLAN interfaces are
-    implemented by sub-classes of this class - i.e. it is not a final class.
-    """
-
-    def __init__(self, interface, logger, base_configs=None):
-        """See base class documentation
-        """
-        self._base_configs = None
-        self._capture_file_path = ""
-        self._interface = ""
-        self._logger = logger
-        self._process = None
-        self._temp_capture_file_path = ""
-
-        if interface == "":
-            raise sniffer.InvalidDataError("Empty interface provided")
-        self._interface = interface
-        self._base_configs = base_configs
-
-        try:
-            utils.exe_cmd("ifconfig", self._interface, "down")
-            utils.exe_cmd("iwconfig", self._interface, "mode", "monitor")
-            utils.exe_cmd("ifconfig", self._interface, "up")
-        except Exception as err:
-            raise sniffer.ExecutionError(err)
-
-    def get_interface(self):
-        """See base class documentation
-        """
-        return self._interface
-
-    def get_type(self):
-        """See base class documentation
-        """
-        return "local"
-
-    def get_capture_file(self):
-        return self._capture_file_path
-
-    def _pre_capture_config(self, override_configs=None):
-        """Utility function which configures the wireless interface per the
-        specified configurations. Operation is performed before every capture
-        start using baseline configurations (specified when sniffer initialized)
-        and override configurations specified here.
-        """
-        final_configs = {}
-        if self._base_configs:
-            final_configs.update(self._base_configs)
-        if override_configs:
-            final_configs.update(override_configs)
-
-        if sniffer.Sniffer.CONFIG_KEY_CHANNEL in final_configs:
-            try:
-                utils.exe_cmd("iwconfig", self._interface, "channel",
-                        str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL]))
-            except Exception as err:
-                raise sniffer.ExecutionError(err)
-
-    def _get_command_line(self, additional_args=None, duration=None,
-                          packet_count=None):
-        """Utility function to be implemented by every child class - which
-        are the concrete sniffer classes. Each sniffer-specific class should
-        derive the command line to execute its sniffer based on the specified
-        arguments.
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-    def _post_process(self):
-        """Utility function which is executed after a capture is done. It
-        moves the capture file to the requested location.
-        """
-        self._process = None
-        shutil.move(self._temp_capture_file_path, self._capture_file_path)
-
-    def start_capture(self, override_configs=None,
-                      additional_args=None, duration=None,
-                      packet_count=None):
-        """See base class documentation
-        """
-        if self._process is not None:
-            raise sniffer.InvalidOperationError(
-                    "Trying to start a sniff while another is still running!")
-        capture_dir = os.path.join(self._logger.log_path,
-                                   "Sniffer-{}".format(self._interface))
-        os.makedirs(capture_dir, exist_ok=True)
-        self._capture_file_path = os.path.join(capture_dir,
-                      "capture_{}.pcap".format(logger.get_log_file_timestamp()))
-
-        self._pre_capture_config(override_configs)
-        _, self._temp_capture_file_path = tempfile.mkstemp(suffix=".pcap")
-
-        cmd = self._get_command_line(additional_args=additional_args,
-                                duration=duration, packet_count=packet_count)
-
-        self._process = utils.start_standing_subprocess(cmd)
-        return sniffer.ActiveCaptureContext(self, duration)
-
-    def stop_capture(self):
-        """See base class documentation
-        """
-        if self._process is None:
-            raise sniffer.InvalidOperationError(
-                                      "Trying to stop a non-started process")
-        utils.stop_standing_subprocess(self._process, kill_signal=signal.SIGINT)
-        self._post_process()
-
-    def wait_for_capture(self, timeout=None):
-        """See base class documentation
-        """
-        if self._process is None:
-            raise sniffer.InvalidOperationError(
-                                  "Trying to wait on a non-started process")
-        try:
-            utils.wait_for_standing_subprocess(self._process, timeout)
-            self._post_process()
-        except subprocess.TimeoutExpired:
-            self.stop_capture()
diff --git a/src/antlion/controllers/sniffer_lib/local/tcpdump.py b/src/antlion/controllers/sniffer_lib/local/tcpdump.py
deleted file mode 100644
index 39f8720..0000000
--- a/src/antlion/controllers/sniffer_lib/local/tcpdump.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import shutil
-from antlion.controllers import sniffer
-from antlion.controllers.sniffer_lib.local import local_base
-
-
-class Sniffer(local_base.SnifferLocalBase):
-    """This class defines a sniffer which uses tcpdump as its back-end
-    """
-
-    def __init__(self, config_path, logger, base_configs=None):
-        """See base class documentation
-        """
-        self._executable_path = None
-
-        super(local_base.SnifferLocalBase).__init__(
-            config_path, logger, base_configs=base_configs)
-
-        self._executable_path = shutil.which("tcpdump")
-        if self._executable_path is None:
-            raise sniffer.SnifferError(
-                "Cannot find a path to the 'tcpdump' executable")
-
-    def get_descriptor(self):
-        """See base class documentation
-        """
-        return "local-tcpdump-{}".format(self._interface)
-
-    def get_subtype(self):
-        """See base class documentation
-        """
-        return "tcpdump"
-
-    def _get_command_line(self,
-                          additional_args=None,
-                          duration=None,
-                          packet_count=None):
-        cmd = "{} -i {} -w {}".format(self._executable_path, self._interface,
-                                      self._temp_capture_file_path)
-        if packet_count is not None:
-            cmd = "{} -c {}".format(cmd, packet_count)
-        if additional_args is not None:
-            cmd = "{} {}".format(cmd, additional_args)
-        return cmd
diff --git a/src/antlion/controllers/sniffer_lib/local/tshark.py b/src/antlion/controllers/sniffer_lib/local/tshark.py
deleted file mode 100644
index b95aa7d..0000000
--- a/src/antlion/controllers/sniffer_lib/local/tshark.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import shutil
-from antlion.controllers import sniffer
-from antlion.controllers.sniffer_lib.local import local_base
-
-class Sniffer(local_base.SnifferLocalBase):
-    """This class defines a sniffer which uses tshark as its back-end
-    """
-
-    def __init__(self, config_path, logger, base_configs=None):
-        """See base class documentation
-        """
-        self._executable_path = None
-
-        super().__init__(config_path, logger, base_configs=base_configs)
-
-        self._executable_path = (shutil.which("tshark")
-                                 or shutil.which("/usr/local/bin/tshark"))
-        if self._executable_path is None:
-            raise sniffer.SnifferError("Cannot find a path to the 'tshark' "
-                                 "executable (or to '/usr/local/bin/tshark')")
-
-    def get_descriptor(self):
-        """See base class documentation
-        """
-        return "local-tshark-{}-ch{}".format(self._interface)
-
-    def get_subtype(self):
-        """See base class documentation
-        """
-        return "tshark"
-
-    def _get_command_line(self, additional_args=None, duration=None,
-                          packet_count=None):
-        cmd = "{} -i {} -w {}".format(self._executable_path, self._interface,
-                                      self._temp_capture_file_path)
-        if duration is not None:
-            cmd = "{} -a duration:{}".format(cmd, duration)
-        if packet_count is not None:
-            cmd = "{} -c {}".format(cmd, packet_count)
-        if additional_args is not None:
-            cmd = "{} {}".format(cmd, additional_args)
-        return cmd
diff --git a/src/antlion/controllers/spectracom_lib/__init__.py b/src/antlion/controllers/spectracom_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/spectracom_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/spectracom_lib/gsg6.py b/src/antlion/controllers/spectracom_lib/gsg6.py
deleted file mode 100644
index e3d0896..0000000
--- a/src/antlion/controllers/spectracom_lib/gsg6.py
+++ /dev/null
@@ -1,219 +0,0 @@
-"""Python module for Spectracom/Orolia GSG-6 GNSS simulator."""
-
-from antlion.controllers import abstract_inst
-
-
-class GSG6Error(abstract_inst.SocketInstrumentError):
-    """GSG-6 Instrument Error Class."""
-
-
-class GSG6(abstract_inst.SocketInstrument):
-    """GSG-6 Class, inherted from abstract_inst SocketInstrument."""
-
-    def __init__(self, ip_addr, ip_port):
-        """Init method for GSG-6.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        super(GSG6, self).__init__(ip_addr, ip_port)
-
-        self.idn = ''
-
-    def connect(self):
-        """Init and Connect to GSG-6."""
-        self._connect_socket()
-
-        self.get_idn()
-
-        infmsg = 'Connected to GSG-6, with ID: {}'.format(self.idn)
-        self._logger.debug(infmsg)
-
-    def close(self):
-        """Close GSG-6."""
-        self._close_socket()
-
-        self._logger.debug('Closed connection to GSG-6')
-
-    def get_idn(self):
-        """Get the Idenification of GSG-6.
-
-        Returns:
-            GSG-6 Identifier
-        """
-        self.idn = self._query('*IDN?')
-
-        return self.idn
-
-    def start_scenario(self, scenario=''):
-        """Start to run scenario.
-
-        Args:
-            scenario: Scenario to run.
-                Type, str.
-                Default, '', which will run current selected one.
-        """
-        if scenario:
-            cmd = 'SOUR:SCEN:LOAD ' + scenario
-            self._send(cmd)
-
-        self._send('SOUR:SCEN:CONT START')
-
-        if scenario:
-            infmsg = 'Started running scenario {}'.format(scenario)
-        else:
-            infmsg = 'Started running current scenario'
-
-        self._logger.debug(infmsg)
-
-    def stop_scenario(self):
-        """Stop the running scenario."""
-
-        self._send('SOUR:SCEN:CONT STOP')
-
-        self._logger.debug('Stopped running scenario')
-
-    def preset(self):
-        """Preset GSG-6 to default status."""
-        self._send('*RST')
-
-        self._logger.debug('Reset GSG-6')
-
-    def set_power(self, power_level):
-        """set GSG-6 transmit power on all bands.
-
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-
-        Raises:
-            GSG6Error: raise when power level is not in [-160, -65] range.
-        """
-        if not -160 <= power_level <= -65:
-            errmsg = ('"power_level" must be within [-160, -65], '
-                      'current input is {}').format(str(power_level))
-            raise GSG6Error(error=errmsg, command='set_power')
-
-        self._send(':SOUR:POW ' + str(round(power_level, 1)))
-
-        infmsg = 'Set GSG-6 transmit power to "{}"'.format(round(
-            power_level, 1))
-        self._logger.debug(infmsg)
-
-    def get_nmealog(self):
-        """Get GSG6 NMEA data.
-
-        Returns:
-            GSG6's NMEA data
-        """
-        nmea_data = self._query('SOUR:SCEN:LOG?')
-
-        return nmea_data
-
-    def toggle_scenario_power(self,
-                              toggle_onoff='ON',
-                              sat_id='',
-                              sat_system=''):
-        """Toggle ON OFF scenario.
-
-        Args:
-            toggle_onoff: turn on or off the satellites
-                Type, str. Option ON/OFF
-                Default, 'ON'
-            sat_id: satellite identifiers
-                Type, str.
-                Option 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                where xx is satellite identifiers no.
-                e.g.: G10
-            sat_system: to toggle On/OFF for all Satellites
-                Type, str
-                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
-        Raises:
-            GSG6Error: raise when toggle is not set.
-        """
-        if not sat_id and not sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(toggle_onoff))
-            infmsg = 'Set GSG-6 Power to "{}"'.format(toggle_onoff)
-            self._logger.debug(infmsg)
-
-        elif sat_id and not sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_id) + ',' +
-                       str(toggle_onoff))
-            infmsg = ('Set GSG-6 Power to "{}" for "{}" satellite '
-                      'identifiers').format(toggle_onoff, sat_id)
-            self._logger.debug(infmsg)
-
-        elif not sat_id and sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_system) + ',' +
-                       str(toggle_onoff))
-            infmsg = 'Set GSG-6 Power to "{}" for "{}" satellite system'.format(
-                toggle_onoff, sat_system)
-            self._logger.debug(infmsg)
-
-        else:
-            errmsg = ('"toggle power" must have either of these value [ON/OFF],'
-                      ' current input is {}').format(str(toggle_onoff))
-            raise GSG6Error(error=errmsg, command='toggle_scenario_power')
-
-    def set_scenario_power(self,
-                           power_level,
-                           sat_id='',
-                           sat_system='',
-                           freq_band=''):
-        """Set dynamic power for the running scenario.
-
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-            sat_id: set power level for specific satellite identifiers
-                Type, str. Option
-                'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
-                where xx is satellite identifiers number
-                e.g.: G10
-            sat_system: to set power level for all Satellites
-                Type, str
-                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
-            freq_band: Frequency band to set the power level
-                Type, str
-                Option  [L1, L2, L5, ALL]
-                Default, '', assumed to be L1.
-        Raises:
-            GSG6Error: raise when power level is not in [-160, -65] range.
-        """
-        if freq_band == 'ALL':
-            if not -100 <= power_level <= 100:
-                errmsg = ('"power_level" must be within [-100, 100], for '
-                          '"freq_band"="ALL", current input is {}').format(
-                              str(power_level))
-                raise GSG6Error(error=errmsg, command='set_scenario_power')
-        else:
-            if not -160 <= power_level <= -65:
-                errmsg = ('"power_level" must be within [-160, -65], for '
-                          '"freq_band" != "ALL", current input is {}').format(
-                              str(power_level))
-                raise GSG6Error(error=errmsg, command='set_scenario_power')
-
-        if sat_id and not sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_id) + ',' +
-                       str(round(power_level, 1)) + ',' + str(freq_band))
-            infmsg = ('Set GSG-6 transmit power to "{}" for "{}" '
-                      'satellite id').format(round(power_level, 1), sat_id)
-            self._logger.debug(infmsg)
-
-        elif not sat_id and sat_system:
-            self._send(':SOUR:SCEN:POW ' + str(sat_system) + ',' +
-                       str(round(power_level, 1)) + ',' + str(freq_band))
-            infmsg = ('Set GSG-6 transmit power to "{}" for "{}" '
-                      'satellite system').format(round(power_level, 1),
-                                                 sat_system)
-            self._logger.debug(infmsg)
-
-        else:
-            errmsg = ('sat_id or sat_system must have value, current input of '
-                      'sat_id {} and sat_system {}').format(sat_id, sat_system)
-            raise GSG6Error(error=errmsg, command='set_scenario_power')
diff --git a/src/antlion/controllers/spirent_lib/__init__.py b/src/antlion/controllers/spirent_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/spirent_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/spirent_lib/gss6450.py b/src/antlion/controllers/spirent_lib/gss6450.py
deleted file mode 100644
index 3fd1191..0000000
--- a/src/antlion/controllers/spirent_lib/gss6450.py
+++ /dev/null
@@ -1,381 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python module for Spirent GSS6450 GNSS RPS."""
-
-import datetime
-import numbers
-from antlion.controllers import abstract_inst
-
-
-class GSS6450Error(abstract_inst.SocketInstrumentError):
-    """GSS6450 Instrument Error Class."""
-
-
-class GSS6450(abstract_inst.RequestInstrument):
-    """GSS6450 Class, inherted from abstract_inst RequestInstrument."""
-
-    def __init__(self, ip_addr):
-        """Init method for GSS6450.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-        """
-        super(GSS6450, self).__init__(ip_addr)
-
-        self.idn = 'Spirent-GSS6450'
-
-    def _put(self, cmd):
-        """Send put command via GSS6450 HTTP Request and get response.
-
-        Args:
-            cmd: parameters listed in SHM_PUT.
-                Type, Str.
-
-        Returns:
-            resp: Response from the _query method.
-                Type, Str.
-        """
-        put_cmd = 'shm_put.shtml?' + cmd
-        resp = self._query(put_cmd)
-
-        return resp
-
-    def _get(self, cmd):
-        """Send get command via GSS6450 HTTP Request and get response.
-
-        Args:
-            cmd: parameters listed in SHM_GET.
-                Type, Str.
-
-        Returns:
-          resp: Response from the _query method.
-              Type, Str.
-        """
-        get_cmd = 'shm_get.shtml?' + cmd
-        resp = self._query(get_cmd)
-
-        return resp
-
-    def get_scenario_filename(self):
-        """Get the scenario filename of GSS6450.
-
-        Returns:
-            filename: RPS Scenario file name.
-                Type, Str.
-        """
-        resp_raw = self._get('-f')
-        filename = resp_raw.split(':')[-1].strip(' ')
-        self._logger.debug('Got scenario file name: "%s".', filename)
-
-        return filename
-
-    def get_scenario_description(self):
-        """Get the scenario description of GSS6450.
-
-        Returns:
-            description: RPS Scenario description.
-                Type, Str.
-        """
-        resp_raw = self._get('-d')
-        description = resp_raw.split('-d')[-1].strip(' ')
-
-        if description:
-            self._logger.debug('Got scenario description: "%s".', description)
-        else:
-            self._logger.warning('Got scenario description with empty string.')
-
-        return description
-
-    def get_scenario_location(self):
-        """Get the scenario location of GSS6450.
-
-        Returns:
-            location: RPS Scenario location.
-                Type, Str.
-        """
-        resp_raw = self._get('-i')
-        location = resp_raw.split('-i')[-1].strip(' ')
-
-        if location:
-            self._logger.debug('Got scenario location: "%s".', location)
-        else:
-            self._logger.warning('Got scenario location with empty string.')
-
-        return location
-
-    def get_operation_mode(self):
-        """Get the operation mode of GSS6450.
-
-        Returns:
-            mode: RPS Operation Mode.
-                Type, Str.
-                Option, STOPPED/PLAYING/RECORDING
-        """
-        resp_raw = self._get('-m')
-        mode = resp_raw.split('-m')[-1].strip(' ')
-        self._logger.debug('Got operation mode: "%s".', mode)
-
-        return mode
-
-    def get_battery_level(self):
-        """Get the battery level of GSS6450.
-
-        Returns:
-            batterylevel: RPS Battery Level.
-                Type, float.
-        """
-        resp_raw = self._get('-l')
-        batterylevel = float(resp_raw.split('-l')[-1].strip(' '))
-        self._logger.debug('Got battery level: %s%%.', batterylevel)
-
-        return batterylevel
-
-    def get_rfport_voltage(self):
-        """Get the RF port voltage of GSS6450.
-
-        Returns:
-            voltageout: RPS RF port voltage.
-                Type, str
-        """
-        resp_raw = self._get('-v')
-        voltageout = resp_raw.split('-v')[-1].strip(' ')
-        self._logger.debug('Got RF port voltage: "%s".', voltageout)
-
-        return voltageout
-
-    def get_storage_media(self):
-        """Get the storage media of GSS6450.
-
-        Returns:
-            media: RPS storage.
-                Type, str
-
-        Raises:
-            GSS6450Error: raise when request response is not support.
-        """
-        resp_raw = self._get('-M')
-        resp_num = resp_raw.split('-M')[-1].strip(' ')
-
-        if resp_num == '1':
-            media = '1-INTERNAL'
-        elif resp_num == '2':
-            media = '2-REMOVABLE'
-        else:
-            errmsg = ('"{}" is not recognized as GSS6450 valid storage media'
-                      ' type'.format(resp_num))
-            raise GSS6450Error(error=errmsg, command='get_storage_media')
-
-        self._logger.debug('Got current storage media: %s.', media)
-
-        return media
-
-    def get_attenuation(self):
-        """Get the attenuation of GSS6450.
-
-        Returns:
-            attenuation: RPS attenuation level, in dB.
-                Type, list of float.
-        """
-        resp_raw = self._get('-a')
-        resp_str = resp_raw.split('-a')[-1].strip(' ')
-        self._logger.debug('Got attenuation: %s dB.', resp_str)
-        attenuation = [float(itm) for itm in resp_str.split(',')]
-
-        return attenuation
-
-    def get_elapsed_time(self):
-        """Get the running scenario elapsed time of GSS6450.
-
-        Returns:
-            etime: RPS elapsed time.
-                Type, datetime.timedelta.
-        """
-        resp_raw = self._get('-e')
-        resp_str = resp_raw.split('-e')[-1].strip(' ')
-        self._logger.debug('Got senario elapsed time: "%s".', resp_str)
-        etime_tmp = datetime.datetime.strptime(resp_str, '%H:%M:%S')
-        etime = datetime.timedelta(hours=etime_tmp.hour,
-                                   minutes=etime_tmp.minute,
-                                   seconds=etime_tmp.second)
-
-        return etime
-
-    def get_playback_offset(self):
-        """Get the running scenario playback offset of GSS6450.
-
-        Returns:
-            offset: RPS playback offset.
-                Type, datetime.timedelta.
-        """
-        resp_raw = self._get('-o')
-        offset_tmp = float(resp_raw.split('-o')[-1].strip(' '))
-        self._logger.debug('Got senario playback offset: %s sec.', offset_tmp)
-        offset = datetime.timedelta(seconds=offset_tmp)
-
-        return offset
-
-    def play_scenario(self, scenario=''):
-        """Start to play scenario in GSS6450.
-
-        Args:
-            scenario: Scenario to play.
-                Type, str.
-                Default, '', which will run current selected one.
-        """
-        if scenario:
-            cmd = '-f{},-wP'.format(scenario)
-        else:
-            cmd = '-wP'
-
-        _ = self._put(cmd)
-
-        if scenario:
-            infmsg = 'Started playing scenario: "{}".'.format(scenario)
-        else:
-            infmsg = 'Started playing current scenario.'
-
-        self._logger.debug(infmsg)
-
-    def record_scenario(self, scenario=''):
-        """Start to record scenario in GSS6450.
-
-        Args:
-            scenario: Scenario to record.
-                Type, str.
-                Default, '', which will run current selected one.
-        """
-        if scenario:
-            cmd = '-f{},-wR'.format(scenario)
-        else:
-            cmd = '-wR'
-
-        _ = self._put(cmd)
-
-        if scenario:
-            infmsg = 'Started recording scenario: "{}".'.format(scenario)
-        else:
-            infmsg = 'Started recording scenario.'
-
-        self._logger.debug(infmsg)
-
-    def stop_scenario(self):
-        """Start to stop playing/recording scenario in GSS6450."""
-        _ = self._put('-wS')
-
-        self._logger.debug('Stopped playing/recording scanrio.')
-
-    def set_rfport_voltage(self, voltageout):
-        """Set the RF port voltage of GSS6450.
-
-        Args:
-            voltageout: RPS RF port voltage.
-                Type, str
-
-        Raises:
-            GSS6450Error: raise when voltageout input is not valid.
-        """
-        if voltageout == 'OFF':
-            voltage_cmd = '0'
-        elif voltageout == '3.3V':
-            voltage_cmd = '3'
-        elif voltageout == '5V':
-            voltage_cmd = '5'
-        else:
-            errmsg = ('"{}" is not recognized as GSS6450 valid RF port voltage'
-                      ' type'.format(voltageout))
-            raise GSS6450Error(error=errmsg, command='set_rfport_voltage')
-
-        _ = self._put('-v{},-wV'.format(voltage_cmd))
-        self._logger.debug('Set RF port voltage: "%s".', voltageout)
-
-    def set_attenuation(self, attenuation):
-        """Set the attenuation of GSS6450.
-
-        Args:
-            attenuation: RPS attenuation level, in dB.
-                Type, numerical.
-
-        Raises:
-            GSS6450Error: raise when attenuation is not in range.
-        """
-        if not 0 <= attenuation <= 31:
-            errmsg = ('"attenuation" must be within [0, 31], '
-                      'current input is {}').format(str(attenuation))
-            raise GSS6450Error(error=errmsg, command='set_attenuation')
-
-        attenuation_raw = round(attenuation)
-
-        if attenuation_raw != attenuation:
-            warningmsg = ('"attenuation" must be integer, current input '
-                          'will be rounded to {}'.format(attenuation_raw))
-            self._logger.warning(warningmsg)
-
-        _ = self._put('-a{},-wA'.format(attenuation_raw))
-
-        self._logger.debug('Set attenuation: %s dB.', attenuation_raw)
-
-    def set_playback_offset(self, offset):
-        """Set the playback offset of GSS6450.
-
-        Args:
-            offset: RPS playback offset.
-                Type, datetime.timedelta, or numerical.
-
-        Raises:
-            GSS6450Error: raise when offset is not numeric or timedelta.
-        """
-        if isinstance(offset, datetime.timedelta):
-            offset_raw = offset.total_seconds()
-        elif isinstance(offset, numbers.Number):
-            offset_raw = offset
-        else:
-            raise GSS6450Error(error=('"offset" must be numerical value or '
-                                      'datetime.timedelta'),
-                               command='set_playback_offset')
-
-        _ = self._put('-o{}'.format(offset_raw))
-
-        self._logger.debug('Set playback offset: %s sec.', offset_raw)
-
-    def set_storage_media(self, media):
-        """Set the storage media of GSS6450.
-
-        Args:
-            media: RPS storage Media, Internal or External.
-                Type, str. Option, 'internal', 'removable'
-
-        Raises:
-            GSS6450Error: raise when media option is not support.
-        """
-        if media == 'internal':
-            raw_media = '1'
-        elif media == 'removable':
-            raw_media = '2'
-        else:
-            raise GSS6450Error(
-                error=('"media" input must be in ["internal", "removable"]. '
-                       ' Current input is {}'.format(media)),
-                command='set_storage_media')
-
-        _ = self._put('-M{}-wM'.format(raw_media))
-
-        resp_raw = self.get_storage_media()
-        if raw_media != resp_raw[0]:
-            raise GSS6450Error(
-                error=('Setting media "{}" is not the same as queried media '
-                       '"{}".'.format(media, resp_raw)),
-                command='set_storage_media')
diff --git a/src/antlion/controllers/spirent_lib/gss7000.py b/src/antlion/controllers/spirent_lib/gss7000.py
deleted file mode 100644
index 7cc47d4..0000000
--- a/src/antlion/controllers/spirent_lib/gss7000.py
+++ /dev/null
@@ -1,490 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Python module for Spirent GSS7000 GNSS simulator.
-@author: Clay Liao (jianhsiungliao@)
-"""
-from time import sleep
-import xml.etree.ElementTree as ET
-from antlion.controllers import abstract_inst
-
-
-def get_xml_text(xml_string='', tag=''):
-    """Parse xml from string and return specific tag
-
-        Args:
-            xml_string: xml string,
-                Type, Str.
-            tag: tag in xml,
-                Type, Str.
-
-        Returns:
-            text: Text content in the tag
-                Type, Str.
-        """
-    if xml_string and tag:
-        root = ET.fromstring(xml_string)
-        try:
-            text = str(root.find(tag).text).rstrip().lstrip()
-        except ValueError:
-            text = 'INVALID DATA'
-    else:
-        text = 'INVALID DATA'
-    return text
-
-
-class GSS7000Error(abstract_inst.SocketInstrumentError):
-    """GSS7000 Instrument Error Class."""
-
-
-class AbstractInstGss7000(abstract_inst.SocketInstrument):
-    """Abstract instrument for  GSS7000"""
-
-    def _query(self, cmd):
-        """query instrument via Socket.
-
-        Args:
-            cmd: Command to send,
-                Type, Str.
-
-        Returns:
-            resp: Response from Instrument via Socket,
-                Type, Str.
-        """
-        self._send(cmd)
-        self._wait()
-        resp = self._recv()
-        return resp
-
-    def _wait(self, wait_time=1):
-        """wait function
-        Args:
-            wait_time: wait time in sec.
-                Type, int,
-                Default, 1.
-        """
-        sleep(wait_time)
-
-
-class GSS7000Ctrl(AbstractInstGss7000):
-    """GSS7000 control daemon class"""
-
-    def __init__(self, ip_addr, ip_port=7717):
-        """Init method for GSS7000 Control Daemon.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            ip_port: TCPIP Port.
-                Type, str.
-        """
-        super().__init__(ip_addr, ip_port)
-        self.idn = 'Spirent-GSS7000 Control Daemon'
-
-    def connect(self):
-        """Init and Connect to GSS7000 Control Daemon."""
-        # Connect socket then connect socket again
-        self._close_socket()
-        self._connect_socket()
-        # Stop GSS7000 Control Daeamon Then Start
-        self._query('STOP_ENGINE')
-        self._wait()
-        self._query('START_ENGINE')
-
-    def close(self):
-        """Close GSS7000 control daemon"""
-        self._close_socket()
-        self._logger.debug('Closed connection to GSS7000 control daemon')
-
-
-class GSS7000(AbstractInstGss7000):
-    """GSS7000 Class, inherted from abstract_inst SocketInstrument."""
-
-    def __init__(self, ip_addr, engine_ip_port=15650, ctrl_ip_port=7717):
-        """Init method for GSS7000.
-
-        Args:
-            ip_addr: IP Address.
-                Type, str.
-            engine_ip_port: TCPIP Port for
-                Type, str.
-            ctrl_ip_port: TCPIP Port for Control Daemon
-        """
-        super().__init__(ip_addr, engine_ip_port)
-        self.idn = ''
-        self.connected = False
-        self.capability = []
-        self.gss7000_ctrl_daemon = GSS7000Ctrl(ip_addr, ctrl_ip_port)
-        # Close control daemon and engine sockets at the beginning
-        self.gss7000_ctrl_daemon._close_socket()
-        self._close_socket()
-
-    def connect(self):
-        """Connect GSS7000 engine daemon"""
-        # Connect control daemon socket
-        self._logger.debug('Connect to GSS7000')
-        self.gss7000_ctrl_daemon.connect()
-        # Connect to remote engine socket
-        self._wait()
-        self._connect_socket()
-        self.connected = True
-        self.get_hw_capability()
-
-    def close(self):
-        """Close GSS7000 engine daemon"""
-        # Close GSS7000 control daemon
-        self.gss7000_ctrl_daemon.close()
-        # Close GSS7000 engine daemon
-        self._close_socket()
-        self._logger.debug('Closed connection to GSS7000 engine daemon')
-
-    def _parse_hw_cap(self, xml):
-        """Parse GSS7000 hardware capability xml to list.
-            Args:
-                xml: hardware capability xml,
-                    Type, str.
-
-            Returns:
-                capability: Hardware capability dictionary
-                    Type, list.
-        """
-        root = ET.fromstring(xml)
-        capability_ls = list()
-        sig_cap_list = root.find('data').find('Signal_capabilities').findall(
-            'Signal')
-        for signal in sig_cap_list:
-            value = str(signal.text).rstrip().lstrip()
-            capability_ls.extend(value.upper().split(' '))
-        return capability_ls
-
-    def get_hw_capability(self):
-        """Check GSS7000 hardware capability
-
-            Returns:
-                capability: Hardware capability dictionary,
-                    Type, list.
-        """
-        if self.connected:
-            capability_xml = self._query('GET_LICENCED_HARDWARE_CAPABILITY')
-            self.capability = self._parse_hw_cap(capability_xml)
-
-        return self.capability
-
-    def get_idn(self):
-        """Get the SimREPLAYplus Version
-
-        Returns:
-            SimREPLAYplus Version
-        """
-        idn_xml = self._query('*IDN?')
-        self.idn = get_xml_text(idn_xml, 'data')
-        return self.idn
-
-    def load_scenario(self, scenario=''):
-        """Load the scenario.
-
-        Args:
-            scenario: path of scenario,
-                Type, str
-        """
-        if scenario == '':
-            errmsg = ('Missing scenario file')
-            raise GSS7000Error(error=errmsg, command='load_scenario')
-        else:
-            self._logger.debug('Stopped the original scenario')
-            self._query('-,EN,1')
-            cmd = 'SC,' + scenario
-            self._logger.debug('Loading scenario')
-            self._query(cmd)
-            self._logger.debug('Scenario is loaded')
-            return True
-        return False
-
-    def start_scenario(self, scenario=''):
-        """Load and Start the running scenario.
-
-        Args:
-            scenario: path of scenario,
-                Type, str
-        """
-        if scenario:
-            if self.load_scenario(scenario):
-                self._query('RU')
-            else:
-                infmsg = 'No scenario is loaded. Stop running scenario'
-                self._logger.debug(infmsg)
-        else:
-            pass
-
-        if scenario:
-            infmsg = 'Started running scenario {}'.format(scenario)
-        else:
-            infmsg = 'Started running current scenario'
-
-        self._logger.debug(infmsg)
-
-    def get_scenario_name(self):
-        """Get current scenario name"""
-        sc_name_xml = self._query('SC_NAME')
-        return get_xml_text(sc_name_xml, 'data')
-
-    def stop_scenario(self):
-        """Stop the running scenario."""
-        self._query('-,EN,1')
-        self._logger.debug('Stopped running scenario')
-
-    def set_power_offset(self, ant=1, power_offset=0):
-        """Set Power Offset of GSS7000 Tx
-        Args:
-            ant: antenna number of GSS7000
-            power_offset: transmit power offset level
-                Type, float.
-                Decimal, unit [dB]
-
-        Raises:
-            GSS7000Error: raise when power offset level is not in [-49, 15] range.
-        """
-        if not -49 <= power_offset <= 15:
-            errmsg = (f'"power_offset" must be within [-49, 15], '
-                      f'current input is {power_offset}')
-            raise GSS7000Error(error=errmsg, command='set_power_offset')
-
-        cmd = f'-,POW_LEV,V1_A{ant},{power_offset},GPS,0,0,1,1,1,1,0'
-        self._query(cmd)
-
-        infmsg = f'Set veichel 1 antenna {ant} power offset: {power_offset}'
-        self._logger.debug(infmsg)
-
-    def set_ref_power(self, ref_dBm=-130):
-        """Set Ref Power of GSS7000 Tx
-        Args:
-            ref_dBm: transmit reference power level in dBm for GSS7000
-                Type, float.
-                Decimal, unit [dBm]
-
-        Raises:
-            GSS7000Error: raise when power offset level is not in [-170, -115] range.
-        """
-        if not -170 <= ref_dBm <= -115:
-            errmsg = ('"power_offset" must be within [-170, -115], '
-                      'current input is {}').format(str(ref_dBm))
-            raise GSS7000Error(error=errmsg, command='set_ref_power')
-        cmd = 'REF_DBM,{}'.format(str(round(ref_dBm, 1)))
-        self._query(cmd)
-        infmsg = 'Set reference power level: {}'.format(str(round(ref_dBm, 1)))
-        self._logger.debug(infmsg)
-
-    def get_status(self, return_txt=False):
-        """Get current GSS7000 Status
-        Args:
-            return_txt: booling for determining the return results
-                Type, booling.
-        """
-        status_xml = self._query('NULL')
-        status = get_xml_text(status_xml, 'status')
-        if return_txt:
-            status_dict = {
-                '0': 'No Scenario loaded',
-                '1': 'Not completed loading a scenario',
-                '2': 'Idle, ready to run a scenario',
-                '3': 'Arming the scenario',
-                '4': 'Completed arming; or waiting for a command or'
-                     'trigger signal to start the scenario',
-                '5': 'Scenario running',
-                '6': 'Current scenario is paused.',
-                '7': 'Active scenario has stopped and has not been reset.'
-                     'Waiting for further commands.'
-            }
-            return status_dict.get(status)
-        else:
-            return int(status)
-
-    def set_power(self, power_level=-130):
-        """Set Power Level of GSS7000 Tx
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-
-        Raises:
-            GSS7000Error: raise when power level is not in [-170, -115] range.
-        """
-        if not -170 <= power_level <= -115:
-            errmsg = (f'"power_level" must be within [-170, -115], '
-                      f'current input is {power_level}')
-            raise GSS7000Error(error=errmsg, command='set_power')
-
-        power_offset = power_level + 130
-        self.set_power_offset(1, power_offset)
-        self.set_power_offset(2, power_offset)
-
-        infmsg = 'Set GSS7000 transmit power to "{}"'.format(
-            round(power_level, 1))
-        self._logger.debug(infmsg)
-
-    def power_lev_offset_cal(self, power_level=-130, sat='GPS', band='L1'):
-        """Convert target power level to power offset for GSS7000 power setting
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-                Default. -130
-            sat_system: to set power level for all Satellites
-                Type, str
-                Option 'GPS/GLO/GAL'
-                Type, str
-            freq_band: Frequency band to set the power level
-                Type, str
-                Option 'L1/L5/B1I/B1C/B2A/E5'
-                Default, '', assumed to be L1.
-        Return:
-            power_offset: The calculated power offset for setting GSS7000 GNSS target power.
-        """
-        gss7000_tx_pwr = {
-            'GPS_L1': -130,
-            'GPS_L5': -127.9,
-            'GLONASS_F1': -131,
-            'GALILEO_L1': -127,
-            'GALILEO_E5': -122,
-            'BEIDOU_B1I': -133,
-            'BEIDOU_B1C': -130,
-            'BEIDOU_B2A': -127,
-            'QZSS_L1': -128.5,
-            'QZSS_L5': -124.9,
-            'IRNSS_L5': -130
-        }
-
-        sat_band = f'{sat}_{band}'
-        infmsg = f'Target satellite system and band: {sat_band}'
-        self._logger.debug(infmsg)
-        default_pwr_lev = gss7000_tx_pwr.get(sat_band, -130)
-        power_offset = power_level - default_pwr_lev
-        infmsg = (
-            f'Targer power: {power_level}; Default power: {default_pwr_lev};'
-            f' Power offset: {power_offset}')
-        self._logger.debug(infmsg)
-
-        return power_offset
-
-    def sat_band_convert(self, sat, band):
-        """Satellite system and operation band conversion and check.
-        Args:
-            sat: to set power level for all Satellites
-                Type, str
-                Option 'GPS/GLO/GAL/BDS'
-                Type, str
-            band: Frequency band to set the power level
-                Type, str
-                Option 'L1/L5/B1I/B1C/B2A/F1/E5'
-                Default, '', assumed to be L1.
-        """
-        sat_system_dict = {
-            'GPS': 'GPS',
-            'GLO': 'GLONASS',
-            'GAL': 'GALILEO',
-            'BDS': 'BEIDOU',
-            'IRNSS': 'IRNSS',
-            'ALL': 'GPS'
-        }
-        sat = sat_system_dict.get(sat, 'GPS')
-        if band == '':
-            infmsg = 'No band is set. Set to default band = L1'
-            self._logger.debug(infmsg)
-            band = 'L1'
-        if sat == '':
-            infmsg = 'No satellite system is set. Set to default sat = GPS'
-            self._logger.debug(infmsg)
-            sat = 'GPS'
-        sat_band = f'{sat}_{band}'
-        self._logger.debug(f'Current band: {sat_band}')
-        self._logger.debug(f'Capability: {self.capability}')
-        # Check if satellite standard and band are supported
-        # If not in support list, return GPS_L1 as default
-        if not sat_band in self.capability:
-            errmsg = (
-                f'Satellite system and band ({sat_band}) are not supported.'
-                f'The GSS7000 support list: {self.capability}')
-            raise GSS7000Error(error=errmsg, command='set_scenario_power')
-        else:
-            sat_band_tp = tuple(sat_band.split('_'))
-
-        return sat_band_tp
-
-    def set_scenario_power(self,
-                           power_level=-130,
-                           sat_id='',
-                           sat_system='',
-                           freq_band='L1'):
-        """Set dynamic power for the running scenario.
-        Args:
-            power_level: transmit power level
-                Type, float.
-                Decimal, unit [dBm]
-                Default. -130
-            sat_id: set power level for specific satellite identifiers
-                Type, int.
-            sat_system: to set power level for all Satellites
-                Type, str
-                Option 'GPS/GLO/GAL/BDS'
-                Type, str
-                Default, '', assumed to be GPS.
-            freq_band: Frequency band to set the power level
-                Type, str
-                Option 'L1/L5/B1I/B1C/B2A/F1/E5/ALL'
-                Default, '', assumed to be L1.
-        Raises:
-            GSS7000Error: raise when power offset is not in [-49, -15] range.
-        """
-        band_dict = {
-            'L1': 1,
-            'L5': 2,
-            'B2A': 2,
-            'B1I': 1,
-            'B1C': 1,
-            'F1': 1,
-            'E5': 2,
-            'ALL': 3
-        }
-
-        # Convert and check satellite system and band
-        sat, band = self.sat_band_convert(sat_system, freq_band)
-        # Get freq band setting
-        band_cmd = band_dict.get(band, 1)
-
-        if not sat_id:
-            sat_id = 0
-            all_tx_type = 1
-        else:
-            all_tx_type = 0
-
-        # Convert absolute power level to absolute power offset.
-        power_offset = self.power_lev_offset_cal(power_level, sat, band)
-
-        if not -49 <= power_offset <= 15:
-            errmsg = (f'"power_offset" must be within [-49, 15], '
-                      f'current input is {power_offset}')
-            raise GSS7000Error(error=errmsg, command='set_power_offset')
-
-        if band_cmd == 1:
-            cmd = f'-,POW_LEV,v1_a1,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
-            self._query(cmd)
-        elif band_cmd == 2:
-            cmd = f'-,POW_LEV,v1_a2,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
-            self._query(cmd)
-        elif band_cmd == 3:
-            cmd = f'-,POW_LEV,v1_a1,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
-            self._query(cmd)
-            cmd = f'-,POW_LEV,v1_a2,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
diff --git a/src/antlion/controllers/tigertail.py b/src/antlion/controllers/tigertail.py
deleted file mode 100644
index e99c6e5..0000000
--- a/src/antlion/controllers/tigertail.py
+++ /dev/null
@@ -1,123 +0,0 @@
-"""Module manager the required definitions for tigertail"""
-
-import logging
-import time
-
-from enum import Enum
-
-from antlion.libs.proc import job
-
-MOBLY_CONTROLLER_CONFIG_NAME = "Tigertail"
-ACTS_CONTROLLER_REFERENCE_NAME = "tigertails"
-
-TIGERTAIL_SLEEP_TIME = 5
-
-
-def create(configs):
-    """Takes a list of Tigertail serial numbers and returns Tigertail Controllers.
-
-    Args:
-        configs: A list of serial numbers
-
-    Returns:
-        a list of Tigertail controllers
-
-    Raises:
-        ValueError if the configuration is not a list of serial number
-    """
-    tigertails = []
-    if isinstance(configs, list):
-        for serial_no in configs:
-            tigertail = Tigertail(serial_no)
-            tigertails.append(tigertail)
-    else:
-        raise ValueError(
-            'Invalid config for tigertail, should be a list of serial number')
-
-    return tigertails
-
-
-def destroy(tigertails):
-    pass
-
-
-def get_info(tigertails):
-    return [tigertail.get_info() for tigertail in tigertails]
-
-
-class TigertailError(Exception):
-    pass
-
-
-class TigertailState(Enum):
-    def __str__(self):
-        return str(self.value)
-
-    A = 'A'
-    B = 'B'
-    Off = 'off'
-
-
-class Tigertail(object):
-    def __init__(self, serial_number):
-        self.serial_number = serial_number
-        self.tigertool_bin = None
-
-    def setup(self, user_params):
-        """Links tigertool binary
-
-        This function needs to be:
-        Args:
-            user_params: User defined parameters. Expected field is:
-            {
-                // required, string or list of strings
-                tigertool: ['/path/to/tigertool.par']
-            }
-        """
-        tigertool_path = user_params['tigertool']
-        if tigertool_path is None:
-            self.tigertool_bin = None
-        elif isinstance(tigertool_path, str):
-            self.tigertool_bin = tigertool_path
-        elif isinstance(tigertool_path, list):
-            if len(tigertool_path) == 0:
-                self.tigertool_bin = None
-            else:
-                self.tigertool_bin = tigertool_path[0]
-
-        if self.tigertool_bin is None:
-            raise TigertailError('Tigertail binary not found')
-
-        logging.getLogger().debug(
-            f'Setup {self.serial_number} with binary at {self.tigertool_bin}')
-
-    def turn_on_mux_A(self):
-        self._set_tigertail_state(TigertailState.A)
-
-    def turn_on_mux_B(self):
-        self._set_tigertail_state(TigertailState.B)
-
-    def turn_off(self):
-        self._set_tigertail_state(TigertailState.Off)
-
-    def get_info(self):
-        return {'tigertail_serial_no': self.serial_number}
-
-    def _set_tigertail_state(self, state: TigertailState):
-        """Sets state for tigertail, there are 3 possible states:
-            A  : enable port A
-            B  : enable port B
-            Off: turn off both ports
-        """
-        result = job.run([
-            self.tigertool_bin, '--serialno',
-            str(self.serial_number), '--mux',
-            str(state)
-        ],
-                         timeout=10)
-
-        if result.stderr != '':
-            raise TigertailError(result.stderr)
-
-        # Sleep time to let the device connected/disconnect to tigertail
-        time.sleep(TIGERTAIL_SLEEP_TIME)
diff --git a/src/antlion/controllers/utils_lib/commands/ip.py b/src/antlion/controllers/utils_lib/commands/ip.py
deleted file mode 100644
index 0b14d2e..0000000
--- a/src/antlion/controllers/utils_lib/commands/ip.py
+++ /dev/null
@@ -1,156 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import re
-
-from antlion.libs.proc import job
-
-
-class LinuxIpCommand(object):
-    """Interface for doing standard IP commands on a linux system.
-
-    Wraps standard shell commands used for ip into a python object that can
-    be interacted with more easily.
-    """
-
-    def __init__(self, runner):
-        """
-        Args:
-            runner: Object that can take unix commands and run them in an
-                    enviroment (eg. connection.SshConnection).
-        """
-        self._runner = runner
-
-    def get_ipv4_addresses(self, net_interface):
-        """Gets all ipv4 addresses of a network interface.
-
-        Args:
-            net_interface: string, The network interface to get info on
-                           (eg. wlan0).
-
-        Returns: An iterator of tuples that contain (address, broadcast).
-                 where address is a ipaddress.IPv4Interface and broadcast
-                 is an ipaddress.IPv4Address.
-        """
-        results = self._runner.run('ip addr show dev %s' % net_interface)
-        lines = results.stdout.splitlines()
-
-        # Example stdout:
-        # 2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP group default qlen 1000
-        #   link/ether 48:0f:cf:3c:9d:89 brd ff:ff:ff:ff:ff:ff
-        #   inet 192.168.1.1/24 brd 192.168.1.255 scope global eth0
-        #       valid_lft forever preferred_lft forever
-        #   inet6 2620:0:1000:1500:a968:a776:2d80:a8b3/64 scope global temporary dynamic
-        #       valid_lft 599919sec preferred_lft 80919sec
-
-        for line in lines:
-            line = line.strip()
-            match = re.search('inet (?P<address>[^\s]*) brd (?P<bcast>[^\s]*)',
-                              line)
-            if match:
-                d = match.groupdict()
-                address = ipaddress.IPv4Interface(d['address'])
-                bcast = ipaddress.IPv4Address(d['bcast'])
-                yield (address, bcast)
-
-            match = re.search('inet (?P<address>[^\s]*)', line)
-            if match:
-                d = match.groupdict()
-                address = ipaddress.IPv4Interface(d['address'])
-                yield (address, None)
-
-    def add_ipv4_address(self, net_interface, address, broadcast=None):
-        """Adds an ipv4 address to a net_interface.
-
-        Args:
-            net_interface: string, The network interface
-                           to get the new ipv4 (eg. wlan0).
-            address: ipaddress.IPv4Interface, The new ipaddress and netmask
-                     to add to an interface.
-            broadcast: ipaddress.IPv4Address, The broadcast address to use for
-                       this net_interfaces subnet.
-        """
-        if broadcast:
-            self._runner.run('ip addr add %s broadcast %s dev %s' %
-                             (address, broadcast, net_interface))
-        else:
-            self._runner.run('ip addr add %s dev %s' %
-                             (address, net_interface))
-
-    def remove_ipv4_address(self, net_interface, address, ignore_status=False):
-        """Remove an ipv4 address.
-
-        Removes an ipv4 address from a network interface.
-
-        Args:
-            net_interface: string, The network interface to remove the
-                           ipv4 address from (eg. wlan0).
-            address: ipaddress.IPv4Interface or ipaddress.IPv4Address,
-                     The ip address to remove from the net_interface.
-            ignore_status: True if the exit status can be ignored
-        Returns:
-            The job result from a the command
-        """
-        return self._runner.run(
-            'ip addr del %s dev %s' % (address, net_interface),
-            ignore_status=ignore_status)
-
-    def set_ipv4_address(self, net_interface, address, broadcast=None):
-        """Set the ipv4 address.
-
-        Sets the ipv4 address of a network interface. If the network interface
-        has any other ipv4 addresses these will be cleared.
-
-        Args:
-            net_interface: string, The network interface to set the ip address
-                           on (eg. wlan0).
-            address: ipaddress.IPv4Interface, The ip address and subnet to give
-                     the net_interface.
-            broadcast: ipaddress.IPv4Address, The broadcast address to use for
-                       the subnet.
-        """
-        self.clear_ipv4_addresses(net_interface)
-        self.add_ipv4_address(net_interface, address, broadcast)
-
-    def clear_ipv4_addresses(self, net_interface):
-        """Clears all ipv4 addresses registered to a net_interface.
-
-        Args:
-            net_interface: string, The network interface to clear addresses from
-                           (eg. wlan0).
-        """
-        ip_info = self.get_ipv4_addresses(net_interface)
-
-        for address, _ in ip_info:
-            result = self.remove_ipv4_address(net_interface, address,
-                                              ignore_status=True)
-            # It is possible that the address has already been removed by the
-            # time this command has been called. In such a case, we would get
-            # this error message.
-            error_msg = 'RTNETLINK answers: Cannot assign requested address'
-            if result.exit_status != 0:
-                if error_msg in result.stderr:
-                    # If it was removed by another process, log a warning
-                    if address not in self.get_ipv4_addresses(net_interface):
-                        self._runner.log.warning(
-                            'Unable to remove address %s. The address was '
-                            'removed by another process.' % address)
-                        continue
-                    # If it was not removed, raise an error
-                    self._runner.log.error(
-                        'Unable to remove address %s. The address is still '
-                        'registered to %s, despite call for removal.' %
-                        (address, net_interface))
-                raise job.Error(result)
diff --git a/src/antlion/controllers/utils_lib/commands/route.py b/src/antlion/controllers/utils_lib/commands/route.py
deleted file mode 100644
index 3897f39..0000000
--- a/src/antlion/controllers/utils_lib/commands/route.py
+++ /dev/null
@@ -1,195 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import re
-
-from antlion.controllers.utils_lib.ssh import connection
-
-
-class Error(Exception):
-    """Exception thrown when a valid ip command experiences errors."""
-
-
-class NetworkInterfaceDown(Error):
-    """Exception thrown when a network interface is down."""
-
-
-class LinuxRouteCommand(object):
-    """Interface for doing standard ip route commands on a linux system."""
-
-    DEFAULT_ROUTE = 'default'
-
-    def __init__(self, runner):
-        """
-        Args:
-            runner: Object that can take unix commands and run them in an
-                    environment.
-        """
-        self._runner = runner
-
-    def add_route(self, net_interface, address, proto='static'):
-        """Add an entry to the ip routing table.
-
-        Will add a route for either a specific ip address, or a network.
-
-        Args:
-            net_interface: string, Any packet that sends through this route
-                           will be sent using this network interface
-                           (eg. wlan0).
-            address: ipaddress.IPv4Address, ipaddress.IPv4Network,
-                     or DEFAULT_ROUTE. The address to use. If a network
-                     is given then the entire subnet will be routed.
-                     If DEFAULT_ROUTE is given then this will set the
-                     default route.
-            proto: string, Routing protocol identifier of this route
-                   (e.g. kernel, redirect, boot, static, ra).
-                   See `man ip-route(8)` for details.
-
-        Raises:
-            NetworkInterfaceDown: Raised when the network interface is down.
-        """
-        try:
-            self._runner.run(f'ip route add {address} dev {net_interface} proto {proto}')
-        except connection.CommandError as e:
-            if 'File exists' in e.result.stderr:
-                raise Error('Route already exists.')
-            if 'Network is down' in e.result.stderr:
-                raise NetworkInterfaceDown(
-                    'Device must be up for adding a route.')
-            raise
-
-    def get_routes(self, net_interface=None):
-        """Get the routes in the ip routing table.
-
-        Args:
-            net_interface: string, If given, only retrieve routes that have
-                           been registered to go through this network
-                           interface (eg. wlan0).
-
-        Returns: An iterator that returns a tuple of (address, net_interface).
-                 If it is the default route then address
-                 will be the DEFAULT_ROUTE. If the route is a subnet then
-                 it will be a ipaddress.IPv4Network otherwise it is a
-                 ipaddress.IPv4Address.
-        """
-        result_ipv4 = self._runner.run('ip -4 route show')
-        result_ipv6 = self._runner.run('ip -6 route show')
-
-        lines = result_ipv4.stdout.splitlines() + result_ipv6.stdout.splitlines()
-
-        # Scan through each line for valid route entries
-        # Example output:
-        # default via 192.168.1.254 dev eth0  proto static
-        # 192.168.1.0/24 dev eth0  proto kernel  scope link  src 172.22.100.19  metric 1
-        # 192.168.2.1 dev eth2 proto kernel scope link metric 1
-        # fe80::/64 dev wlan0 proto static metric 1024
-        for line in lines:
-            if not 'dev' in line:
-                continue
-
-            if line.startswith(self.DEFAULT_ROUTE):
-                # The default route entry is formatted differently.
-                match = re.search('dev (?P<net_interface>\S+)', line)
-                pair = None
-                if match:
-                    # When there is a match for the route entry pattern create
-                    # A pair to hold the info.
-                    pair = (self.DEFAULT_ROUTE,
-                            match.groupdict()['net_interface'])
-            else:
-                # Test the normal route entry pattern.
-                match = re.search(
-                    '(?P<address>[0-9A-Fa-f\.\:/]+) dev (?P<net_interface>\S+)', line)
-                pair = None
-                if match:
-                    # When there is a match for the route entry pattern create
-                    # A pair to hold the info.
-                    d = match.groupdict()
-                    # Route can be either a network or specific address
-                    try:
-                        address = ipaddress.ip_address(d['address'])
-                    except ValueError:
-                        address = d['address']
-
-                    pair = (address, d['net_interface'])
-
-            # No pair means no pattern was found.
-            if not pair:
-                continue
-
-            if net_interface:
-                # If a net_interface was passed in then only give the pair when it is
-                # The correct net_interface.
-                if pair[1] == net_interface:
-                    yield pair
-            else:
-                # No net_interface given give all valid route entries.
-                yield pair
-
-    def is_route(self, address, net_interface=None):
-        """Checks to see if a route exists.
-
-        Args:
-            address: ipaddress.IPv4Address, ipaddress.IPv4Network,
-                     or DEFAULT_ROUTE, The address to use.
-            net_interface: string, If specified, the route must be
-                           registered to go through this network interface
-                           (eg. wlan0).
-
-        Returns: True if the route is found, False otherwise.
-        """
-        for route, _ in self.get_routes(net_interface):
-            if route == address:
-                return True
-
-        return False
-
-    def remove_route(self, address, net_interface=None):
-        """Removes a route from the ip routing table.
-
-        Removes a route from the ip routing table. If the route does not exist
-        nothing is done.
-
-        Args:
-            address: ipaddress.IPv4Address, ipaddress.IPv4Network,
-                     or DEFAULT_ROUTE, The address of the route to remove.
-            net_interface: string, If specified the route being removed is
-                           registered to go through this network interface
-                           (eg. wlan0)
-        """
-        try:
-            if net_interface:
-                self._runner.run('ip route del %s dev %s' %
-                                 (address, net_interface))
-            else:
-                self._runner.run('ip route del %s' % address)
-        except connection.CommandError as e:
-            if 'No such process' in e.result.stderr:
-                # The route didn't exist.
-                return
-            raise
-
-    def clear_routes(self, net_interface=None):
-        """Clears all routes.
-
-        Args:
-            net_interface: The network interface to clear routes on.
-            If not given then all routes will be removed on all network
-            interfaces (eg. wlan0).
-        """
-        routes = self.get_routes(net_interface)
-
-        for a, d in routes:
-            self.remove_route(a, d)
diff --git a/src/antlion/controllers/utils_lib/commands/shell.py b/src/antlion/controllers/utils_lib/commands/shell.py
deleted file mode 100644
index 34abeeb..0000000
--- a/src/antlion/controllers/utils_lib/commands/shell.py
+++ /dev/null
@@ -1,244 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import shlex
-import signal
-import time
-
-from antlion.libs.proc import job
-
-
-class ShellCommand(object):
-    """Wraps basic commands that tend to be tied very closely to a shell.
-
-    This class is a wrapper for running basic shell commands through
-    any object that has a run command. Basic shell functionality for managing
-    the system, programs, and files in wrapped within this class.
-
-    Note: At the moment this only works with the ssh runner.
-    """
-
-    def __init__(self, runner, working_dir=None):
-        """Creates a new shell command invoker.
-
-        Args:
-            runner: The object that will run the shell commands.
-            working_dir: The directory that all commands should work in,
-                         if none then the runners enviroment default is used.
-        """
-        self._runner = runner
-        self._working_dir = working_dir
-
-    def run(self, command, timeout=60):
-        """Runs a generic command through the runner.
-
-        Takes the command and prepares it to be run in the target shell using
-        this objects settings.
-
-        Args:
-            command: The command to run.
-            timeout: How long to wait for the command (in seconds).
-
-        Returns:
-            A CmdResult object containing the results of the shell command.
-
-        Raises:
-            job.Error: When the command executed but had an error.
-        """
-        if self._working_dir:
-            command_str = 'cd %s; %s' % (self._working_dir, command)
-        else:
-            command_str = command
-
-        return self._runner.run(command_str, timeout=timeout)
-
-    def is_alive(self, identifier):
-        """Checks to see if a program is alive.
-
-        Checks to see if a program is alive on the shells enviroment. This can
-        be used to check on generic programs, or a specific program using
-        a pid.
-
-        Args:
-            identifier: string or int, Used to identify the program to check.
-                        if given an int then it is assumed to be a pid. If
-                        given a string then it will be used as a search key
-                        to compare on the running processes.
-        Returns:
-            True if a process was found running, false otherwise.
-        """
-        try:
-            if isinstance(identifier, str):
-                self.run('ps aux | grep -v grep | grep %s' % identifier)
-            elif isinstance(identifier, int):
-                self.signal(identifier, 0)
-            else:
-                raise ValueError('Bad type was given for identifier')
-
-            return True
-        except job.Error:
-            return False
-
-    def get_pids(self, identifier):
-        """Gets the pids of a program.
-
-        Searches for a program with a specific name and grabs the pids for all
-        programs that match.
-
-        Args:
-            identifier: A search term that identifies the program.
-
-        Returns: An array of all pids that matched the identifier, or None
-                  if no pids were found.
-        """
-        try:
-            result = self.run('ps aux | grep -v grep | grep %s' % identifier)
-        except job.Error as e:
-            if e.result.exit_status == 1:
-                # Grep returns exit status 1 when no lines are selected. This is
-                # an expected return code.
-                return
-            raise e
-
-        lines = result.stdout.splitlines()
-
-        # The expected output of the above command is like so:
-        # bob    14349  0.0  0.0  34788  5552 pts/2    Ss   Oct10   0:03 bash
-        # bob    52967  0.0  0.0  34972  5152 pts/4    Ss   Oct10   0:00 bash
-        # Where the format is:
-        # USER    PID  ...
-        for line in lines:
-            pieces = line.split()
-            try:
-                yield int(pieces[1])
-            except StopIteration:
-                return
-
-    def search_file(self, search_string, file_name):
-        """Searches through a file for a string.
-
-        Args:
-            search_string: The string or pattern to look for.
-            file_name: The name of the file to search.
-
-        Returns:
-            True if the string or pattern was found, False otherwise.
-        """
-        try:
-            self.run('grep %s %s' % (shlex.quote(search_string), file_name))
-            return True
-        except job.Error:
-            return False
-
-    def read_file(self, file_name):
-        """Reads a file through the shell.
-
-        Args:
-            file_name: The name of the file to read.
-
-        Returns:
-            A string of the files contents.
-        """
-        return self.run('cat %s' % file_name).stdout
-
-    def write_file(self, file_name, data):
-        """Writes a block of data to a file through the shell.
-
-        Args:
-            file_name: The name of the file to write to.
-            data: The string of data to write.
-        """
-        return self.run('echo %s > %s' % (shlex.quote(data), file_name))
-
-    def append_file(self, file_name, data):
-        """Appends a block of data to a file through the shell.
-
-        Args:
-            file_name: The name of the file to write to.
-            data: The string of data to write.
-        """
-        return self.run('echo %s >> %s' % (shlex.quote(data), file_name))
-
-    def touch_file(self, file_name):
-        """Creates a file through the shell.
-
-        Args:
-            file_name: The name of the file to create.
-        """
-        self.write_file(file_name, '')
-
-    def delete_file(self, file_name):
-        """Deletes a file through the shell.
-
-        Args:
-            file_name: The name of the file to delete.
-        """
-        try:
-            self.run('rm -r %s' % file_name)
-        except job.Error as e:
-            if 'No such file or directory' in e.result.stderr:
-                return
-
-            raise
-
-    def kill(self, identifier, timeout=10):
-        """Kills a program or group of programs through the shell.
-
-        Kills all programs that match an identifier through the shell. This
-        will send an increasing queue of kill signals to all programs
-        that match the identifier until either all are dead or the timeout
-        finishes.
-
-        Programs are guaranteed to be killed after running this command.
-
-        Args:
-            identifier: A string used to identify the program.
-            timeout: The time to wait for all programs to die. Each signal will
-                     take an equal portion of this time.
-        """
-        if isinstance(identifier, int):
-            pids = [identifier]
-        else:
-            pids = list(self.get_pids(identifier))
-
-        signal_queue = [signal.SIGINT, signal.SIGTERM, signal.SIGKILL]
-
-        signal_duration = timeout / len(signal_queue)
-        for sig in signal_queue:
-            for pid in pids:
-                try:
-                    self.signal(pid, sig)
-                except job.Error:
-                    pass
-
-            start_time = time.time()
-            while pids and time.time() - start_time < signal_duration:
-                time.sleep(0.1)
-                pids = [pid for pid in pids if self.is_alive(pid)]
-
-            if not pids:
-                break
-
-    def signal(self, pid, sig):
-        """Sends a specific signal to a program.
-
-        Args:
-            pid: The process id of the program to kill.
-            sig: The signal to send.
-
-        Raises:
-            job.Error: Raised when the signal fail to reach
-                       the specified program.
-        """
-        self.run('kill -%d %d' % (sig, pid))
diff --git a/src/antlion/controllers/utils_lib/host_utils.py b/src/antlion/controllers/utils_lib/host_utils.py
deleted file mode 100644
index db7be27..0000000
--- a/src/antlion/controllers/utils_lib/host_utils.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import socket
-
-
-def get_available_host_port():
-    """Finds a semi-random available port.
-
-    A race condition is still possible after the port number is returned, if
-    another process happens to bind it.
-
-    Returns:
-        A port number that is unused on both TCP and UDP.
-    """
-    # On the 2.6 kernel, calling _try_bind() on UDP socket returns the
-    # same port over and over. So always try TCP first.
-    while True:
-        # Ask the OS for an unused port.
-        port = _try_bind(0, socket.SOCK_STREAM, socket.IPPROTO_TCP)
-        # Check if this port is unused on the other protocol.
-        if port and _try_bind(port, socket.SOCK_DGRAM, socket.IPPROTO_UDP):
-            return port
-
-
-def is_port_available(port):
-    """Checks if a given port number is available on the system.
-
-    Args:
-        port: An integer which is the port number to check.
-
-    Returns:
-        True if the port is available; False otherwise.
-    """
-    return (_try_bind(port, socket.SOCK_STREAM, socket.IPPROTO_TCP) and
-            _try_bind(port, socket.SOCK_DGRAM, socket.IPPROTO_UDP))
-
-
-def _try_bind(port, socket_type, socket_proto):
-    s = socket.socket(socket.AF_INET, socket_type, socket_proto)
-    try:
-        try:
-            s.bind(('', port))
-            # The result of getsockname() is protocol dependent, but for both
-            # IPv4 and IPv6 the second field is a port number.
-            return s.getsockname()[1]
-        except socket.error:
-            return None
-    finally:
-        s.close()
diff --git a/src/antlion/controllers/utils_lib/ssh/connection.py b/src/antlion/controllers/utils_lib/ssh/connection.py
deleted file mode 100644
index 799905e..0000000
--- a/src/antlion/controllers/utils_lib/ssh/connection.py
+++ /dev/null
@@ -1,439 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import os
-import re
-import shutil
-import tempfile
-import threading
-import time
-import uuid
-
-from antlion import logger
-from antlion.controllers.utils_lib import host_utils
-from antlion.controllers.utils_lib.ssh import formatter
-from antlion.libs.proc import job
-
-
-class Error(Exception):
-    """An error occurred during an ssh operation."""
-
-
-class CommandError(Exception):
-    """An error occurred with the command.
-
-    Attributes:
-        result: The results of the ssh command that had the error.
-    """
-    def __init__(self, result):
-        """
-        Args:
-            result: The result of the ssh command that created the problem.
-        """
-        self.result = result
-
-    def __str__(self):
-        return 'cmd: %s\nstdout: %s\nstderr: %s' % (
-            self.result.command, self.result.stdout, self.result.stderr)
-
-
-_Tunnel = collections.namedtuple('_Tunnel',
-                                 ['local_port', 'remote_port', 'proc'])
-
-
-class SshConnection(object):
-    """Provides a connection to a remote machine through ssh.
-
-    Provides the ability to connect to a remote machine and execute a command
-    on it. The connection will try to establish a persistent connection When
-    a command is run. If the persistent connection fails it will attempt
-    to connect normally.
-    """
-    @property
-    def socket_path(self):
-        """Returns: The os path to the master socket file."""
-        return os.path.join(self._master_ssh_tempdir, 'socket')
-
-    def __init__(self, settings):
-        """
-        Args:
-            settings: The ssh settings to use for this connection.
-            formatter: The object that will handle formatting ssh command
-                       for use with the background job.
-        """
-        self._settings = settings
-        self._formatter = formatter.SshFormatter()
-        self._lock = threading.Lock()
-        self._master_ssh_proc = None
-        self._master_ssh_tempdir = None
-        self._tunnels = list()
-
-        def log_line(msg):
-            return '[SshConnection | %s] %s' % (self._settings.hostname, msg)
-
-        self.log = logger.create_logger(log_line)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, _, __, ___):
-        self.close()
-
-    def __del__(self):
-        self.close()
-
-    def setup_master_ssh(self, timeout_seconds=5):
-        """Sets up the master ssh connection.
-
-        Sets up the initial master ssh connection if it has not already been
-        started.
-
-        Args:
-            timeout_seconds: The time to wait for the master ssh connection to
-            be made.
-
-        Raises:
-            Error: When setting up the master ssh connection fails.
-        """
-        with self._lock:
-            if self._master_ssh_proc is not None:
-                socket_path = self.socket_path
-                if (not os.path.exists(socket_path)
-                        or self._master_ssh_proc.poll() is not None):
-                    self.log.debug('Master ssh connection to %s is down.',
-                                   self._settings.hostname)
-                    self._cleanup_master_ssh()
-
-            if self._master_ssh_proc is None:
-                # Create a shared socket in a temp location.
-                self._master_ssh_tempdir = tempfile.mkdtemp(
-                    prefix='ssh-master')
-
-                # Setup flags and options for running the master ssh
-                # -N: Do not execute a remote command.
-                # ControlMaster: Spawn a master connection.
-                # ControlPath: The master connection socket path.
-                extra_flags = {'-N': None}
-                extra_options = {
-                    'ControlMaster': True,
-                    'ControlPath': self.socket_path,
-                    'BatchMode': True
-                }
-
-                # Construct the command and start it.
-                master_cmd = self._formatter.format_ssh_local_command(
-                    self._settings,
-                    extra_flags=extra_flags,
-                    extra_options=extra_options)
-                self.log.info('Starting master ssh connection.')
-                self._master_ssh_proc = job.run_async(master_cmd)
-
-                end_time = time.time() + timeout_seconds
-
-                while time.time() < end_time:
-                    if os.path.exists(self.socket_path):
-                        break
-                    time.sleep(.2)
-                else:
-                    self._cleanup_master_ssh()
-                    raise Error('Master ssh connection timed out.')
-
-    def run(self,
-            command,
-            timeout=60,
-            ignore_status=False,
-            env=None,
-            io_encoding='utf-8',
-            attempts=2):
-        """Runs a remote command over ssh.
-
-        Will ssh to a remote host and run a command. This method will
-        block until the remote command is finished.
-
-        Args:
-            command: The command to execute over ssh. Can be either a string
-                     or a list.
-            timeout: number seconds to wait for command to finish.
-            ignore_status: bool True to ignore the exit code of the remote
-                           subprocess.  Note that if you do ignore status codes,
-                           you should handle non-zero exit codes explicitly.
-            env: dict environment variables to setup on the remote host.
-            io_encoding: str unicode encoding of command output.
-            attempts: Number of attempts before giving up on command failures.
-
-        Returns:
-            A job.Result containing the results of the ssh command.
-
-        Raises:
-            job.TimeoutError: When the remote command took to long to execute.
-            Error: When the ssh connection failed to be created.
-            CommandError: Ssh worked, but the command had an error executing.
-        """
-        if attempts == 0:
-            return None
-        if env is None:
-            env = {}
-
-        try:
-            self.setup_master_ssh(self._settings.connect_timeout)
-        except Error:
-            self.log.warning('Failed to create master ssh connection, using '
-                             'normal ssh connection.')
-
-        extra_options = {'BatchMode': True}
-        if self._master_ssh_proc:
-            extra_options['ControlPath'] = self.socket_path
-
-        identifier = str(uuid.uuid4())
-        full_command = 'echo "CONNECTED: %s"; %s' % (identifier, command)
-
-        terminal_command = self._formatter.format_command(
-            full_command, env, self._settings, extra_options=extra_options)
-
-        dns_retry_count = 2
-        while True:
-            result = job.run(terminal_command,
-                             ignore_status=True,
-                             timeout=timeout,
-                             io_encoding=io_encoding)
-            output = result.stdout
-
-            # Check for a connected message to prevent false negatives.
-            valid_connection = re.search('^CONNECTED: %s' % identifier,
-                                         output,
-                                         flags=re.MULTILINE)
-            if valid_connection:
-                # Remove the first line that contains the connect message.
-                line_index = output.find('\n') + 1
-                if line_index == 0:
-                    line_index = len(output)
-                real_output = output[line_index:].encode(io_encoding)
-
-                result = job.Result(command=result.command,
-                                    stdout=real_output,
-                                    stderr=result._raw_stderr,
-                                    exit_status=result.exit_status,
-                                    duration=result.duration,
-                                    did_timeout=result.did_timeout,
-                                    encoding=io_encoding)
-                if result.exit_status and not ignore_status:
-                    raise job.Error(result)
-                return result
-
-            error_string = result.stderr
-
-            had_dns_failure = (result.exit_status == 255 and re.search(
-                r'^ssh: .*: Name or service not known',
-                error_string,
-                flags=re.MULTILINE))
-            if had_dns_failure:
-                dns_retry_count -= 1
-                if not dns_retry_count:
-                    raise Error('DNS failed to find host.', result)
-                self.log.debug('Failed to connect to host, retrying...')
-            else:
-                break
-
-        had_timeout = re.search(
-            r'^ssh: connect to host .* port .*: '
-            r'Connection timed out\r$',
-            error_string,
-            flags=re.MULTILINE)
-        if had_timeout:
-            raise Error('Ssh timed out.', result)
-
-        permission_denied = 'Permission denied' in error_string
-        if permission_denied:
-            raise Error('Permission denied.', result)
-
-        unknown_host = re.search(
-            r'ssh: Could not resolve hostname .*: '
-            r'Name or service not known',
-            error_string,
-            flags=re.MULTILINE)
-        if unknown_host:
-            raise Error('Unknown host.', result)
-
-        self.log.error('An unknown error has occurred. Job result: %s' %
-                       result)
-        ping_output = job.run('ping %s -c 3 -w 1' % self._settings.hostname,
-                              ignore_status=True)
-        self.log.error('Ping result: %s' % ping_output)
-        if attempts > 1:
-            self._cleanup_master_ssh()
-            self.run(command, timeout, ignore_status, env, io_encoding,
-                     attempts - 1)
-        raise Error('The job failed for unknown reasons.', result)
-
-    def run_async(self, command, env=None):
-        """Starts up a background command over ssh.
-
-        Will ssh to a remote host and startup a command. This method will
-        block until there is confirmation that the remote command has started.
-
-        Args:
-            command: The command to execute over ssh. Can be either a string
-                     or a list.
-            env: A dictionary of environment variables to setup on the remote
-                 host.
-
-        Returns:
-            The result of the command to launch the background job.
-
-        Raises:
-            CmdTimeoutError: When the remote command took to long to execute.
-            SshTimeoutError: When the connection took to long to established.
-            SshPermissionDeniedError: When permission is not allowed on the
-                                      remote host.
-        """
-        return self.run(f'({command}) < /dev/null > /dev/null 2>&1 & echo -n $!', env=env)
-
-    def close(self):
-        """Clean up open connections to remote host."""
-        self._cleanup_master_ssh()
-        while self._tunnels:
-            self.close_ssh_tunnel(self._tunnels[0].local_port)
-
-    def _cleanup_master_ssh(self):
-        """
-        Release all resources (process, temporary directory) used by an active
-        master SSH connection.
-        """
-        # If a master SSH connection is running, kill it.
-        if self._master_ssh_proc is not None:
-            self.log.debug('Nuking master_ssh_job.')
-            self._master_ssh_proc.kill()
-            self._master_ssh_proc.wait()
-            self._master_ssh_proc = None
-
-        # Remove the temporary directory for the master SSH socket.
-        if self._master_ssh_tempdir is not None:
-            self.log.debug('Cleaning master_ssh_tempdir.')
-            shutil.rmtree(self._master_ssh_tempdir)
-            self._master_ssh_tempdir = None
-
-    def create_ssh_tunnel(self, port, local_port=None):
-        """Create an ssh tunnel from local_port to port.
-
-        This securely forwards traffic from local_port on this machine to the
-        remote SSH host at port.
-
-        Args:
-            port: remote port on the host.
-            local_port: local forwarding port, or None to pick an available
-                        port.
-
-        Returns:
-            the created tunnel process.
-        """
-        if not local_port:
-            local_port = host_utils.get_available_host_port()
-        else:
-            for tunnel in self._tunnels:
-                if tunnel.remote_port == port:
-                    return tunnel.local_port
-
-        extra_flags = {
-            '-n': None,  # Read from /dev/null for stdin
-            '-N': None,  # Do not execute a remote command
-            '-q': None,  # Suppress warnings and diagnostic commands
-            '-L': '%d:localhost:%d' % (local_port, port),
-        }
-        extra_options = dict()
-        if self._master_ssh_proc:
-            extra_options['ControlPath'] = self.socket_path
-        tunnel_cmd = self._formatter.format_ssh_local_command(
-            self._settings,
-            extra_flags=extra_flags,
-            extra_options=extra_options)
-        self.log.debug('Full tunnel command: %s', tunnel_cmd)
-        # Exec the ssh process directly so that when we deliver signals, we
-        # deliver them straight to the child process.
-        tunnel_proc = job.run_async(tunnel_cmd)
-        self.log.debug('Started ssh tunnel, local = %d remote = %d, pid = %d',
-                       local_port, port, tunnel_proc.pid)
-        self._tunnels.append(_Tunnel(local_port, port, tunnel_proc))
-        return local_port
-
-    def close_ssh_tunnel(self, local_port):
-        """Close a previously created ssh tunnel of a TCP port.
-
-        Args:
-            local_port: int port on localhost previously forwarded to the remote
-                        host.
-
-        Returns:
-            integer port number this port was forwarded to on the remote host or
-            None if no tunnel was found.
-        """
-        idx = None
-        for i, tunnel in enumerate(self._tunnels):
-            if tunnel.local_port == local_port:
-                idx = i
-                break
-        if idx is not None:
-            tunnel = self._tunnels.pop(idx)
-            tunnel.proc.kill()
-            tunnel.proc.wait()
-            return tunnel.remote_port
-        return None
-
-    def send_file(self, local_path, remote_path, ignore_status=False):
-        """Send a file from the local host to the remote host.
-
-        Args:
-            local_path: string path of file to send on local host.
-            remote_path: string path to copy file to on remote host.
-            ignore_status: Whether or not to ignore the command's exit_status.
-        """
-        # TODO: This may belong somewhere else: b/32572515
-        user_host = self._formatter.format_host_name(self._settings)
-        job.run('scp %s %s:%s' % (local_path, user_host, remote_path),
-                ignore_status=ignore_status)
-
-    def pull_file(self, local_path, remote_path, ignore_status=False):
-        """Send a file from remote host to local host
-
-        Args:
-            local_path: string path of file to recv on local host
-            remote_path: string path to copy file from on remote host.
-            ignore_status: Whether or not to ignore the command's exit_status.
-        """
-        user_host = self._formatter.format_host_name(self._settings)
-        job.run('scp %s:%s %s' % (user_host, remote_path, local_path),
-                ignore_status=ignore_status)
-
-    def find_free_port(self, interface_name='localhost'):
-        """Find a unused port on the remote host.
-
-        Note that this method is inherently racy, since it is impossible
-        to promise that the remote port will remain free.
-
-        Args:
-            interface_name: string name of interface to check whether a
-                            port is used against.
-
-        Returns:
-            integer port number on remote interface that was free.
-        """
-        # TODO: This may belong somewhere else: b/3257251
-        free_port_cmd = (
-            'python -c "import socket; s=socket.socket(); '
-            's.bind((\'%s\', 0)); print(s.getsockname()[1]); s.close()"'
-        ) % interface_name
-        port = int(self.run(free_port_cmd).stdout)
-        # Yield to the os to ensure the port gets cleaned up.
-        time.sleep(0.001)
-        return port
diff --git a/src/antlion/controllers/utils_lib/ssh/formatter.py b/src/antlion/controllers/utils_lib/ssh/formatter.py
deleted file mode 100644
index 2466012..0000000
--- a/src/antlion/controllers/utils_lib/ssh/formatter.py
+++ /dev/null
@@ -1,210 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class SshFormatter(object):
-    """Handles formatting ssh commands.
-
-    Handler for formatting chunks of the ssh command to run.
-    """
-
-    def format_ssh_executable(self, settings):
-        """Format the executable name.
-
-        Formats the executable name as a string.
-
-        Args:
-            settings: The ssh settings being used.
-
-        Returns:
-            A string for the ssh executable name.
-        """
-        return settings.executable
-
-    def format_host_name(self, settings):
-        """Format hostname.
-
-        Formats the hostname to connect to.
-
-        Args:
-            settings: The ssh settings being used.
-
-        Returns:
-            A string of the connection host name to connect to.
-        """
-        return '%s@%s' % (settings.username, settings.hostname)
-
-    def format_value(self, value):
-        """Formats a command line value.
-
-        Takes in a value and formats it so it can be safely used in the
-        command line.
-
-        Args:
-            value: The value to format.
-
-        Returns:
-            A string representation of the formatted value.
-        """
-        if isinstance(value, bool):
-            return 'yes' if value else 'no'
-
-        return str(value)
-
-    def format_options_list(self, options):
-        """Format the option list.
-
-        Formats a dictionary of options into a list of strings to be used
-        on the command line.
-
-        Args:
-            options: A dictionary of options.
-
-        Returns:
-            An iterator of strings that should go on the command line.
-        """
-        for option_name in options:
-            option = options[option_name]
-
-            yield '-o'
-            yield '%s=%s' % (option_name, self.format_value(option))
-
-    def format_flag_list(self, flags):
-        """Format the flags list.
-
-        Formats a dictionary of flags into a list of strings to be used
-        on the command line.
-
-        Args:
-            flags: A dictonary of options.
-
-        Returns:
-            An iterator of strings that should be used on the command line.
-        """
-        for flag_name in flags:
-            flag = flags[flag_name]
-
-            yield flag_name
-            if flag is not None:
-                yield self.format_value(flag)
-
-    def format_ssh_local_command(self,
-                                 settings,
-                                 extra_flags={},
-                                 extra_options={}):
-        """Formats the local part of the ssh command.
-
-        Formats the local section of the ssh command. This is the part of the
-        command that will actual launch ssh on our local machine with the
-        specified settings.
-
-        Args:
-            settings: The ssh settings.
-            extra_flags: Extra flags to inlcude.
-            extra_options: Extra options to include.
-
-        Returns:
-            An array of strings that make up the command and its local
-            arguments.
-        """
-        options = settings.construct_ssh_options()
-        for extra_option_name in extra_options:
-            options[extra_option_name] = extra_options[extra_option_name]
-        options_list = list(self.format_options_list(options))
-
-        flags = settings.construct_ssh_flags()
-        for extra_flag_name in extra_flags:
-            flags[extra_flag_name] = extra_flags[extra_flag_name]
-        flags_list = list(self.format_flag_list(flags))
-
-        all_options = options_list + flags_list
-        host_name = self.format_host_name(settings)
-        executable = self.format_ssh_executable(settings)
-
-        base_command = [executable] + all_options + [host_name]
-
-        return base_command
-
-    def format_ssh_command(self,
-                           remote_command,
-                           settings,
-                           extra_flags={},
-                           extra_options={}):
-        """Formats the full ssh command.
-
-        Creates the full format for an ssh command.
-
-        Args:
-            remote_command: A string that represents the remote command to
-                            execute.
-            settings: The ssh settings to use.
-            extra_flags: Extra flags to include in the settings.
-            extra_options: Extra options to include in the settings.
-
-        Returns:
-            A list of strings that make up the total ssh command.
-        """
-        local_command = self.format_ssh_local_command(settings, extra_flags,
-                                                      extra_options)
-
-        local_command.append(remote_command)
-        return local_command
-
-    def format_remote_command(self, command, env):
-        """Formats the remote part of the ssh command.
-
-        Formatts the command that will run on the remote machine.
-
-        Args:
-            command: string, The command to be executed.
-            env: Enviroment variables to add to the remote envirment.
-
-        Returns:
-            A string that represents the command line to execute on the remote
-            machine.
-        """
-        if not env:
-            env_str = ''
-        else:
-            env_str = 'export '
-            for name in env:
-                value = env[name]
-                env_str += '%s=%s ' % (name, str(value))
-            env_str += ';'
-
-        execution_line = '%s %s;' % (env_str, command)
-        return execution_line
-
-    def format_command(self,
-                       command,
-                       env,
-                       settings,
-                       extra_flags={},
-                       extra_options={}):
-        """Formats a full command.
-
-        Formats the full command to run in order to run a command on a remote
-        machine.
-
-        Args:
-            command: The command to run on the remote machine. Can either be
-                     a string or a list.
-            env: The enviroment variables to include on the remote machine.
-            settings: The ssh settings to use.
-            extra_flags: Extra flags to include with the settings.
-            extra_options: Extra options to include with the settings.
-        """
-        remote_command = self.format_remote_command(command, env)
-        return self.format_ssh_command(remote_command, settings, extra_flags,
-                                       extra_options)
diff --git a/src/antlion/controllers/utils_lib/ssh/settings.py b/src/antlion/controllers/utils_lib/ssh/settings.py
deleted file mode 100644
index ca14b91..0000000
--- a/src/antlion/controllers/utils_lib/ssh/settings.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Create a SshSettings from a dictionary from an ACTS config
-
-Args:
-    config dict instance from an ACTS config
-
-Returns:
-    An instance of SshSettings or None
-"""
-
-
-def from_config(config):
-    if config is None:
-        return None  # Having no settings is not an error
-
-    user = config.get('user', None)
-    host = config.get('host', None)
-    port = config.get('port', 22)
-    identity_file = config.get('identity_file', None)
-    ssh_config = config.get('ssh_config', None)
-    connect_timeout = config.get('connect_timeout', 30)
-    if user is None or host is None:
-        raise ValueError('Malformed SSH config did not include user and '
-                         'host keys: %s' % config)
-
-    return SshSettings(host, user, port=port, identity_file=identity_file,
-                       ssh_config=ssh_config, connect_timeout=connect_timeout)
-
-
-class SshSettings(object):
-    """Contains settings for ssh.
-
-    Container for ssh connection settings.
-
-    Attributes:
-        username: The name of the user to log in as.
-        hostname: The name of the host to connect to.
-        executable: The ssh executable to use.
-        port: The port to connect through (usually 22).
-        host_file: The known host file to use.
-        connect_timeout: How long to wait on a connection before giving a
-                         timeout.
-        alive_interval: How long between ssh heartbeat signals to keep the
-                        connection alive.
-    """
-
-    def __init__(self,
-                 hostname,
-                 username,
-                 port=22,
-                 host_file='/dev/null',
-                 connect_timeout=30,
-                 alive_interval=300,
-                 executable='/usr/bin/ssh',
-                 identity_file=None,
-                 ssh_config=None):
-        self.username = username
-        self.hostname = hostname
-        self.executable = executable
-        self.port = port
-        self.host_file = host_file
-        self.connect_timeout = connect_timeout
-        self.alive_interval = alive_interval
-        self.identity_file = identity_file
-        self.ssh_config = ssh_config
-
-    def construct_ssh_options(self):
-        """Construct the ssh options.
-
-        Constructs a dictionary of option that should be used with the ssh
-        command.
-
-        Returns:
-            A dictionary of option name to value.
-        """
-        current_options = {}
-        current_options['StrictHostKeyChecking'] = False
-        current_options['UserKnownHostsFile'] = self.host_file
-        current_options['ConnectTimeout'] = self.connect_timeout
-        current_options['ServerAliveInterval'] = self.alive_interval
-        return current_options
-
-    def construct_ssh_flags(self):
-        """Construct the ssh flags.
-
-        Constructs what flags should be used in the ssh connection.
-
-        Returns:
-            A dictonary of flag name to value. If value is none then it is
-            treated as a binary flag.
-        """
-        current_flags = {}
-        current_flags['-a'] = None
-        current_flags['-x'] = None
-        current_flags['-p'] = self.port
-        if self.identity_file:
-            current_flags['-i'] = self.identity_file
-        if self.ssh_config:
-            current_flags['-F'] = self.ssh_config
-        return current_flags
diff --git a/src/antlion/controllers/uxm_lib/OWNERS b/src/antlion/controllers/uxm_lib/OWNERS
deleted file mode 100644
index 0c40622..0000000
--- a/src/antlion/controllers/uxm_lib/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-jethier@google.com
-hmtuan@google.com
-harjani@google.com
\ No newline at end of file
diff --git a/src/antlion/controllers/uxm_lib/uxm_cellular_simulator.py b/src/antlion/controllers/uxm_lib/uxm_cellular_simulator.py
deleted file mode 100644
index 713d3cf..0000000
--- a/src/antlion/controllers/uxm_lib/uxm_cellular_simulator.py
+++ /dev/null
@@ -1,707 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import os
-import socket
-import time
-import paramiko
-import re
-
-from antlion.controllers.cellular_simulator import AbstractCellularSimulator
-
-
-class UXMCellularSimulator(AbstractCellularSimulator):
-    """A cellular simulator for UXM callbox."""
-
-    # Keys to obtain data from cell_info dictionary.
-    KEY_CELL_NUMBER = "cell_number"
-    KEY_CELL_TYPE = "cell_type"
-
-    # UXM socket port
-    UXM_PORT = 5125
-
-    # UXM SCPI COMMAND
-    SCPI_IMPORT_STATUS_QUERY_CMD = 'SYSTem:SCPI:IMPort:STATus?'
-    SCPI_SYSTEM_ERROR_CHECK_CMD = 'SYST:ERR?\n'
-    # require: path to SCPI file
-    SCPI_IMPORT_SCPI_FILE_CMD = 'SYSTem:SCPI:IMPort "{}"\n'
-    # require: 1. cell type (E.g. NR5G), 2. cell number (E.g CELL1)
-    SCPI_CELL_ON_CMD = 'BSE:CONFig:{}:{}:ACTive 1'
-    # require: 1. cell type (E.g. NR5G), 2. cell number (E.g CELL1)
-    SCPI_CELL_OFF_CMD = 'BSE:CONFig:{}:{}:ACTive 0'
-    # require: 1. cell type (E.g. NR5G), 2. cell number (E.g CELL1)
-    SCPI_GET_CELL_STATUS = 'BSE:STATus:{}:{}?'
-    SCPI_CHECK_CONNECTION_CMD = '*IDN?\n'
-
-    # UXM's Test Application recovery
-    TA_BOOT_TIME = 100
-
-    # shh command
-    SSH_START_GUI_APP_CMD_FORMAT = 'psexec -s -d -i 1 "{exe_path}"'
-    SSH_CHECK_APP_RUNNING_CMD_FORMAT = 'tasklist | findstr /R {regex_app_name}'
-
-    # start process success regex
-    PSEXEC_PROC_STARTED_REGEX_FORMAT = 'started on * with process ID {proc_id}'
-
-    def __init__(self, ip_address, custom_files, uxm_user,
-                 ssh_private_key_to_uxm, ta_exe_path, ta_exe_name):
-        """Initializes the cellular simulator.
-
-        Args:
-            ip_address: the ip address of host where Keysight Test Application (TA)
-                is installed.
-            custom_files: a list of file path for custom files.
-            uxm_user: username of host where Keysight TA resides.
-            ssh_private_key_to_uxm: private key for key based ssh to
-                host where Keysight TA resides.
-            ta_exe_path: path to TA exe.
-            ta_exe_name: name of TA exe.
-        """
-        super().__init__()
-        self.custom_files = custom_files
-        self.rockbottom_script = None
-        self.cells = []
-        self.uxm_ip = ip_address
-        self.uxm_user = uxm_user
-        self.ssh_private_key_to_uxm = ssh_private_key_to_uxm
-        self.ta_exe_path = ta_exe_path
-        self.ta_exe_name = ta_exe_name
-        self.ssh_client = self._create_ssh_client()
-
-        # get roclbottom file
-        for file in self.custom_files:
-            if 'rockbottom_' in file:
-                self.rockbottom_script = file
-
-        # connect to Keysight Test Application via socket
-        self.recovery_ta()
-        self.socket = self._socket_connect(self.uxm_ip, self.UXM_PORT)
-        self.check_socket_connection()
-        self.timeout = 120
-
-    def _create_ssh_client(self):
-        """Create a ssh client to host."""
-        ssh = paramiko.SSHClient()
-        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-        mykey = paramiko.Ed25519Key.from_private_key_file(
-            self.ssh_private_key_to_uxm)
-        ssh.connect(hostname=self.uxm_ip, username=self.uxm_user, pkey=mykey)
-        self.log.info('SSH client to %s is connected' % self.uxm_ip)
-        return ssh
-
-    def is_ta_running(self):
-        is_running_cmd = self.SSH_CHECK_APP_RUNNING_CMD_FORMAT.format(
-            regex_app_name=self.ta_exe_name)
-        stdin, stdout, stderr = self.ssh_client.exec_command(is_running_cmd)
-        stdin.close()
-        err = ''.join(stderr.readlines())
-        out = ''.join(stdout.readlines())
-        final_output = str(out) + str(err)
-        self.log.info(final_output)
-        return (out != '' and err == '')
-
-    def _start_test_app(self):
-        """Start Test Application on Windows."""
-        # start GUI exe via ssh
-        start_app_cmd = self.SSH_START_GUI_APP_CMD_FORMAT.format(
-            exe_path=self.ta_exe_path)
-        stdin, stdout, stderr = self.ssh_client.exec_command(start_app_cmd)
-        self.log.info(f'Command sent to {self.uxm_ip}: {start_app_cmd}')
-        stdin.close()
-        err = ''.join(stderr.readlines())
-        out = ''.join(stdout.readlines())
-        # psexec return process ID as part of the exit code
-        exit_status = stderr.channel.recv_exit_status()
-        is_started = re.search(
-            self.PSEXEC_PROC_STARTED_REGEX_FORMAT.format(proc_id=exit_status),
-            err[-1])
-        if is_started:
-            raise RuntimeError('Fail to start TA: ' + out + err)
-        # wait for ta completely boot up
-        self.log.info('TA is starting')
-        time.sleep(self.TA_BOOT_TIME)
-
-    def recovery_ta(self):
-        """Start TA if it is not running."""
-        if not self.is_ta_running():
-            self._start_test_app()
-            # checking if ta booting process complete
-            # by checking socket connection
-            s = None
-            retries = 12
-            for _ in range(retries):
-                try:
-                    s = self._socket_connect(self.uxm_ip, self.UXM_PORT)
-                    s.close()
-                    return
-                except ConnectionRefusedError as cre:
-                    self.log.info(
-                        'Connection refused, wait 10s for TA to boot')
-                    time.sleep(10)
-            raise RuntimeError('TA does not start on time')
-
-    def set_rockbottom_script_path(self, path):
-        """Set path to rockbottom script.
-
-        Args:
-            path: path to rockbottom script.
-        """
-        self.rockbottom_script = path
-
-    def set_cell_info(self, cell_info):
-        """Set type and number for multiple cells.
-
-        Args:
-            cell_info: list of dictionaries,
-                each dictionary contain cell type
-                and cell number for each cell
-                that the simulator need to control.
-        """
-        if not cell_info:
-            raise ValueError('Missing cell info from configurations file')
-        self.cells = cell_info
-
-    def turn_cell_on(self, cell_type, cell_number):
-        """Turn UXM's cell on.
-
-        Args:
-            cell_type: type of cell (e.g NR5G, LTE).
-            cell_number: ordinal number of a cell.
-        """
-        if cell_type and cell_number:
-            self._socket_send_SCPI_command(
-                self.SCPI_CELL_ON_CMD.format(cell_type, cell_number))
-        else:
-            raise ValueError('Invalid cell info\n' +
-                             f' cell type: {cell_type}\n' +
-                             f' cell number: {cell_number}\n')
-
-    def turn_cell_off(self, cell_type, cell_number):
-        """Turn UXM's cell off.
-
-        Args:
-            cell_type: type of cell (e.g NR5G, LTE).
-            cell_number: ordinal number of a cell.
-        """
-        if cell_type and cell_number:
-            self._socket_send_SCPI_command(
-                self.SCPI_CELL_OFF_CMD.format(cell_type, cell_number))
-        else:
-            raise ValueError('Invalid cell info\n' +
-                             f' cell type: {cell_type}\n' +
-                             f' cell number: {cell_number}\n')
-
-    def get_cell_status(self, cell_type, cell_number):
-        """Get status of cell.
-
-        Args:
-            cell_type: type of cell (e.g NR5G, LTE).
-            cell_number: ordinal number of a cell.
-        """
-        if not cell_type or not cell_number:
-            raise ValueError('Invalid cell with\n' +
-                             f' cell type: {cell_type}\n' +
-                             f' cell number: {cell_number}\n')
-
-        return self._socket_send_SCPI_for_result_command(
-            self.SCPI_GET_CELL_STATUS.format(cell_type, cell_number))
-
-    def check_socket_connection(self):
-        """Check if the socket connection is established.
-
-        Query the identification of the Keysight Test Application
-        we are trying to connect to. Empty response indicates
-        connection fail, and vice versa.
-        """
-        self.socket.sendall(self.SCPI_CHECK_CONNECTION_CMD.encode())
-        response = self.socket.recv(1024).decode()
-        if response:
-            self.log.info(f'Connected to: {response}')
-        else:
-            self.log.error('Fail to connect to callbox')
-
-    def _socket_connect(self, host, port):
-        """Create socket connection.
-
-        Args:
-            host: IP address of desktop where Keysight Test Application resides.
-            port: port that Keysight Test Application is listening for socket
-                communication.
-        Return:
-            s: socket object.
-        """
-        self.log.info('Establishing connection to callbox via socket')
-        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s.connect((host, port))
-        return s
-
-    def _socket_send_SCPI_command(self, command):
-        """Send SCPI command without expecting response.
-
-        Args:
-            command: a string SCPI command.
-        """
-        # make sure there is a line break for the socket to send command
-        command = command + '\n'
-        # send command
-        self.socket.sendall(command.encode())
-        self.log.info(f'Sent {command}')
-
-    def _socket_receive_SCPI_result(self):
-        """Receive response from socket. """
-        i = 1
-        response = ''
-        while i < self.timeout and not response:
-            response = self.socket.recv(1024).decode()
-            i += 1
-        return response
-
-    def _socket_send_SCPI_for_result_command(self, command):
-        """Send SCPI command and expecting response.
-
-        Args:
-            command: a string SCPI command.
-        """
-        self._socket_send_SCPI_command(command)
-        response = self._socket_receive_SCPI_result()
-        return response
-
-    def check_system_error(self):
-        """Query system error from Keysight Test Application.
-
-        Return:
-            status: a message indicate the number of errors
-                and detail of errors if any.
-                a string `0,"No error"` indicates no error.
-        """
-        status = self._socket_send_SCPI_for_result_command(
-            self.SCPI_SYSTEM_ERROR_CHECK_CMD)
-        self.log.info(f'System error status: {status}')
-        return status
-
-    def import_configuration(self, path):
-        """Import SCPI config file.
-
-        Args:
-            path: path to SCPI file.
-        """
-        self._socket_send_SCPI_command(
-            self.SCPI_IMPORT_SCPI_FILE_CMD.format(path))
-        time.sleep(45)
-
-    def destroy(self):
-        """Close socket connection with UXM. """
-        self.socket.close()
-
-    def setup_lte_scenario(self, path):
-        """Configures the equipment for an LTE simulation.
-
-        Args:
-            path: path to SCPI config file.
-        """
-        self.import_configuration(path)
-
-    def dut_rockbottom(self, dut):
-        """Set the dut to rockbottom state.
-
-        Args:
-            dut: a CellularAndroid controller.
-        """
-        # The rockbottom script might include a device reboot, so it is
-        # necessary to stop SL4A during its execution.
-        dut.ad.stop_services()
-        self.log.info('Executing rockbottom script for ' + dut.ad.model)
-        os.chmod(self.rockbottom_script, 0o777)
-        os.system('{} {}'.format(self.rockbottom_script, dut.ad.serial))
-        # Make sure the DUT is in root mode after coming back
-        dut.ad.root_adb()
-        # Restart SL4A
-        dut.ad.start_services()
-
-    def wait_until_attached_one_cell(self,
-                                     cell_type,
-                                     cell_number,
-                                     dut,
-                                     wait_for_camp_interval,
-                                     attach_retries,
-                                     change_dut_setting_allow=True):
-        """Wait until connect to given UXM cell.
-
-        After turn off airplane mode, sleep for
-        wait_for_camp_interval seconds for device to camp.
-        If not device is not connected after the wait,
-        either toggle airplane mode on/off or reboot device.
-        Args:
-            cell_type: type of cell
-                which we are trying to connect to.
-            cell_number: ordinal number of a cell
-                which we are trying to connect to.
-            dut: a CellularAndroid controller.
-            wait_for_camp_interval: sleep interval,
-                wait for device to camp.
-            attach_retries: number of retry
-                to wait for device
-                to connect to 1 basestation.
-            change_dut_setting_allow: turn on/off APM
-                or reboot device helps with device camp time.
-                However, if we are trying to connect to second cell
-                changing APM status or reboot is not allowed.
-        Raise:
-            AbstractCellularSimulator.CellularSimulatorError:
-                device unable to connect to cell.
-        """
-        # airplane mode off
-        # dut.ad.adb.shell('settings put secure adaptive_connectivity_enabled 0')
-        dut.toggle_airplane_mode(False)
-        time.sleep(5)
-        # turn cell on
-        self.turn_cell_on(cell_type, cell_number)
-        time.sleep(5)
-
-        # waits for connect
-        for index in range(1, attach_retries):
-            # airplane mode on
-            time.sleep(wait_for_camp_interval)
-            cell_state = self.get_cell_status(cell_type, cell_number)
-            self.log.info(f'cell state: {cell_state}')
-            if cell_state == 'CONN\n':
-                return True
-            if cell_state == 'OFF\n':
-                self.turn_cell_on(cell_type, cell_number)
-                time.sleep(5)
-            if change_dut_setting_allow:
-                if (index % 4) == 0:
-                    dut.ad.reboot()
-                    if self.rockbottom_script:
-                        self.dut_rockbottom(dut)
-                    else:
-                        self.log.warning(
-                            f'Rockbottom script {self} was not executed after reboot'
-                        )
-                else:
-                    # airplane mode on
-                    dut.toggle_airplane_mode(True)
-                    time.sleep(5)
-                    # airplane mode off
-                    dut.toggle_airplane_mode(False)
-
-        # Phone cannot connected to basestation of callbox
-        raise RuntimeError(
-            f'Phone was unable to connect to cell: {cell_type}-{cell_number}')
-
-    def wait_until_attached(self, dut, timeout, attach_retries):
-        """Waits until the DUT is attached to all required cells.
-
-        Args:
-            dut: a CellularAndroid controller.
-            timeout: sleep interval,
-                wait for device to camp in 1 try.
-            attach_retries: number of retry
-                to wait for device
-                to connect to 1 basestation.
-        """
-        # get cell info
-        first_cell_type = self.cells[0][self.KEY_CELL_TYPE]
-        first_cell_number = self.cells[0][self.KEY_CELL_NUMBER]
-        if len(self.cells) == 2:
-            second_cell_type = self.cells[1][self.KEY_CELL_TYPE]
-            second_cell_number = self.cells[1][self.KEY_CELL_NUMBER]
-
-        # connect to 1st cell
-        try:
-            self.wait_until_attached_one_cell(first_cell_type,
-                                              first_cell_number, dut, timeout,
-                                              attach_retries)
-        except Exception as exc:
-            raise RuntimeError(f'Cannot connect to first cell') from exc
-
-        # connect to 2nd cell
-        if len(self.cells) == 2:
-            self.turn_cell_on(
-                second_cell_type,
-                second_cell_number,
-            )
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:DL None')
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:UL None')
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:DL CELL1')
-            self._socket_send_SCPI_command(
-                'BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:DL CELL1')
-            time.sleep(1)
-            self._socket_send_SCPI_command(
-                "BSE:CONFig:LTE:CELL1:CAGGregation:AGGRegate:NRCC:APPly")
-            try:
-                self.wait_until_attached_one_cell(second_cell_type,
-                                                  second_cell_number, dut,
-                                                  timeout, attach_retries,
-                                                  False)
-            except Exception as exc:
-                raise RuntimeError(f'Cannot connect to second cell') from exc
-
-    def set_lte_rrc_state_change_timer(self, enabled, time=10):
-        """Configures the LTE RRC state change timer.
-
-        Args:
-            enabled: a boolean indicating if the timer should be on or off.
-            time: time in seconds for the timer to expire.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_band(self, bts_index, band):
-        """Sets the band for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            band: the new band.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def get_duplex_mode(self, band):
-        """Determines if the band uses FDD or TDD duplex mode
-
-        Args:
-            band: a band number.
-
-        Returns:
-            an variable of class DuplexMode indicating if band is FDD or TDD.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_input_power(self, bts_index, input_power):
-        """Sets the input power for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            input_power: the new input power.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_output_power(self, bts_index, output_power):
-        """Sets the output power for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            output_power: the new output power.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_tdd_config(self, bts_index, tdd_config):
-        """Sets the tdd configuration number for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            tdd_config: the new tdd configuration number.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_ssf_config(self, bts_index, ssf_config):
-        """Sets the Special Sub-Frame config number for the indicated.
-
-        base station.
-
-        Args:
-            bts_index: the base station number.
-            ssf_config: the new ssf config number.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_bandwidth(self, bts_index, bandwidth):
-        """Sets the bandwidth for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            bandwidth: the new bandwidth
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_downlink_channel_number(self, bts_index, channel_number):
-        """Sets the downlink channel number for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            channel_number: the new channel number.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_mimo_mode(self, bts_index, mimo_mode):
-        """Sets the mimo mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number
-            mimo_mode: the new mimo mode
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_transmission_mode(self, bts_index, tmode):
-        """Sets the transmission mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            tmode: the new transmission mode.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_scheduling_mode(self,
-                            bts_index,
-                            scheduling,
-                            mcs_dl=None,
-                            mcs_ul=None,
-                            nrb_dl=None,
-                            nrb_ul=None):
-        """Sets the scheduling mode for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            scheduling: the new scheduling mode.
-            mcs_dl: Downlink MCS.
-            mcs_ul: Uplink MCS.
-            nrb_dl: Number of RBs for downlink.
-            nrb_ul: Number of RBs for uplink.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_dl_256_qam_enabled(self, bts_index, enabled):
-        """Determines what MCS table should be used for the downlink.
-
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number.
-            enabled: whether 256 QAM should be used.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_ul_64_qam_enabled(self, bts_index, enabled):
-        """Determines what MCS table should be used for the uplink.
-
-        This only saves the setting that will be used when configuring MCS.
-
-        Args:
-            bts_index: the base station number.
-            enabled: whether 64 QAM should be used.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_mac_padding(self, bts_index, mac_padding):
-        """Enables or disables MAC padding in the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            mac_padding: the new MAC padding setting.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_cfi(self, bts_index, cfi):
-        """Sets the Channel Format Indicator for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            cfi: the new CFI setting.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_paging_cycle(self, bts_index, cycle_duration):
-        """Sets the paging cycle duration for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            cycle_duration: the new paging cycle duration in milliseconds.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def set_phich_resource(self, bts_index, phich):
-        """Sets the PHICH Resource setting for the indicated base station.
-
-        Args:
-            bts_index: the base station number.
-            phich: the new PHICH resource setting.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def lte_attach_secondary_carriers(self, ue_capability_enquiry):
-        """Activates the secondary carriers for CA.
-
-        Requires the DUT to be attached to the primary carrier first.
-
-        Args:
-            ue_capability_enquiry: UE capability enquiry message to be sent to
-              the UE before starting carrier aggregation.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def wait_until_communication_state(self, timeout=120):
-        """Waits until the DUT is in Communication state.
-
-        Args:
-            timeout: after this amount of time the method will raise
-                a CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def wait_until_idle_state(self, timeout=120):
-        """Waits until the DUT is in Idle state.
-
-        Args:
-            timeout: after this amount of time the method will raise a
-                CellularSimulatorError exception. Default is 120 seconds.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def detach(self):
-        """ Turns off all the base stations so the DUT loose connection."""
-        for cell in self.cells:
-            cell_type = cell[self.KEY_CELL_TYPE]
-            cell_number = cell[self.KEY_CELL_NUMBER]
-            self._socket_send_SCPI_command(
-                self.SCPI_CELL_OFF_CMD.format(cell_type, cell_number))
-
-    def stop(self):
-        """Stops current simulation.
-
-        After calling this method, the simulator will need to be set up again.
-        """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def start_data_traffic(self):
-        """Starts transmitting data from the instrument to the DUT. """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
-
-    def stop_data_traffic(self):
-        """Stops transmitting data from the instrument to the DUT. """
-        raise NotImplementedError(
-            'This UXM callbox simulator does not support this feature.')
diff --git a/src/antlion/dict_object.py b/src/antlion/dict_object.py
deleted file mode 100644
index 0be7821..0000000
--- a/src/antlion/dict_object.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class DictObject(dict):
-    """Optional convenient base type for creating simple objects that are
-    naturally serializable.
-
-    A DictObject provides object-oriented access semantics to a dictionary,
-    allowing it to look like a class with defined members. By ensuring that
-    all of the class members are serializable, the object can be serialized
-    as a dictionary/de-serialized from a dictionary.
-    """
-
-    def __init__(self, *args, **kwargs):
-        """Constructor for a dictionary-as-object representation of kwargs
-
-        Args:
-            args: Currently unused - included for completeness
-            kwargs: keyword arguments used to construct the underlying dict
-
-        Returns:
-            Instance of DictObject
-        """
-        super(DictObject, self).update(**kwargs)
-
-    def __getattr__(self, name):
-        """Returns a key from the superclass dictionary as an attribute
-
-        Args:
-            name: name of the pseudo class attribute
-
-        Returns:
-            Dictionary item stored at "name"
-
-        Raises:
-            AttributeError if the item is not found
-        """
-        try:
-            return self[name]
-        except KeyError as ke:
-            raise AttributeError(ke)
-
-    def __setattr__(self, name, value):
-        """Updates the value of a key=name to a given value
-
-        Args:
-            name: name of the pseudo class attribute
-            value: value of the key
-
-        Raises:
-            AttributeError if the item is not found
-        """
-        if name in super(DictObject, self).keys():
-            super(DictObject, self).__setitem__(name, value)
-        else:
-            raise AttributeError("Class does not have attribute {}"
-                                 .format(value))
-
-    @classmethod
-    def from_dict(cls, dictionary):
-        """Factory method for constructing a DictObject from a dictionary
-
-        Args:
-            dictionary: Dictionary used to construct the DictObject
-
-        Returns:
-            Instance of DictObject
-        """
-        c = cls()
-        c.update(dictionary)
-        return c
diff --git a/src/antlion/error.py b/src/antlion/error.py
deleted file mode 100644
index 95969e5..0000000
--- a/src/antlion/error.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""This class is where error information will be stored.
-"""
-
-from antlion.signals import TestError
-
-
-class ActsError(TestError):
-    """Base Acts Error"""
-    def __init__(self, *args, **kwargs):
-        class_name = self.__class__.__name__
-        self.error_doc = self.__class__.__doc__
-        self.error_code = getattr(ActsErrorCode, class_name,
-                                  ActsErrorCode.UNKNOWN)
-        extras = dict(**kwargs, error_doc=self.error_doc,
-                      error_code=self.error_code)
-        details = args[0] if len(args) > 0 else ''
-        super().__init__(details, extras)
-
-
-class ActsErrorCode:
-    # Framework Errors 0-999
-
-    UNKNOWN = 0
-
-    # This error code is used to implement unittests for this class.
-    ActsError = 100
-    AndroidDeviceError = 101
-
-    # Controllers Errors 1000-3999
-
-    Sl4aStartError = 1001
-    Sl4aApiError = 1002
-    Sl4aConnectionError = 1003
-    Sl4aProtocolError = 1004
-    Sl4aNotInstalledError = 1005
-    Sl4aRpcTimeoutError = 1006
-
-    # Util Errors 4000-9999
-
-    FastbootError = 9000
-    AdbError = 9001
-    AdbCommandError = 9002
diff --git a/src/antlion/event/decorators.py b/src/antlion/event/decorators.py
deleted file mode 100644
index c3f7b83..0000000
--- a/src/antlion/event/decorators.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.event.subscription_handle import InstanceSubscriptionHandle
-from antlion.event.subscription_handle import StaticSubscriptionHandle
-from antlion.event import subscription_bundle
-
-
-def subscribe_static(event_type, event_filter=None, order=0):
-    """A decorator that subscribes a static or module-level function.
-
-    This function must be registered manually.
-    """
-    class InnerSubscriptionHandle(StaticSubscriptionHandle):
-        def __init__(self, func):
-            super().__init__(event_type, func,
-                             event_filter=event_filter,
-                             order=order)
-
-    return InnerSubscriptionHandle
-
-
-def subscribe(event_type, event_filter=None, order=0):
-    """A decorator that subscribes an instance method."""
-    class InnerSubscriptionHandle(InstanceSubscriptionHandle):
-        def __init__(self, func):
-            super().__init__(event_type, func,
-                             event_filter=event_filter,
-                             order=order)
-
-    return InnerSubscriptionHandle
-
-
-def register_static_subscriptions(decorated):
-    """Registers all static subscriptions in decorated's attributes.
-
-    Args:
-        decorated: The object being decorated
-
-    Returns:
-        The decorated.
-    """
-    subscription_bundle.create_from_static(decorated).register()
-
-    return decorated
-
-
-def register_instance_subscriptions(obj):
-    """A decorator that subscribes all instance subscriptions after object init.
-    """
-    old_init = obj.__init__
-
-    def init_replacement(self, *args, **kwargs):
-        old_init(self, *args, **kwargs)
-        subscription_bundle.create_from_instance(self).register()
-
-    obj.__init__ = init_replacement
-    return obj
diff --git a/src/antlion/event/event_bus.py b/src/antlion/event/event_bus.py
deleted file mode 100644
index 9c6a862..0000000
--- a/src/antlion/event/event_bus.py
+++ /dev/null
@@ -1,292 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import bisect
-import logging
-import inspect
-from threading import RLock
-
-from antlion.event.event_subscription import EventSubscription
-from antlion.event.subscription_handle import SubscriptionHandle
-
-
-class _EventBus(object):
-    """
-    Attributes:
-        _subscriptions: A dictionary of {EventType: list<EventSubscription>}.
-        _registration_id_map: A dictionary of
-                             {RegistrationID: EventSubscription}
-        _subscription_lock: The lock to prevent concurrent removal or addition
-                            to events.
-    """
-
-    def __init__(self):
-        self._subscriptions = {}
-        self._registration_id_map = {}
-        self._subscription_lock = RLock()
-
-    def register(self, event_type, func, filter_fn=None, order=0):
-        """Subscribes the given function to the event type given.
-
-        Args:
-            event_type: The type of the event to subscribe to.
-            func: The function to call when the event is posted.
-            filter_fn: An option function to be called before calling the
-                       subscribed func. If this function returns falsy, then the
-                       function will not be invoked.
-            order: The order the the subscription should run in. Lower values
-                   run first, with the default value set to 0. In the case of a
-                   tie between two subscriptions of the same event type, the
-                   subscriber added first executes first. In the case of a tie
-                   between two subscribers of a different type, the type of the
-                   subscription that is more specific goes first (i.e.
-                   BaseEventType will execute after ChildEventType if they share
-                   the same order).
-
-        Returns:
-            A registration ID.
-        """
-        subscription = EventSubscription(event_type, func,
-                                         event_filter=filter_fn,
-                                         order=order)
-        return self.register_subscription(subscription)
-
-    def register_subscriptions(self, subscriptions):
-        """Registers all subscriptions to the event bus.
-
-        Args:
-            subscriptions: an iterable that returns EventSubscriptions
-
-        Returns:
-            The list of registration IDs.
-        """
-        registration_ids = []
-        for subscription in subscriptions:
-            registration_ids.append(self.register_subscription(subscription))
-
-        return registration_ids
-
-    def register_subscription(self, subscription):
-        """Registers the given subscription to the event bus.
-
-        Args:
-            subscription: An EventSubscription object
-
-        Returns:
-            A registration ID.
-        """
-        with self._subscription_lock:
-            if subscription.event_type in self._subscriptions.keys():
-                subscription_list = self._subscriptions[subscription.event_type]
-                subscription_list.append(subscription)
-                subscription_list.sort(key=lambda x: x.order)
-            else:
-                subscription_list = list()
-                bisect.insort(subscription_list, subscription)
-                self._subscriptions[subscription.event_type] = subscription_list
-
-            registration_id = id(subscription)
-            self._registration_id_map[registration_id] = subscription
-
-        return registration_id
-
-    def post(self, event, ignore_errors=False):
-        """Posts an event to its subscribers.
-
-        Args:
-            event: The event object to send to the subscribers.
-            ignore_errors: Deliver to all subscribers, ignoring any errors.
-        """
-        listening_subscriptions = []
-        for current_type in inspect.getmro(type(event)):
-            if current_type not in self._subscriptions.keys():
-                continue
-            for subscription in self._subscriptions[current_type]:
-                listening_subscriptions.append(subscription)
-
-        # The subscriptions will be collected in sorted runs of sorted order.
-        # Running timsort here is the optimal way to sort this list.
-        listening_subscriptions.sort(key=lambda x: x.order)
-        for subscription in listening_subscriptions:
-            try:
-                subscription.deliver(event)
-            except Exception:
-                if ignore_errors:
-                    logging.exception('An exception occurred while handling '
-                                      'an event.')
-                    continue
-                raise
-
-    def unregister(self, registration_id):
-        """Unregisters an EventSubscription.
-
-        Args:
-            registration_id: the Subscription or registration_id to unsubscribe.
-        """
-        if type(registration_id) is SubscriptionHandle:
-            subscription = registration_id.subscription
-            registration_id = id(registration_id.subscription)
-        elif type(registration_id) is EventSubscription:
-            subscription = registration_id
-            registration_id = id(registration_id)
-        elif registration_id in self._registration_id_map.keys():
-            subscription = self._registration_id_map[registration_id]
-        elif type(registration_id) is not int:
-            raise ValueError(
-                'Subscription ID "%s" is not a valid ID. This value'
-                'must be an integer ID returned from subscribe().'
-                % registration_id)
-        else:
-            # The value is a "valid" id, but is not subscribed. It's possible
-            # another thread has unsubscribed this value.
-            logging.warning('Attempted to unsubscribe %s, but the matching '
-                            'subscription cannot be found.' % registration_id)
-            return False
-
-        event_type = subscription.event_type
-        with self._subscription_lock:
-            self._registration_id_map.pop(registration_id, None)
-            if (event_type in self._subscriptions and
-                    subscription in self._subscriptions[event_type]):
-                self._subscriptions[event_type].remove(subscription)
-        return True
-
-    def unregister_all(self, from_list=None, from_event=None):
-        """Removes all event subscriptions.
-
-        Args:
-            from_list: Unregisters all events from a given list.
-            from_event: Unregisters all events of a given event type.
-        """
-        if from_list is None:
-            from_list = list(self._registration_id_map.values())
-
-        for subscription in from_list:
-            if from_event is None or subscription.event_type == from_event:
-                self.unregister(subscription)
-
-
-_event_bus = _EventBus()
-
-
-def register(event_type, func, filter_fn=None, order=0):
-    """Subscribes the given function to the event type given.
-
-    Args:
-        event_type: The type of the event to subscribe to.
-        func: The function to call when the event is posted.
-        filter_fn: An option function to be called before calling the subscribed
-                   func. If this function returns falsy, then the function will
-                   not be invoked.
-        order: The order the the subscription should run in. Lower values run
-               first, with the default value set to 0. In the case of a tie
-               between two subscriptions of the same event type, the
-               subscriber added first executes first. In the case of a tie
-               between two subscribers of a different type, the type of the
-               subscription that is more specific goes first (i.e. BaseEventType
-               will execute after ChildEventType if they share the same order).
-
-    Returns:
-        A registration ID.
-    """
-    return _event_bus.register(event_type, func, filter_fn=filter_fn,
-                               order=order)
-
-
-def register_subscriptions(subscriptions):
-    """Registers all subscriptions to the event bus.
-
-    Args:
-        subscriptions: an iterable that returns EventSubscriptions
-
-    Returns:
-        The list of registration IDs.
-    """
-    return _event_bus.register_subscriptions(subscriptions)
-
-
-def register_subscription(subscription):
-    """Registers the given subscription to the event bus.
-
-    Args:
-        subscription: An EventSubscription object
-
-    Returns:
-        A registration ID.
-    """
-    return _event_bus.register_subscription(subscription)
-
-
-def post(event, ignore_errors=False):
-    """Posts an event to its subscribers.
-
-    Args:
-        event: The event object to send to the subscribers.
-        ignore_errors: Deliver to all subscribers, ignoring any errors.
-    """
-    _event_bus.post(event, ignore_errors)
-
-
-def unregister(registration_id):
-    """Unregisters an EventSubscription.
-
-    Args:
-        registration_id: the Subscription or registration_id to unsubscribe.
-    """
-    # null check for the corner case where the _event_bus is destroyed before
-    # the subscribers unregister. In such case there is nothing else to
-    # be done.
-    if _event_bus is None:
-        return True
-    return _event_bus.unregister(registration_id)
-
-
-def unregister_all(from_list=None, from_event=None):
-    """Removes all event subscriptions.
-
-    Args:
-        from_list: Unregisters all events from a given list.
-        from_event: Unregisters all events of a given event type.
-    """
-    return _event_bus.unregister_all(from_list=from_list, from_event=from_event)
-
-
-class listen_for(object):
-    """A context-manager class (with statement) for listening to an event within
-    a given section of code.
-
-    Usage:
-
-    with listen_for(EventType, event_listener):
-        func_that_posts_event()  # Will call event_listener
-
-    func_that_posts_event()  # Will not call event_listener
-
-    """
-
-    def __init__(self, event_type, func, filter_fn=None, order=0):
-        self.event_type = event_type
-        self.func = func
-        self.filter_fn = filter_fn
-        self.order = order
-        self.registration_id = None
-
-    def __enter__(self):
-        self.registration_id = _event_bus.register(self.event_type, self.func,
-                                                   filter_fn=self.filter_fn,
-                                                   order=self.order)
-
-    def __exit__(self, *unused):
-        _event_bus.unregister(self.registration_id)
diff --git a/src/antlion/event/event_subscription.py b/src/antlion/event/event_subscription.py
deleted file mode 100644
index e442507..0000000
--- a/src/antlion/event/event_subscription.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class EventSubscription(object):
-    """A class that defines the way a function is subscribed to an event.
-
-    Attributes:
-        event_type: The type of the event.
-        _func: The subscribed function.
-        _event_filter: A lambda that returns True if an event should be passed
-                       to the subscribed function.
-        order: The order value in which this subscription should be called.
-    """
-    def __init__(self, event_type, func, event_filter=None, order=0):
-        self._event_type = event_type
-        self._func = func
-        self._event_filter = event_filter
-        self.order = order
-
-    @property
-    def event_type(self):
-        return self._event_type
-
-    def deliver(self, event):
-        """Delivers an event to the subscriber.
-
-        This function will not deliver the event if the event filter rejects the
-        event.
-
-        Args:
-            event: The event to send to the subscriber.
-        """
-        if self._event_filter and not self._event_filter(event):
-            return
-        self._func(event)
diff --git a/src/antlion/event/subscription_bundle.py b/src/antlion/event/subscription_bundle.py
deleted file mode 100644
index d936bd2..0000000
--- a/src/antlion/event/subscription_bundle.py
+++ /dev/null
@@ -1,153 +0,0 @@
-import logging
-import threading
-
-from antlion.event import event_bus
-from antlion.event.event_subscription import EventSubscription
-from antlion.event.subscription_handle import InstanceSubscriptionHandle
-from antlion.event.subscription_handle import SubscriptionHandle
-from antlion.event.subscription_handle import StaticSubscriptionHandle
-
-
-class SubscriptionBundle(object):
-    """A class for maintaining a set of EventSubscriptions in the event bus.
-
-    Attributes:
-        subscriptions: A dictionary of {EventSubscription: RegistrationID}
-    """
-
-    def __init__(self):
-        self.subscriptions = {}
-        self._subscription_lock = threading.Lock()
-        self._registered = False
-
-    @property
-    def registered(self):
-        """True if this SubscriptionBundle has been registered."""
-        return self._registered
-
-    def add(self, event_type, func, event_filter=None,
-            order=0):
-        """Adds a new Subscription to this SubscriptionBundle.
-
-        If this SubscriptionBundle is registered, the added Subscription will
-        also be registered.
-
-        Returns:
-            the EventSubscription object created.
-        """
-        subscription = EventSubscription(event_type, func,
-                                         event_filter=event_filter,
-                                         order=order)
-        return self.add_subscription(subscription)
-
-    def add_subscription(self, subscription):
-        """Adds an existing Subscription to the subscription bundle.
-
-        If this SubscriptionBundle is registered, the added subscription will
-        also be registered.
-
-        Returns:
-            the subscription object.
-        """
-        registration_id = None
-        with self._subscription_lock:
-            if self.registered:
-                registration_id = event_bus.register_subscription(subscription)
-
-            self.subscriptions[subscription] = registration_id
-        return subscription
-
-    def remove_subscription(self, subscription):
-        """Removes a subscription from the SubscriptionBundle.
-
-        If the SubscriptionBundle is registered, removing the subscription will
-        also unregister it.
-        """
-        if subscription not in self.subscriptions.keys():
-            return False
-        with self._subscription_lock:
-            if self.registered:
-                event_bus.unregister(self.subscriptions[subscription])
-            del self.subscriptions[subscription]
-        return True
-
-    def register(self):
-        """Registers all subscriptions found within this object."""
-        if self.registered:
-            return
-        with self._subscription_lock:
-            self._registered = True
-            for subscription, registration_id in self.subscriptions.items():
-                if registration_id is not None:
-                    logging.warning('Registered subscription found in '
-                                    'unregistered SubscriptionBundle: %s, %s' %
-                                    (subscription, registration_id))
-                self.subscriptions[subscription] = (
-                    event_bus.register_subscription(subscription))
-
-    def unregister(self):
-        """Unregisters all subscriptions managed by this SubscriptionBundle."""
-        if not self.registered:
-            return
-        with self._subscription_lock:
-            self._registered = False
-            for subscription, registration_id in self.subscriptions.items():
-                if registration_id is None:
-                    logging.warning('Unregistered subscription found in '
-                                    'registered SubscriptionBundle: %s, %s' %
-                                    (subscription, registration_id))
-                event_bus.unregister(subscription)
-                self.subscriptions[subscription] = None
-
-
-def create_from_static(obj):
-    """Generates a SubscriptionBundle from @subscribe_static functions on obj.
-
-    Args:
-        obj: The object that contains @subscribe_static functions. Can either
-             be a module or a class.
-
-    Returns:
-        An unregistered SubscriptionBundle.
-    """
-    return _create_from_object(obj, obj, StaticSubscriptionHandle)
-
-
-def create_from_instance(instance):
-    """Generates a SubscriptionBundle from an instance's @subscribe functions.
-
-    Args:
-        instance: The instance object that contains @subscribe functions.
-
-    Returns:
-        An unregistered SubscriptionBundle.
-    """
-    return _create_from_object(instance, instance.__class__,
-                               InstanceSubscriptionHandle)
-
-
-def _create_from_object(obj, obj_to_search, subscription_handle_type):
-    """Generates a SubscriptionBundle from an object's SubscriptionHandles.
-
-    Note that instance variables do not have the class's functions as direct
-    attributes. The attributes are resolved from the type of the object. Here,
-    we need to search through the instance's class to find the correct types,
-    and subscribe the instance-specific subscriptions.
-
-    Args:
-        obj: The object that contains SubscriptionHandles.
-        obj_to_search: The class to search for SubscriptionHandles from.
-        subscription_handle_type: The type of the SubscriptionHandles to
-                                  capture.
-
-    Returns:
-        An unregistered SubscriptionBundle.
-    """
-    bundle = SubscriptionBundle()
-    for attr_name, attr_value in obj_to_search.__dict__.items():
-        if isinstance(attr_value, subscription_handle_type):
-            bundle.add_subscription(getattr(obj, attr_name).subscription)
-        if isinstance(attr_value, staticmethod):
-            if isinstance(getattr(obj, attr_name), subscription_handle_type):
-                bundle.add_subscription(getattr(obj, attr_name).subscription)
-    return bundle
diff --git a/src/antlion/event/subscription_handle.py b/src/antlion/event/subscription_handle.py
deleted file mode 100644
index 0123ab7..0000000
--- a/src/antlion/event/subscription_handle.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.event.event_subscription import EventSubscription
-
-
-class SubscriptionHandle(object):
-    """The object created by a method decorated with an event decorator."""
-
-    def __init__(self, event_type, func, event_filter=None, order=0):
-        self._event_type = event_type
-        self._func = func
-        self._event_filter = event_filter
-        self._order = order
-        self._subscription = None
-        self._owner = None
-
-    @property
-    def subscription(self):
-        if self._subscription:
-            return self._subscription
-        self._subscription = EventSubscription(self._event_type, self._func,
-                                               event_filter=self._event_filter,
-                                               order=self._order)
-        return self._subscription
-
-    def __get__(self, instance, owner):
-        # If our owner has been initialized, or do not have an instance owner,
-        # return self.
-        if self._owner is not None or instance is None:
-            return self
-
-        # Otherwise, we create a new SubscriptionHandle that will only be used
-        # for the instance that owns this SubscriptionHandle.
-        ret = SubscriptionHandle(self._event_type, self._func,
-                                 self._event_filter, self._order)
-        ret._owner = instance
-        ret._func = ret._wrap_call(ret._func)
-        for attr, value in owner.__dict__.items():
-            if value is self:
-                setattr(instance, attr, ret)
-                break
-        return ret
-
-    def _wrap_call(self, func):
-        def _wrapped_call(*args, **kwargs):
-            if self._owner is None:
-                return func(*args, **kwargs)
-            else:
-                return func(self._owner, *args, **kwargs)
-        return _wrapped_call
-
-    def __call__(self, *args, **kwargs):
-        return self._func(*args, **kwargs)
-
-
-class InstanceSubscriptionHandle(SubscriptionHandle):
-    """A SubscriptionHandle for instance methods."""
-
-
-class StaticSubscriptionHandle(SubscriptionHandle):
-    """A SubscriptionHandle for static methods."""
diff --git a/src/antlion/keys.py b/src/antlion/keys.py
deleted file mode 100644
index a89898c..0000000
--- a/src/antlion/keys.py
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-
-"""This module has the global key values that are used across framework
-modules.
-"""
-
-
-class Config(enum.Enum):
-    """Enum values for test config related lookups.
-    """
-    # Keys used to look up values from test config files.
-    # These keys define the wording of test configs and their internal
-    # references.
-    key_log_path = 'logpath'
-    key_testbeds_under_test = 'testbeds_under_test'
-    key_testbed = 'testbed'
-    key_testbed_name = 'name'
-    # configpath is the directory. key_config_full_path is the file path.
-    key_config_path = 'configpath'
-    key_config_full_path = 'config_full_path'
-    key_test_paths = 'testpaths'
-    key_port = 'Port'
-    key_address = 'Address'
-    key_test_case_iterations = 'test_case_iterations'
-    key_test_failure_tracebacks = 'test_failure_tracebacks'
-    # Config names for controllers packaged in ACTS.
-    key_android_device = 'AndroidDevice'
-    key_bits = 'Bits'
-    key_bluetooth_pts_device = 'BluetoothPtsDevice'
-    key_fuchsia_device = 'FuchsiaDevice'
-    key_buds_device = 'BudsDevice'
-    key_chameleon_device = 'ChameleonDevice'
-    key_native_android_device = 'NativeAndroidDevice'
-    key_relay_device = 'RelayDevice'
-    key_access_point = 'AccessPoint'
-    key_attenuator = 'Attenuator'
-    key_iperf_server = 'IPerfServer'
-    key_iperf_client = 'IPerfClient'
-    key_packet_sender = 'PacketSender'
-    key_monsoon = 'Monsoon'
-    key_sniffer = 'Sniffer'
-    key_arduino_wifi_dongle = 'ArduinoWifiDongle'
-    key_packet_capture = 'PacketCapture'
-    key_pdu = 'PduDevice'
-    key_openwrt_ap = 'OpenWrtAP'
-    key_tigertail = 'Tigertail'
-    key_asus_axe11000_ap = 'AsusAXE11000AP'
-    # Internal keys, used internally, not exposed to user's config files.
-    ikey_user_param = 'user_params'
-    ikey_testbed_name = 'testbed_name'
-    ikey_logger = 'log'
-    ikey_logpath = 'log_path'
-    ikey_summary_writer = 'summary_writer'
-    # module name of controllers packaged in ACTS.
-    m_key_bits = 'bits'
-    m_key_monsoon = 'monsoon'
-    m_key_android_device = 'android_device'
-    m_key_fuchsia_device = 'fuchsia_device'
-    m_key_bluetooth_pts_device = 'bluetooth_pts_device'
-    m_key_buds_device = 'buds_controller'
-    m_key_chameleon_device = 'chameleon_controller'
-    m_key_native_android_device = 'native_android_device'
-    m_key_relay_device = 'relay_device_controller'
-    m_key_access_point = 'access_point'
-    m_key_attenuator = 'attenuator'
-    m_key_iperf_server = 'iperf_server'
-    m_key_iperf_client = 'iperf_client'
-    m_key_packet_sender = 'packet_sender'
-    m_key_sniffer = 'sniffer'
-    m_key_arduino_wifi_dongle = 'arduino_wifi_dongle'
-    m_key_packet_capture = 'packet_capture'
-    m_key_pdu = 'pdu'
-    m_key_openwrt_ap = 'openwrt_ap'
-    m_key_tigertail = 'tigertail'
-    m_key_asus_axe11000_ap = 'asus_axe11000_ap'
-
-    # A list of keys whose values in configs should not be passed to test
-    # classes without unpacking first.
-    reserved_keys = (key_testbed, key_log_path, key_test_paths)
-
-    # Controller names packaged with ACTS.
-    builtin_controller_names = [
-        key_android_device,
-        key_bits,
-        key_bluetooth_pts_device,
-        key_fuchsia_device,
-        key_buds_device,
-        key_native_android_device,
-        key_relay_device,
-        key_access_point,
-        key_attenuator,
-        key_iperf_server,
-        key_iperf_client,
-        key_packet_sender,
-        key_monsoon,
-        key_sniffer,
-        key_chameleon_device,
-        key_arduino_wifi_dongle,
-        key_packet_capture,
-        key_pdu,
-        key_openwrt_ap,
-        key_tigertail,
-        key_asus_axe11000_ap,
-    ]
-
-    # Keys that are file or folder paths.
-    file_path_keys = [key_relay_device]
-
-
-def get_name_by_value(value):
-    for name, member in Config.__members__.items():
-        if member.value == value:
-            return name
-    return None
-
-
-def get_module_name(name_in_config):
-    """Translates the name of a controller in config file to its module name.
-    """
-    return value_to_value(name_in_config, 'm_%s')
-
-
-def value_to_value(ref_value, pattern):
-    """Translates the value of a key to the value of its corresponding key. The
-    corresponding key is chosen based on the variable name pattern.
-    """
-    ref_key_name = get_name_by_value(ref_value)
-    if not ref_key_name:
-        return None
-    target_key_name = pattern % ref_key_name
-    try:
-        return getattr(Config, target_key_name).value
-    except AttributeError:
-        return None
diff --git a/src/antlion/libs/logging/log_stream.py b/src/antlion/libs/logging/log_stream.py
deleted file mode 100644
index b457e46..0000000
--- a/src/antlion/libs/logging/log_stream.py
+++ /dev/null
@@ -1,436 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import os
-import sys
-from logging import FileHandler
-from logging import Handler
-from logging import StreamHandler
-from logging.handlers import RotatingFileHandler
-
-from antlion import context
-from antlion.context import ContextLevel
-from antlion.event import event_bus
-from antlion.event.decorators import subscribe_static
-
-
-# yapf: disable
-class LogStyles:
-    NONE         = 0x00
-    LOG_DEBUG    = 0x01
-    LOG_INFO     = 0x02
-    LOG_WARNING  = 0x04
-    LOG_ERROR    = 0x08
-    LOG_CRITICAL = 0x10
-
-    DEFAULT_LEVELS = LOG_DEBUG + LOG_INFO + LOG_ERROR
-    ALL_LEVELS = LOG_DEBUG + LOG_INFO + LOG_WARNING + LOG_ERROR + LOG_CRITICAL
-
-    MONOLITH_LOG  = 0x0100
-    TESTCLASS_LOG = 0x0200
-    TESTCASE_LOG  = 0x0400
-    TO_STDOUT     = 0x0800
-    TO_ACTS_LOG   = 0x1000
-    ROTATE_LOGS   = 0x2000
-
-    ALL_FILE_LOGS = MONOLITH_LOG + TESTCLASS_LOG + TESTCASE_LOG
-
-    LEVEL_NAMES = {
-        LOG_DEBUG: 'debug',
-        LOG_INFO: 'info',
-        LOG_WARNING: 'warning',
-        LOG_ERROR: 'error',
-        LOG_CRITICAL: 'critical',
-    }
-
-    LOG_LEVELS = [
-        LOG_DEBUG,
-        LOG_INFO,
-        LOG_WARNING,
-        LOG_ERROR,
-        LOG_CRITICAL,
-    ]
-
-    LOG_LOCATIONS = [
-        TO_STDOUT,
-        TO_ACTS_LOG,
-        MONOLITH_LOG,
-        TESTCLASS_LOG,
-        TESTCASE_LOG
-    ]
-
-    LEVEL_TO_NO = {
-        LOG_DEBUG: logging.DEBUG,
-        LOG_INFO: logging.INFO,
-        LOG_WARNING: logging.WARNING,
-        LOG_ERROR: logging.ERROR,
-        LOG_CRITICAL: logging.CRITICAL,
-    }
-
-    LOCATION_TO_CONTEXT_LEVEL = {
-        MONOLITH_LOG: ContextLevel.ROOT,
-        TESTCLASS_LOG: ContextLevel.TESTCLASS,
-        TESTCASE_LOG: ContextLevel.TESTCASE
-    }
-# yapf: enable
-
-
-_log_streams = dict()
-_null_handler = logging.NullHandler()
-
-
-@subscribe_static(context.NewContextEvent)
-def _update_handlers(event):
-    for log_stream in _log_streams.values():
-        log_stream.update_handlers(event)
-
-
-event_bus.register_subscription(_update_handlers.subscription)
-
-
-def create_logger(name, log_name=None, base_path='', subcontext='',
-                  log_styles=LogStyles.NONE, stream_format=None,
-                  file_format=None):
-    """Creates a Python Logger object with the given attributes.
-
-    Creation through this method will automatically manage the logger in the
-    background for test-related events, such as TestCaseBegin and TestCaseEnd
-    Events.
-
-    Args:
-        name: The name of the LogStream. Used as the file name prefix.
-        log_name: The name of the underlying logger. Use LogStream name as
-            default.
-        base_path: The base path used by the logger.
-        subcontext: Location of logs relative to the test context path.
-        log_styles: An integer or array of integers that are the sum of
-            corresponding flag values in LogStyles. Examples include:
-
-            >>> LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG
-
-            >>> LogStyles.ALL_LEVELS + LogStyles.MONOLITH_LOG
-
-            >>> [LogStyles.DEFAULT_LEVELS + LogStyles.MONOLITH_LOG]
-            >>>  LogStyles.LOG_ERROR + LogStyles.TO_ACTS_LOG]
-        stream_format: Format used for log output to stream
-        file_format: Format used for log output to files
-    """
-    if name in _log_streams:
-        _log_streams[name].cleanup()
-    log_stream = _LogStream(name, log_name, base_path, subcontext, log_styles,
-                            stream_format, file_format)
-    _set_logger(log_stream)
-    return log_stream.logger
-
-
-def _set_logger(log_stream):
-    _log_streams[log_stream.name] = log_stream
-    return log_stream
-
-
-class AlsoToLogHandler(Handler):
-    """Logs a message at a given level also to another logger.
-
-    Used for logging messages at a high enough level to the main log, or another
-    logger.
-    """
-
-    def __init__(self, to_logger=None, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self._log = logging.getLogger(to_logger)
-
-    def emit(self, record):
-        self._log.log(record.levelno, record.getMessage())
-
-
-class MovableFileHandler(FileHandler):
-    """FileHandler implementation that allows the output file to be changed
-    during operation.
-    """
-    def set_file(self, file_name):
-        """Set the target output file to file_name.
-
-        Args:
-            file_name: path to the new output file
-        """
-        self.baseFilename = os.path.abspath(file_name)
-        if self.stream is not None:
-            new_stream = self._open()
-            # An atomic operation redirects the output and closes the old file
-            os.dup2(new_stream.fileno(), self.stream.fileno())
-            self.stream = new_stream
-
-
-class MovableRotatingFileHandler(RotatingFileHandler):
-    """RotatingFileHandler implementation that allows the output file to be
-    changed during operation. Rotated files will automatically adopt the newest
-    output path.
-    """
-    set_file = MovableFileHandler.set_file
-
-
-class InvalidStyleSetError(Exception):
-    """Raised when the given LogStyles are an invalid set."""
-
-
-class _LogStream(object):
-    """A class that sets up a logging.Logger object.
-
-    The LogStream class creates a logging.Logger object. LogStream is also
-    responsible for managing the logger when events take place, such as
-    TestCaseEndedEvents and TestCaseBeginEvents.
-
-    Attributes:
-        name: The name of the LogStream.
-        logger: The logger created by this LogStream.
-        base_path: The base path used by the logger. Use logging.log_path
-            as default.
-        subcontext: Location of logs relative to the test context path.
-        stream_format: Format used for log output to stream
-        file_format: Format used for log output to files
-    """
-
-    def __init__(self, name, log_name=None, base_path='', subcontext='',
-                 log_styles=LogStyles.NONE, stream_format=None,
-                 file_format=None):
-        """Creates a LogStream.
-
-        Args:
-            name: The name of the LogStream. Used as the file name prefix.
-            log_name: The name of the underlying logger. Use LogStream name
-                as default.
-            base_path: The base path used by the logger. Use logging.log_path
-                as default.
-            subcontext: Location of logs relative to the test context path.
-            log_styles: An integer or array of integers that are the sum of
-                corresponding flag values in LogStyles. Examples include:
-
-                >>> LogStyles.LOG_INFO + LogStyles.TESTCASE_LOG
-
-                >>> LogStyles.ALL_LEVELS + LogStyles.MONOLITH_LOG
-
-                >>> [LogStyles.DEFAULT_LEVELS + LogStyles.MONOLITH_LOG]
-                >>>  LogStyles.LOG_ERROR + LogStyles.TO_ACTS_LOG]
-            stream_format: Format used for log output to stream
-            file_format: Format used for log output to files
-        """
-        self.name = name
-        if log_name is not None:
-            self.logger = logging.getLogger(log_name)
-        else:
-            self.logger = logging.getLogger(name)
-        # Add a NullHandler to suppress unwanted console output
-        self.logger.addHandler(_null_handler)
-        self.logger.propagate = False
-        self.base_path = base_path or getattr(logging, 'log_path',
-                                              '/tmp/acts_logs')
-        self.subcontext = subcontext
-        context.TestContext.add_base_output_path(self.logger.name, self.base_path)
-        context.TestContext.add_subcontext(self.logger.name, self.subcontext)
-        self.stream_format = stream_format
-        self.file_format = file_format
-        self._testclass_handlers = []
-        self._testcase_handlers = []
-        if not isinstance(log_styles, list):
-            log_styles = [log_styles]
-        self.__validate_styles(log_styles)
-        for log_style in log_styles:
-            self.__handle_style(log_style)
-
-    @staticmethod
-    def __validate_styles(_log_styles_list):
-        """Determines if the given list of styles is valid.
-
-        Terminology:
-            Log-level: any of [DEBUG, INFO, WARNING, ERROR, CRITICAL].
-            Log Location: any of [MONOLITH_LOG, TESTCLASS_LOG,
-                                  TESTCASE_LOG, TO_STDOUT, TO_ACTS_LOG].
-
-        Styles are invalid when any of the below criteria are met:
-            A log-level is not set within an element of the list.
-            A log location is not set within an element of the list.
-            A log-level, log location pair appears twice within the list.
-            A log-level has both TESTCLASS and TESTCASE locations set
-                within the list.
-            ROTATE_LOGS is set without MONOLITH_LOG,
-                TESTCLASS_LOG, or TESTCASE_LOG.
-
-        Raises:
-            InvalidStyleSetError if the given style cannot be achieved.
-        """
-
-        def invalid_style_error(message):
-            raise InvalidStyleSetError('{LogStyle Set: %s} %s' %
-                                       (_log_styles_list, message))
-
-        # Store the log locations that have already been set per level.
-        levels_dict = {}
-        for log_style in _log_styles_list:
-            for level in LogStyles.LOG_LEVELS:
-                if log_style & level:
-                    levels_dict[level] = levels_dict.get(level, LogStyles.NONE)
-                    # Check that a log-level, log location pair has not yet
-                    # been set.
-                    for log_location in LogStyles.LOG_LOCATIONS:
-                        if log_style & log_location:
-                            if log_location & levels_dict[level]:
-                                invalid_style_error(
-                                    'The log location %s for log level %s has '
-                                    'been set multiple times' %
-                                    (log_location, level))
-                            else:
-                                levels_dict[level] |= log_location
-                    # Check that for a given log-level, not more than one
-                    # of MONOLITH_LOG, TESTCLASS_LOG, TESTCASE_LOG is set.
-                    locations = levels_dict[level] & LogStyles.ALL_FILE_LOGS
-                    valid_locations = [
-                        LogStyles.TESTCASE_LOG, LogStyles.TESTCLASS_LOG,
-                        LogStyles.MONOLITH_LOG, LogStyles.NONE]
-                    if locations not in valid_locations:
-                        invalid_style_error(
-                            'More than one of MONOLITH_LOG, TESTCLASS_LOG, '
-                            'TESTCASE_LOG is set for log level %s.' % level)
-            if log_style & LogStyles.ALL_LEVELS == 0:
-                invalid_style_error('LogStyle %s needs to set a log '
-                                    'level.' % log_style)
-            if log_style & ~LogStyles.ALL_LEVELS == 0:
-                invalid_style_error('LogStyle %s needs to set a log '
-                                    'location.' % log_style)
-            if log_style & LogStyles.ROTATE_LOGS and not log_style & (
-                    LogStyles.MONOLITH_LOG | LogStyles.TESTCLASS_LOG |
-                    LogStyles.TESTCASE_LOG):
-                invalid_style_error('LogStyle %s has ROTATE_LOGS set, but does '
-                                    'not specify a log type.' % log_style)
-
-    @staticmethod
-    def __create_rotating_file_handler(filename):
-        """Generates a callable to create an appropriate RotatingFileHandler."""
-        # Magic number explanation: 10485760 == 10MB
-        return MovableRotatingFileHandler(filename, maxBytes=10485760,
-                                          backupCount=5)
-
-    @staticmethod
-    def __get_file_handler_creator(log_style):
-        """Gets the callable to create the correct FileLogHandler."""
-        create_file_handler = MovableFileHandler
-        if log_style & LogStyles.ROTATE_LOGS:
-            create_file_handler = _LogStream.__create_rotating_file_handler
-        return create_file_handler
-
-    @staticmethod
-    def __get_lowest_log_level(log_style):
-        """Returns the lowest log level's LogStyle for the given log_style."""
-        for log_level in LogStyles.LOG_LEVELS:
-            if log_level & log_style:
-                return log_level
-        return LogStyles.NONE
-
-    def __get_current_output_dir(self, depth=ContextLevel.TESTCASE):
-        """Gets the current output directory from the context system. Make the
-        directory if it doesn't exist.
-
-        Args:
-            depth: The desired level of the output directory. For example,
-                the TESTCLASS level would yield the directory associated with
-                the current test class context, even if the test is currently
-                within a test case.
-        """
-        curr_context = context.get_current_context(depth)
-        return curr_context.get_full_output_path(self.logger.name)
-
-    def __create_handler(self, creator, level, location):
-        """Creates the FileHandler.
-
-        Args:
-            creator: The callable that creates the FileHandler
-            level: The logging level (INFO, DEBUG, etc.) for this handler.
-            location: The log location (MONOLITH, TESTCLASS, TESTCASE) for this
-                handler.
-
-        Returns: A FileHandler
-        """
-        directory = self.__get_current_output_dir(
-            LogStyles.LOCATION_TO_CONTEXT_LEVEL[location])
-        base_name = '%s_%s.txt' % (self.name, LogStyles.LEVEL_NAMES[level])
-        handler = creator(os.path.join(directory, base_name))
-        handler.setLevel(LogStyles.LEVEL_TO_NO[level])
-        if self.file_format:
-            handler.setFormatter(self.file_format)
-        return handler
-
-    def __handle_style(self, log_style):
-        """Creates the handlers described in the given log_style."""
-        handler_creator = self.__get_file_handler_creator(log_style)
-
-        # Handle streaming logs to STDOUT or the ACTS Logger
-        if log_style & (LogStyles.TO_ACTS_LOG | LogStyles.TO_STDOUT):
-            lowest_log_level = self.__get_lowest_log_level(log_style)
-
-            if log_style & LogStyles.TO_ACTS_LOG:
-                handler = AlsoToLogHandler()
-            else:  # LogStyles.TO_STDOUT:
-                handler = StreamHandler(sys.stdout)
-                if self.stream_format:
-                    handler.setFormatter(self.stream_format)
-
-            handler.setLevel(LogStyles.LEVEL_TO_NO[lowest_log_level])
-            self.logger.addHandler(handler)
-
-        # Handle streaming logs to log-level files
-        for log_level in LogStyles.LOG_LEVELS:
-            log_location = log_style & LogStyles.ALL_FILE_LOGS
-            if not (log_style & log_level and log_location):
-                continue
-
-            handler = self.__create_handler(
-                handler_creator, log_level, log_location)
-            self.logger.addHandler(handler)
-
-            if log_style & LogStyles.TESTCLASS_LOG:
-                self._testclass_handlers.append(handler)
-            if log_style & LogStyles.TESTCASE_LOG:
-                self._testcase_handlers.append(handler)
-
-    def __remove_handler(self, handler):
-        """Removes a handler from the logger, unless it's a NullHandler."""
-        if handler is not _null_handler:
-            handler.close()
-            self.logger.removeHandler(handler)
-
-    def update_handlers(self, event):
-        """Update the output file paths for log handlers upon a change in
-        the test context.
-
-        Args:
-            event: An instance of NewContextEvent.
-        """
-        handlers = []
-        if isinstance(event, context.NewTestClassContextEvent):
-            handlers = self._testclass_handlers + self._testcase_handlers
-        if isinstance(event, context.NewTestCaseContextEvent):
-            handlers = self._testcase_handlers
-
-        if not handlers:
-            return
-        new_dir = self.__get_current_output_dir()
-        for handler in handlers:
-            filename = os.path.basename(handler.baseFilename)
-            handler.set_file(os.path.join(new_dir, filename))
-
-    def cleanup(self):
-        """Removes all LogHandlers from the logger."""
-        for handler in self.logger.handlers:
-            self.__remove_handler(handler)
diff --git a/src/antlion/libs/ota/ota_runners/ota_runner.py b/src/antlion/libs/ota/ota_runners/ota_runner.py
deleted file mode 100644
index 4b20564..0000000
--- a/src/antlion/libs/ota/ota_runners/ota_runner.py
+++ /dev/null
@@ -1,213 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from zipfile import ZipFile
-
-"""The setup time in seconds."""
-SL4A_SERVICE_SETUP_TIME = 5
-
-
-"""The path to the metadata found within the OTA package."""
-OTA_PACKAGE_METADATA_PATH = 'META-INF/com/android/metadata'
-
-
-class OtaError(Exception):
-    """Raised when an error in the OTA Update process occurs."""
-
-
-class InvalidOtaUpdateError(OtaError):
-    """Raised when the update from one version to another is not valid."""
-
-
-class OtaRunner(object):
-    """The base class for all OTA Update Runners."""
-
-    def __init__(self, ota_tool, android_device):
-        self.ota_tool = ota_tool
-        self.android_device = android_device
-        self.serial = self.android_device.serial
-
-    def _update(self):
-        post_build_id = self.get_post_build_id()
-        log = self.android_device.log
-        old_info = self.android_device.adb.getprop('ro.build.fingerprint')
-        log.info('Starting Update. Beginning build info: %s', old_info)
-        log.info('Stopping services.')
-        self.android_device.stop_services()
-        log.info('Beginning tool.')
-        self.ota_tool.update(self)
-        log.info('Tool finished. Waiting for boot completion.')
-        self.android_device.wait_for_boot_completion()
-        new_info = self.android_device.adb.getprop('ro.build.fingerprint')
-        if not old_info or old_info == new_info:
-            raise OtaError('The device was not updated to a new build. '
-                           'Previous build: %s. Current build: %s. '
-                           'Expected build: %s' % (old_info, new_info,
-                                                   post_build_id))
-        log.info('Boot completed. Rooting adb.')
-        self.android_device.root_adb()
-        log.info('Root complete.')
-        if self.android_device.skip_sl4a:
-            self.android_device.log.info('Skipping SL4A install.')
-        else:
-            for _ in range(3):
-                self.android_device.log.info('Re-installing SL4A from "%s".',
-                                             self.get_sl4a_apk())
-                self.android_device.adb.install(
-                    '-r -g %s' % self.get_sl4a_apk(), ignore_status=True)
-                time.sleep(SL4A_SERVICE_SETUP_TIME)
-                if self.android_device.is_sl4a_installed():
-                    break
-        log.info('Starting services.')
-        self.android_device.start_services()
-        self.android_device.update_sdk_api_level()
-        log.info('Services started. Running ota tool cleanup.')
-        self.ota_tool.cleanup(self)
-        log.info('Cleanup complete.')
-
-    def get_ota_package_metadata(self, requested_field):
-        """Returns a variable found within the OTA package's metadata.
-
-        Args:
-            requested_field: the name of the metadata field
-
-        Will return None if the variable cannot be found.
-        """
-        ota_zip = ZipFile(self.get_ota_package(), 'r')
-        if OTA_PACKAGE_METADATA_PATH in ota_zip.namelist():
-            with ota_zip.open(OTA_PACKAGE_METADATA_PATH) as metadata:
-                timestamp_line = requested_field.encode('utf-8')
-                timestamp_offset = len(timestamp_line) + 1
-
-                for line in metadata.readlines():
-                    if line.startswith(timestamp_line):
-                        return line[timestamp_offset:].decode('utf-8').strip()
-        return None
-
-    def validate_update(self):
-        """Raises an error if updating to the next build is not valid.
-
-        Raises:
-            InvalidOtaUpdateError if the ota version is not valid, or cannot be
-                validated.
-        """
-        # The timestamp the current device build was created at.
-        cur_img_timestamp = self.android_device.adb.getprop('ro.build.date.utc')
-        ota_img_timestamp = self.get_ota_package_metadata('post-timestamp')
-
-        if ota_img_timestamp is None:
-            raise InvalidOtaUpdateError('Unable to find the timestamp '
-                                        'for the OTA build.')
-
-        try:
-            if int(ota_img_timestamp) <= int(cur_img_timestamp):
-                cur_fingerprint = self.android_device.adb.getprop(
-                    'ro.bootimage.build.fingerprint')
-                ota_fingerprint = self.get_post_build_id()
-                raise InvalidOtaUpdateError(
-                    'The OTA image comes from an earlier build than the '
-                    'source build. Current build: Time: %s -- %s, '
-                    'OTA build: Time: %s -- %s' %
-                    (cur_img_timestamp, cur_fingerprint,
-                     ota_img_timestamp, ota_fingerprint))
-        except ValueError:
-            raise InvalidOtaUpdateError(
-                'Unable to parse timestamps. Current timestamp: %s, OTA '
-                'timestamp: %s' % (ota_img_timestamp, cur_img_timestamp))
-
-    def get_post_build_id(self):
-        """Returns the post-build ID found within the OTA package metadata.
-
-        Raises:
-            InvalidOtaUpdateError if the post-build ID cannot be found.
-        """
-        return self.get_ota_package_metadata('post-build')
-
-    def can_update(self):
-        """Whether or not an update package is available for the device."""
-        return NotImplementedError()
-
-    def get_ota_package(self):
-        raise NotImplementedError()
-
-    def get_sl4a_apk(self):
-        raise NotImplementedError()
-
-
-class SingleUseOtaRunner(OtaRunner):
-    """A single use OtaRunner.
-
-    SingleUseOtaRunners can only be ran once. If a user attempts to run it more
-    than once, an error will be thrown. Users can avoid the error by checking
-    can_update() before calling update().
-    """
-
-    def __init__(self, ota_tool, android_device, ota_package, sl4a_apk):
-        super(SingleUseOtaRunner, self).__init__(ota_tool, android_device)
-        self._ota_package = ota_package
-        self._sl4a_apk = sl4a_apk
-        self._called = False
-
-    def can_update(self):
-        return not self._called
-
-    def update(self):
-        """Starts the update process."""
-        if not self.can_update():
-            raise OtaError('A SingleUseOtaTool instance cannot update a device '
-                           'multiple times.')
-        self._called = True
-        self._update()
-
-    def get_ota_package(self):
-        return self._ota_package
-
-    def get_sl4a_apk(self):
-        return self._sl4a_apk
-
-
-class MultiUseOtaRunner(OtaRunner):
-    """A multiple use OtaRunner.
-
-    MultiUseOtaRunner can only be ran for as many times as there have been
-    packages provided to them. If a user attempts to run it more than the number
-    of provided packages, an error will be thrown. Users can avoid the error by
-    checking can_update() before calling update().
-    """
-
-    def __init__(self, ota_tool, android_device, ota_packages, sl4a_apks):
-        super(MultiUseOtaRunner, self).__init__(ota_tool, android_device)
-        self._ota_packages = ota_packages
-        self._sl4a_apks = sl4a_apks
-        self.current_update_number = 0
-
-    def can_update(self):
-        return not self.current_update_number == len(self._ota_packages)
-
-    def update(self):
-        """Starts the update process."""
-        if not self.can_update():
-            raise OtaError('This MultiUseOtaRunner has already updated all '
-                           'given packages onto the phone.')
-        self._update()
-        self.current_update_number += 1
-
-    def get_ota_package(self):
-        return self._ota_packages[self.current_update_number]
-
-    def get_sl4a_apk(self):
-        return self._sl4a_apks[self.current_update_number]
diff --git a/src/antlion/libs/ota/ota_runners/ota_runner_factory.py b/src/antlion/libs/ota/ota_runners/ota_runner_factory.py
deleted file mode 100644
index 311b045..0000000
--- a/src/antlion/libs/ota/ota_runners/ota_runner_factory.py
+++ /dev/null
@@ -1,204 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from antlion.config_parser import ActsConfigError
-from antlion.libs.ota.ota_runners import ota_runner
-from antlion.libs.ota.ota_tools import ota_tool_factory
-from antlion.libs.ota.ota_tools import adb_sideload_ota_tool
-
-_bound_devices = {}
-
-DEFAULT_OTA_TOOL = adb_sideload_ota_tool.AdbSideloadOtaTool.__name__
-DEFAULT_OTA_COMMAND = 'adb'
-
-
-def create_all_from_configs(config, android_devices):
-    """Creates a new OtaTool for each given AndroidDevice.
-
-    After an OtaTool is assigned to a device, another OtaTool cannot be created
-    for that device. This will prevent OTA Update tests that accidentally flash
-    the same build onto a device more than once.
-
-    Args:
-        config: the ACTS config user_params.
-        android_devices: The devices to run an OTA Update on.
-
-    Returns:
-        A list of OtaRunners responsible for updating the given devices. The
-        indexes match the indexes of the corresponding AndroidDevice in
-        android_devices.
-    """
-    return [create_from_configs(config, ad) for ad in android_devices]
-
-
-def create_from_configs(config, android_device):
-    """Creates a new OtaTool for the given AndroidDevice.
-
-    After an OtaTool is assigned to a device, another OtaTool cannot be created
-    for that device. This will prevent OTA Update tests that accidentally flash
-    the same build onto a device more than once.
-
-    Args:
-        config: the ACTS config user_params.
-        android_device: The device to run the OTA Update on.
-
-    Returns:
-        An OtaRunner responsible for updating the given device.
-    """
-    # Default to adb sideload
-    try:
-        ota_tool_class_name = get_ota_value_from_config(
-            config, 'ota_tool', android_device)
-    except ActsConfigError:
-        ota_tool_class_name = DEFAULT_OTA_TOOL
-
-    if ota_tool_class_name not in config:
-        if ota_tool_class_name is not DEFAULT_OTA_TOOL:
-            raise ActsConfigError(
-                'If the ota_tool is overloaded, the path to the tool must be '
-                'added to the ACTS config file under {"OtaToolName": '
-                '"path/to/tool"} (in this case, {"%s": "path/to/tool"}.' %
-                ota_tool_class_name)
-        else:
-            command = DEFAULT_OTA_COMMAND
-    else:
-        command = config[ota_tool_class_name]
-        if type(command) is list:
-            # If file came as a list in the config.
-            if len(command) == 1:
-                command = command[0]
-            else:
-                raise ActsConfigError(
-                    'Config value for "%s" must be either a string or a list '
-                    'of exactly one element' % ota_tool_class_name)
-
-    ota_package = get_ota_value_from_config(config, 'ota_package',
-                                            android_device)
-    ota_sl4a = get_ota_value_from_config(config, 'ota_sl4a', android_device)
-    if type(ota_sl4a) != type(ota_package):
-        raise ActsConfigError(
-            'The ota_package and ota_sl4a must either both be strings, or '
-            'both be lists. Device with serial "%s" has mismatched types.' %
-            android_device.serial)
-    return create(ota_package, ota_sl4a, android_device, ota_tool_class_name,
-                  command)
-
-
-def create(ota_package,
-           ota_sl4a,
-           android_device,
-           ota_tool_class_name=DEFAULT_OTA_TOOL,
-           command=DEFAULT_OTA_COMMAND,
-           use_cached_runners=True):
-    """
-    Args:
-        ota_package: A string or list of strings corresponding to the
-            update.zip package location(s) for running an OTA update.
-        ota_sl4a: A string or list of strings corresponding to the
-            sl4a.apk package location(s) for running an OTA update.
-        ota_tool_class_name: The class name for the desired ota_tool
-        command: The command line tool name for the updater
-        android_device: The AndroidDevice to run the OTA Update on.
-        use_cached_runners: Whether or not to use runners cached by previous
-            create calls.
-
-    Returns:
-        An OtaRunner with the given properties from the arguments.
-    """
-    ota_tool = ota_tool_factory.create(ota_tool_class_name, command)
-    return create_from_package(ota_package, ota_sl4a, android_device, ota_tool,
-                               use_cached_runners)
-
-
-def create_from_package(ota_package,
-                        ota_sl4a,
-                        android_device,
-                        ota_tool,
-                        use_cached_runners=True):
-    """
-    Args:
-        ota_package: A string or list of strings corresponding to the
-            update.zip package location(s) for running an OTA update.
-        ota_sl4a: A string or list of strings corresponding to the
-            sl4a.apk package location(s) for running an OTA update.
-        ota_tool: The OtaTool to be paired with the returned OtaRunner
-        android_device: The AndroidDevice to run the OTA Update on.
-        use_cached_runners: Whether or not to use runners cached by previous
-            create calls.
-
-    Returns:
-        An OtaRunner with the given properties from the arguments.
-    """
-    if android_device in _bound_devices and use_cached_runners:
-        logging.warning('Android device %s has already been assigned an '
-                        'OtaRunner. Returning previously created runner.')
-        return _bound_devices[android_device]
-
-    if type(ota_package) != type(ota_sl4a):
-        raise TypeError(
-            'The ota_package and ota_sl4a must either both be strings, or '
-            'both be lists. Device with serial "%s" has requested mismatched '
-            'types.' % android_device.serial)
-
-    if type(ota_package) is str:
-        runner = ota_runner.SingleUseOtaRunner(ota_tool, android_device,
-                                               ota_package, ota_sl4a)
-    elif type(ota_package) is list:
-        runner = ota_runner.MultiUseOtaRunner(ota_tool, android_device,
-                                              ota_package, ota_sl4a)
-    else:
-        raise TypeError('The "ota_package" value in the acts config must be '
-                        'either a list or a string.')
-
-    _bound_devices[android_device] = runner
-    return runner
-
-
-def get_ota_value_from_config(config, key, android_device):
-    """Returns a key for the given AndroidDevice.
-
-    Args:
-        config: The ACTS config
-        key: The base key desired (ota_tool, ota_sl4a, or ota_package)
-        android_device: An AndroidDevice
-
-    Returns: The value at the specified key.
-    Throws: ActsConfigError if the value cannot be determined from the config.
-    """
-    suffix = ''
-    if 'ota_map' in config:
-        if android_device.serial in config['ota_map']:
-            suffix = '_%s' % config['ota_map'][android_device.serial]
-
-    ota_package_key = '%s%s' % (key, suffix)
-    if ota_package_key not in config:
-        if suffix != '':
-            raise ActsConfigError(
-                'Asked for an OTA Update without specifying a required value. '
-                '"ota_map" has entry {"%s": "%s"}, but there is no '
-                'corresponding entry {"%s":"/path/to/file"} found within the '
-                'ACTS config.' % (android_device.serial, suffix[1:],
-                                  ota_package_key))
-        else:
-            raise ActsConfigError(
-                'Asked for an OTA Update without specifying a required value. '
-                '"ota_map" does not exist or have a key for serial "%s", and '
-                'the default value entry "%s" cannot be found within the ACTS '
-                'config.' % (android_device.serial, ota_package_key))
-
-    return config[ota_package_key]
diff --git a/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py b/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
deleted file mode 100644
index 5b45241..0000000
--- a/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from antlion.libs.ota.ota_tools.ota_tool import OtaTool
-
-# OTA Packages can be upwards of 1 GB. This may take some time to transfer over
-# USB 2.0.
-PUSH_TIMEOUT = 10 * 60
-
-
-class AdbSideloadOtaTool(OtaTool):
-    """Updates an AndroidDevice using adb sideload."""
-
-    def __init__(self, ignored_command):
-        # "command" is ignored. The ACTS adb version is used to prevent
-        # differing adb versions from constantly killing adbd.
-        super(AdbSideloadOtaTool, self).__init__(ignored_command)
-
-    def update(self, ota_runner):
-        logging.info('Rooting adb')
-        ota_runner.android_device.root_adb()
-        logging.info('Rebooting to sideload')
-        ota_runner.android_device.adb.reboot('sideload')
-        ota_runner.android_device.adb.wait_for_sideload()
-        logging.info('Sideloading ota package')
-        package_path = ota_runner.get_ota_package()
-        logging.info('Running adb sideload with package "%s"' % package_path)
-        ota_runner.android_device.adb.sideload(
-            package_path, timeout=PUSH_TIMEOUT)
-        logging.info('Sideload complete. Waiting for device to come back up.')
-        ota_runner.android_device.adb.wait_for_recovery()
-        ota_runner.android_device.reboot(stop_at_lock_screen=True)
-        logging.info('Device is up. Update complete.')
diff --git a/src/antlion/libs/ota/ota_tools/ota_tool_factory.py b/src/antlion/libs/ota/ota_tools/ota_tool_factory.py
deleted file mode 100644
index c889ddc..0000000
--- a/src/antlion/libs/ota/ota_tools/ota_tool_factory.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.libs.ota.ota_tools.adb_sideload_ota_tool import AdbSideloadOtaTool
-from antlion.libs.ota.ota_tools.update_device_ota_tool import UpdateDeviceOtaTool
-
-_CONSTRUCTORS = {
-    AdbSideloadOtaTool.__name__: lambda command: AdbSideloadOtaTool(command),
-    UpdateDeviceOtaTool.__name__: lambda command: UpdateDeviceOtaTool(command),
-}
-_constructed_tools = {}
-
-
-def create(ota_tool_class, command):
-    """Returns an OtaTool with the given class name.
-
-    If the tool has already been created, the existing instance will be
-    returned.
-
-    Args:
-        ota_tool_class: the class/type of the tool you wish to use.
-        command: the command line tool being used.
-
-    Returns:
-        An OtaTool.
-    """
-    if ota_tool_class in _constructed_tools:
-        return _constructed_tools[ota_tool_class]
-
-    if ota_tool_class not in _CONSTRUCTORS:
-        raise KeyError('Given Ota Tool class name does not match a known '
-                       'name. Found "%s". Expected any of %s. If this tool '
-                       'does exist, add it to the _CONSTRUCTORS dict in this '
-                       'module.' % (ota_tool_class, _CONSTRUCTORS.keys()))
-
-    new_update_tool = _CONSTRUCTORS[ota_tool_class](command)
-    _constructed_tools[ota_tool_class] = new_update_tool
-
-    return new_update_tool
diff --git a/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py b/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py
deleted file mode 100644
index 03d10c5..0000000
--- a/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import shutil
-import tempfile
-
-from antlion.libs.ota.ota_tools import ota_tool
-from antlion.libs.proc import job
-from antlion import utils
-
-# OTA Packages can be upwards of 1 GB. This may take some time to transfer over
-# USB 2.0. A/B devices must also complete the update in the background.
-UPDATE_TIMEOUT = 60 * 60
-UPDATE_LOCATION = '/data/ota_package/update.zip'
-
-
-class UpdateDeviceOtaTool(ota_tool.OtaTool):
-    """Runs an OTA Update with system/update_engine/scripts/update_device.py."""
-    def __init__(self, command):
-        super(UpdateDeviceOtaTool, self).__init__(command)
-
-        self.unzip_path = tempfile.mkdtemp()
-        utils.unzip_maintain_permissions(self.command, self.unzip_path)
-
-        self.command = os.path.join(self.unzip_path, 'update_device.py')
-
-    def update(self, ota_runner):
-        logging.info('Forcing adb to be in root mode.')
-        ota_runner.android_device.root_adb()
-        update_command = 'python3 %s -s %s %s' % (
-            self.command, ota_runner.serial, ota_runner.get_ota_package())
-        logging.info('Running %s' % update_command)
-        result = job.run(update_command, timeout=UPDATE_TIMEOUT)
-        logging.info('Output: %s' % result.stdout)
-
-        logging.info('Rebooting device for update to go live.')
-        ota_runner.android_device.reboot(stop_at_lock_screen=True)
-        logging.info('Reboot sent.')
-
-    def __del__(self):
-        """Delete the unzipped update_device folder before ACTS exits."""
-        shutil.rmtree(self.unzip_path)
diff --git a/src/antlion/libs/ota/ota_updater.py b/src/antlion/libs/ota/ota_updater.py
deleted file mode 100644
index 1e434b3..0000000
--- a/src/antlion/libs/ota/ota_updater.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-from antlion.libs.ota.ota_runners import ota_runner_factory
-
-"""Maps AndroidDevices to OtaRunners."""
-ota_runners = {}
-
-
-def initialize(user_params, android_devices):
-    """Initialize OtaRunners for each device.
-
-    Args:
-        user_params: The user_params from the ACTS config.
-        android_devices: The android_devices in the test.
-    """
-    for ad in android_devices:
-        ota_runners[ad] = ota_runner_factory.create_from_configs(
-            user_params, ad)
-
-
-def _check_initialization(android_device):
-    """Check if a given device was initialized."""
-    if android_device not in ota_runners:
-        raise KeyError('Android Device with serial "%s" has not been '
-                       'initialized for OTA Updates. Did you forget to call'
-                       'ota_updater.initialize()?' % android_device.serial)
-
-
-def update(android_device, ignore_update_errors=False):
-    """Update a given AndroidDevice.
-
-    Args:
-        android_device: The device to update
-        ignore_update_errors: Whether or not to ignore update errors such as
-           no more updates available for a given device. Default is false.
-    Throws:
-        OtaError if ignore_update_errors is false and the OtaRunner has run out
-        of packages to update the phone with.
-    """
-    _check_initialization(android_device)
-    ota_runners[android_device].validate_update()
-    try:
-        ota_runners[android_device].update()
-    except Exception as e:
-        if ignore_update_errors:
-            return
-        android_device.log.error(e)
-        android_device.take_bug_report('ota_update',
-                                       utils.get_current_epoch_time())
-        raise e
-
-
-def can_update(android_device):
-    """Whether or not a device can be updated."""
-    _check_initialization(android_device)
-    return ota_runners[android_device].can_update()
diff --git a/src/antlion/libs/proc/job.py b/src/antlion/libs/proc/job.py
deleted file mode 100644
index b17d904..0000000
--- a/src/antlion/libs/proc/job.py
+++ /dev/null
@@ -1,197 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import sys
-import time
-
-if os.name == 'posix' and sys.version_info[0] < 3:
-    import subprocess32 as subprocess
-else:
-    import subprocess
-
-
-class Error(Exception):
-    """Indicates that a command failed, is fatal to the test unless caught."""
-
-    def __init__(self, result):
-        super(Error, self).__init__(result)
-        self.result: Result = result
-
-
-class TimeoutError(Error):
-    """Thrown when a BackgroundJob times out on wait."""
-
-
-class Result(object):
-    """Command execution result.
-
-    Contains information on subprocess execution after it has exited.
-
-    Attributes:
-        command: An array containing the command and all arguments that
-                 was executed.
-        exit_status: Integer exit code of the process.
-        stdout_raw: The raw bytes output from standard out.
-        stderr_raw: The raw bytes output from standard error
-        duration: How long the process ran for.
-        did_timeout: True if the program timed out and was killed.
-    """
-
-    @property
-    def stdout(self):
-        """String representation of standard output."""
-        if not self._stdout_str:
-            self._stdout_str = self._raw_stdout.decode(encoding=self._encoding,
-                                                       errors='replace')
-            self._stdout_str = self._stdout_str.strip()
-        return self._stdout_str
-
-    @property
-    def stderr(self):
-        """String representation of standard error."""
-        if not self._stderr_str:
-            self._stderr_str = self._raw_stderr.decode(encoding=self._encoding,
-                                                       errors='replace')
-            self._stderr_str = self._stderr_str.strip()
-        return self._stderr_str
-
-    def __init__(self,
-                 command=[],
-                 stdout=bytes(),
-                 stderr=bytes(),
-                 exit_status=None,
-                 duration=0,
-                 did_timeout=False,
-                 encoding='utf-8'):
-        """
-        Args:
-            command: The command that was run. This will be a list containing
-                     the executed command and all args.
-            stdout: The raw bytes that standard output gave.
-            stderr: The raw bytes that standard error gave.
-            exit_status: The exit status of the command.
-            duration: How long the command ran.
-            did_timeout: True if the command timed out.
-            encoding: The encoding standard that the program uses.
-        """
-        self.command = command
-        self.exit_status = exit_status
-        self._raw_stdout = stdout
-        self._raw_stderr = stderr
-        self._stdout_str = None
-        self._stderr_str = None
-        self._encoding = encoding
-        self.duration = duration
-        self.did_timeout = did_timeout
-
-    def __repr__(self):
-        return ('job.Result(command=%r, stdout=%r, stderr=%r, exit_status=%r, '
-                'duration=%r, did_timeout=%r, encoding=%r)') % (
-                    self.command, self._raw_stdout, self._raw_stderr,
-                    self.exit_status, self.duration, self.did_timeout,
-                    self._encoding)
-
-
-def run(command,
-        timeout=60,
-        ignore_status=False,
-        env=None,
-        io_encoding='utf-8'):
-    """Execute a command in a subproccess and return its output.
-
-    Commands can be either shell commands (given as strings) or the
-    path and arguments to an executable (given as a list).  This function
-    will block until the subprocess finishes or times out.
-
-    Args:
-        command: The command to execute. Can be either a string or a list.
-        timeout: number seconds to wait for command to finish.
-        ignore_status: bool True to ignore the exit code of the remote
-                       subprocess.  Note that if you do ignore status codes,
-                       you should handle non-zero exit codes explicitly.
-        env: dict enviroment variables to setup on the remote host.
-        io_encoding: str unicode encoding of command output.
-
-    Returns:
-        A job.Result containing the results of the ssh command.
-
-    Raises:
-        job.TimeoutError: When the remote command took to long to execute.
-        Error: When the command had an error executing and ignore_status==False.
-    """
-    start_time = time.time()
-    proc = subprocess.Popen(command,
-                            env=env,
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE,
-                            shell=not isinstance(command, list))
-    # Wait on the process terminating
-    timed_out = False
-    out = bytes()
-    err = bytes()
-    try:
-        (out, err) = proc.communicate(timeout=timeout)
-    except subprocess.TimeoutExpired:
-        timed_out = True
-        proc.kill()
-        proc.wait()
-
-    result = Result(command=command,
-                    stdout=out,
-                    stderr=err,
-                    exit_status=proc.returncode,
-                    duration=time.time() - start_time,
-                    encoding=io_encoding,
-                    did_timeout=timed_out)
-    logging.debug(result)
-
-    if timed_out:
-        logging.error("Command %s with %s timeout setting timed out", command,
-                      timeout)
-        raise TimeoutError(result)
-
-    if not ignore_status and proc.returncode != 0:
-        raise Error(result)
-
-    return result
-
-
-def run_async(command, env=None):
-    """Execute a command in a subproccess asynchronously.
-
-    It is the callers responsibility to kill/wait on the resulting
-    subprocess.Popen object.
-
-    Commands can be either shell commands (given as strings) or the
-    path and arguments to an executable (given as a list).  This function
-    will not block.
-
-    Args:
-        command: The command to execute. Can be either a string or a list.
-        env: dict enviroment variables to setup on the remote host.
-
-    Returns:
-        A subprocess.Popen object representing the created subprocess.
-
-    """
-    proc = subprocess.Popen(command,
-                            env=env,
-                            preexec_fn=os.setpgrp,
-                            shell=not isinstance(command, list),
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.STDOUT)
-    logging.debug("command %s started with pid %s", command, proc.pid)
-    return proc
diff --git a/src/antlion/libs/proc/process.py b/src/antlion/libs/proc/process.py
deleted file mode 100644
index 906be73..0000000
--- a/src/antlion/libs/proc/process.py
+++ /dev/null
@@ -1,279 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import shlex
-import signal
-import subprocess
-import sys
-import time
-from threading import Thread
-
-_on_windows = sys.platform == 'win32'
-
-
-class ProcessError(Exception):
-    """Raised when invalid operations are run on a Process."""
-
-
-class Process(object):
-    """A Process object used to run various commands.
-
-    Attributes:
-        _command: The initial command to run.
-        _subprocess_kwargs: The kwargs to send to Popen for more control over
-                            execution.
-        _process: The subprocess.Popen object currently executing a process.
-        _listening_thread: The thread that is listening for the process to stop.
-        _redirection_thread: The thread that is redirecting process output.
-        _on_output_callback: The callback to call when output is received.
-        _on_terminate_callback: The callback to call when the process terminates
-                                without stop() being called first.
-        _started: Whether or not start() was called.
-        _stopped: Whether or not stop() was called.
-    """
-
-    def __init__(self, command, **kwargs):
-        """Creates a Process object.
-
-        Note that this constructor does not begin the process. To start the
-        process, use Process.start().
-        """
-        # Split command string into list if shell=True is not specified
-        self._use_shell = kwargs.get('shell', False)
-        if not self._use_shell and isinstance(command, str):
-            command = shlex.split(command)
-        self._command = command
-        self._subprocess_kwargs = kwargs
-        if _on_windows:
-            self._subprocess_kwargs['creationflags'] = (
-                subprocess.CREATE_NEW_PROCESS_GROUP)
-        else:
-            self._subprocess_kwargs['start_new_session'] = True
-        self._process = None
-
-        self._listening_thread = None
-        self._redirection_thread = None
-        self._on_output_callback = lambda *args, **kw: None
-        self._binary_output = False
-        self._on_terminate_callback = lambda *args, **kw: ''
-
-        self._started = False
-        self._stopped = False
-
-    def set_on_output_callback(self, on_output_callback, binary=False):
-        """Sets the on_output_callback function.
-
-        Args:
-            on_output_callback: The function to be called when output is sent to
-                the output. The output callback has the following signature:
-
-                >>> def on_output_callback(output_line):
-                >>>     return None
-
-            binary: If True, read the process output as raw binary.
-        Returns:
-            self
-        """
-        self._on_output_callback = on_output_callback
-        self._binary_output = binary
-        return self
-
-    def set_on_terminate_callback(self, on_terminate_callback):
-        """Sets the on_self_terminate callback function.
-
-        Args:
-            on_terminate_callback: The function to be called when the process
-                has terminated on its own. The callback has the following
-                signature:
-
-                >>> def on_self_terminate_callback(popen_process):
-                >>>     return 'command to run' or None
-
-                If a string is returned, the string returned will be the command
-                line used to run the command again. If None is returned, the
-                process will end without restarting.
-
-        Returns:
-            self
-        """
-        self._on_terminate_callback = on_terminate_callback
-        return self
-
-    def start(self):
-        """Starts the process's execution."""
-        if self._started:
-            raise ProcessError('Process has already started.')
-        self._started = True
-        self._process = None
-
-        self._listening_thread = Thread(target=self._exec_loop)
-        self._listening_thread.start()
-
-        time_up_at = time.time() + 1
-
-        while self._process is None:
-            if time.time() > time_up_at:
-                raise OSError('Unable to open process!')
-
-        self._stopped = False
-
-    @staticmethod
-    def _get_timeout_left(timeout, start_time):
-        return max(.1, timeout - (time.time() - start_time))
-
-    def is_running(self):
-        """Checks that the underlying Popen process is still running
-
-        Returns:
-            True if the process is running.
-        """
-        return self._process is not None and self._process.poll() is None
-
-    def _join_threads(self):
-        """Waits for the threads associated with the process to terminate."""
-        if self._listening_thread is not None:
-            self._listening_thread.join()
-            self._listening_thread = None
-
-        if self._redirection_thread is not None:
-            self._redirection_thread.join()
-            self._redirection_thread = None
-
-    def _kill_process(self):
-        """Kills the underlying process/process group. Implementation is
-        platform-dependent."""
-        if _on_windows:
-            subprocess.check_call('taskkill /F /T /PID %s' % self._process.pid)
-        else:
-            self.signal(signal.SIGKILL)
-
-    def wait(self, kill_timeout=60.0):
-        """Waits for the process to finish execution.
-
-        If the process has reached the kill_timeout, the process will be killed
-        instead.
-
-        Note: the on_self_terminate callback will NOT be called when calling
-        this function.
-
-        Args:
-            kill_timeout: The amount of time to wait until killing the process.
-        """
-        if self._stopped:
-            raise ProcessError('Process is already being stopped.')
-        self._stopped = True
-
-        try:
-            self._process.wait(kill_timeout)
-        except subprocess.TimeoutExpired:
-            self._kill_process()
-        finally:
-            self._join_threads()
-            self._started = False
-
-    def signal(self, sig):
-        """Sends a signal to the process.
-
-        Args:
-            sig: The signal to be sent.
-        """
-        if _on_windows:
-            raise ProcessError('Unable to call Process.signal on windows.')
-
-        pgid = os.getpgid(self._process.pid)
-        os.killpg(pgid, sig)
-
-    def stop(self):
-        """Stops the process.
-
-        This command is effectively equivalent to kill, but gives time to clean
-        up any related work on the process, such as output redirection.
-
-        Note: the on_self_terminate callback will NOT be called when calling
-        this function.
-        """
-        self.wait(0)
-
-    def _redirect_output(self):
-        """Redirects the output from the command into the on_output_callback."""
-        if self._binary_output:
-            while True:
-                data = self._process.stdout.read(1024)
-
-                if not data:
-                    return
-                else:
-                    self._on_output_callback(data)
-        else:
-            while True:
-                line = self._process.stdout.readline().decode('utf-8',
-                                                              errors='replace')
-
-                if not line:
-                    return
-                else:
-                    # Output the line without trailing \n and whitespace.
-                    self._on_output_callback(line.rstrip())
-
-    @staticmethod
-    def __start_process(command, **kwargs):
-        """A convenient wrapper function for starting the process."""
-        acts_logger = logging.getLogger()
-        acts_logger.debug(
-            'Starting command "%s" with kwargs %s', command, kwargs)
-        return subprocess.Popen(command, **kwargs)
-
-    def _exec_loop(self):
-        """Executes Popen in a loop.
-
-        When Popen terminates without stop() being called,
-        self._on_terminate_callback() will be called. The returned value from
-        _on_terminate_callback will then be used to determine if the loop should
-        continue and start up the process again. See set_on_terminate_callback()
-        for more information.
-        """
-        command = self._command
-        while True:
-            self._process = self.__start_process(command,
-                                                 stdout=subprocess.PIPE,
-                                                 stderr=subprocess.STDOUT,
-                                                 bufsize=1,
-                                                 **self._subprocess_kwargs)
-            self._redirection_thread = Thread(target=self._redirect_output)
-            self._redirection_thread.start()
-            self._process.wait()
-
-            if self._stopped:
-                logging.debug('The process for command %s was stopped.',
-                              command)
-                break
-            else:
-                logging.debug('The process for command %s terminated.',
-                              command)
-                # Wait for all output to be processed before sending
-                # _on_terminate_callback()
-                self._redirection_thread.join()
-                logging.debug('Beginning on_terminate_callback for %s.',
-                              command)
-                retry_value = self._on_terminate_callback(self._process)
-                if retry_value:
-                    if not self._use_shell and isinstance(retry_value, str):
-                        retry_value = shlex.split(retry_value)
-                    command = retry_value
-                else:
-                    break
diff --git a/src/antlion/libs/test_binding/__init__.py b/src/antlion/libs/test_binding/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/libs/test_binding/__init__.py
+++ /dev/null
diff --git a/src/antlion/libs/test_binding/all_tests_decorator.py b/src/antlion/libs/test_binding/all_tests_decorator.py
deleted file mode 100644
index 906ac6b..0000000
--- a/src/antlion/libs/test_binding/all_tests_decorator.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import inspect
-
-
-def for_all_tests(decorator):
-    """Applies a decorator to all tests within a test class.
-
-    Args:
-        decorator: The decorator to apply.
-
-    Returns:
-        The class decorator function.
-    """
-
-    def _decorate(decorated):
-        test_names = []
-        for name, value in inspect.getmembers(decorated,
-                                              predicate=inspect.isfunction):
-            if name.startswith("test_"):
-                test_names.append(name)
-
-        for test_name in test_names:
-            setattr(decorated, test_name,
-                    decorator(getattr(decorated, test_name)))
-
-        return decorated
-
-    return _decorate
diff --git a/src/antlion/libs/test_binding/binding.py b/src/antlion/libs/test_binding/binding.py
deleted file mode 100644
index df6387b..0000000
--- a/src/antlion/libs/test_binding/binding.py
+++ /dev/null
@@ -1,94 +0,0 @@
-from antlion import signals
-
-
-class Binding(object):
-    """Creates a binding for a test method with a decorator.
-
-    Python stores all functions as a variable bound to an object. When that
-    object is called it will execute the function logic. It is possible to
-    create a wrapper object around the real function object to perform custom
-    logic and store additional meta-data.
-
-    This object acts as a wrapper for test functions. It allows binding
-    additional test logic to a test.
-    """
-
-    def __init__(self, inner, arg_modifier=None, before=None, after=None,
-                 signal_modifier=None, instance_args=None):
-        """
-        Args:
-            inner: The inner method or other binding being bound to.
-            arg_modifier: A function of
-                (*args, **kwargs) => args kwargs that will modify the
-                arguments to pass to the bound target
-            before: A function of (*args, **kwargs) => None that will
-                be called before the bound target.
-            after: A function of (result, *args, **kwargs) => None
-                that will be called after the bound target.
-            signal_modifier:  A function of
-                (signal, *args, **kwargs) => signal that will be
-                called before the signal is sent to modify the signal to send.
-        """
-        self.instance_args = instance_args or []
-        self.arg_modifier = arg_modifier
-        self.signal_modifier = signal_modifier
-        self.after = after
-        self.before = before
-        self.inner = inner
-        self.__name__ = inner.__name__
-
-    def __get__(self, instance, owner):
-        """Called when a new isntance of the test class is created.
-
-        When a new instance of a class is created all method bindings must
-        be bound as instance bindings. This transforms the function call
-        signature to be func(self, *args, **kwargs) to func(*args, **kwargs).
-        The newly created binding handles inserting the self variable so the
-        caller does not have to.
-
-        This binding needs to do similar logic by creating a new binding for
-        the instance that memorizes the instance as a passed in arg.
-        """
-        return Binding(self.inner,
-                       arg_modifier=self.arg_modifier,
-                       before=self.before,
-                       after=self.after,
-                       signal_modifier=self.signal_modifier,
-                       instance_args=[instance] + self.instance_args)
-
-    def __call__(self, *args, **kwargs):
-        """Called when the test is executed."""
-        full_args = self.instance_args + list(args)
-
-        try:
-            if self.arg_modifier:
-                full_args, kwargs = self.arg_modifier(self.inner, *full_args,
-                                                      **kwargs)
-
-            if self.before:
-                self.before(self.inner, *full_args, **kwargs)
-
-            result = 'UNKNOWN ERROR'
-            try:
-                result = self.inner(*full_args, **kwargs)
-            finally:
-                if self.after:
-                    self.after(self.inner, result, *full_args, **kwargs)
-
-            if result or result is None:
-                new_signal = signals.TestPass('')
-            else:
-                new_signal = signals.TestFailure('')
-        except signals.TestSignal as signal:
-            new_signal = signal
-
-        if self.signal_modifier:
-            new_signal = self.signal_modifier(self.inner, new_signal,
-                                              *full_args,
-                                              **kwargs)
-
-        raise new_signal
-
-    def __getattr__(self, item):
-        """A simple pass through for any variable we do not known about."""
-        return getattr(self.inner, item)
diff --git a/src/antlion/libs/uicd/__init__.py b/src/antlion/libs/uicd/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/libs/uicd/__init__.py
+++ /dev/null
diff --git a/src/antlion/libs/uicd/uicd_cli.py b/src/antlion/libs/uicd/uicd_cli.py
deleted file mode 100644
index 3ca9bf4..0000000
--- a/src/antlion/libs/uicd/uicd_cli.py
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import shutil
-import tempfile
-
-from antlion import logger
-from antlion.libs.proc import job
-
-_UICD_JAR_CMD = 'java -jar %s/uicd-commandline.jar'
-_UNZIP_CMD = 'tar -xzf %s -C %s'
-
-
-class UicdError(Exception):
-    """Raised for exceptions that occur in UIConductor-related tasks"""
-
-
-class UicdCli(object):
-    """Provides an interface for running UIConductor (Uicd) workflows under its
-    CLI.
-
-    This class does not handle workflow creation, which requires the Uicd
-    frontend.
-    """
-    def __init__(self, uicd_zip, workflow_paths, log_path=None):
-        """Creates a UicdCli object. Extracts the required uicd-cli binaries.
-
-        Args:
-            uicd_zip: The path to uicd_cli.tar.gz
-            workflow_paths: List of paths to uicd workflows and/or directories
-                containing them.
-            log_path: Directory for storing logs generated by Uicd.
-        """
-        # This is done so unit tests can cache the mocked shutil.rmtree value
-        # and call it on __del__ when the patch has been lifted.
-        self._rm_tmpdir = shutil.rmtree
-
-        self._uicd_zip = uicd_zip[0] if isinstance(uicd_zip, list) else uicd_zip
-        self._uicd_path = tempfile.mkdtemp(prefix='uicd')
-        self._log_path = log_path
-        if self._log_path:
-            os.makedirs(self._log_path, exist_ok=True)
-        self._log = logger.create_tagged_trace_logger(tag='Uicd')
-        self._set_workflows(workflow_paths)
-        self._setup_cli()
-
-    def _set_workflows(self, workflow_paths):
-        """Set up a dictionary that maps workflow name to its file location.
-        This allows the user to specify workflows to run without having to
-        provide the full path.
-
-        Args:
-            workflow_paths: List of paths to uicd workflows and/or directories
-                containing them.
-
-        Raises:
-            UicdError if two or more Uicd workflows share the same file name
-        """
-        if isinstance(workflow_paths, str):
-            workflow_paths = [workflow_paths]
-
-        # get a list of workflow files from specified paths
-        def _raise(e):
-            raise e
-        workflow_files = []
-        for path in workflow_paths:
-            if os.path.isfile(path):
-                workflow_files.append(path)
-            else:
-                for (root, _, files) in os.walk(path, onerror=_raise):
-                    for file in files:
-                        workflow_files.append(os.path.join(root, file))
-
-        # populate the dictionary
-        self._workflows = {}
-        for path in workflow_files:
-            workflow_name = os.path.basename(path)
-            if workflow_name in self._workflows.keys():
-                raise UicdError('Uicd workflows may not share the same name.')
-            self._workflows[workflow_name] = path
-
-    def _setup_cli(self):
-        """Extract tar from uicd_zip and place unzipped files in uicd_path.
-
-        Raises:
-            Exception if the extraction fails.
-        """
-        self._log.debug('Extracting uicd-cli binaries from %s' % self._uicd_zip)
-        unzip_cmd = _UNZIP_CMD % (self._uicd_zip, self._uicd_path)
-        try:
-            job.run(unzip_cmd.split())
-        except job.Error:
-            self._log.exception('Failed to extract uicd-cli binaries.')
-            raise
-
-    def run(self, serial, workflows, timeout=120):
-        """Run specified workflows on the UIConductor CLI.
-
-        Args:
-            serial: Device serial
-            workflows: List or str of workflows to run.
-            timeout: Number seconds to wait for command to finish.
-        """
-        base_cmd = _UICD_JAR_CMD % self._uicd_path
-        if isinstance(workflows, str):
-            workflows = [workflows]
-        for workflow_name in workflows:
-            self._log.info('Running workflow "%s"' % workflow_name)
-            if workflow_name in self._workflows:
-                args = '-d %s -i %s' % (serial, self._workflows[workflow_name])
-            else:
-                self._log.error(
-                    'The workflow "%s" does not exist.' % workflow_name)
-                continue
-            if self._log_path:
-                args = '%s -o %s' % (args, self._log_path)
-            cmd = '%s %s' % (base_cmd, args)
-            try:
-                result = job.run(cmd.split(), timeout=timeout)
-            except job.Error:
-                self._log.exception(
-                    'Failed to run workflow "%s"' % workflow_name)
-                continue
-            if result.stdout:
-                stdout_split = result.stdout.splitlines()
-                if len(stdout_split) > 2:
-                    self._log.debug('Uicd logs stored at %s' % stdout_split[2])
-
-    def __del__(self):
-        """Delete the temp directory to Uicd CLI binaries upon ACTS exit."""
-        self._rm_tmpdir(self._uicd_path)
diff --git a/src/antlion/libs/utils/__init__.py b/src/antlion/libs/utils/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/libs/utils/__init__.py
+++ /dev/null
diff --git a/src/antlion/libs/utils/multithread.py b/src/antlion/libs/utils/multithread.py
deleted file mode 100644
index 31baaf7..0000000
--- a/src/antlion/libs/utils/multithread.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import concurrent.futures
-import logging
-
-def task_wrapper(task):
-    """Task wrapper for multithread_func
-
-    Args:
-        task[0]: function to be wrapped.
-        task[1]: function args.
-
-    Returns:
-        Return value of wrapped function call.
-    """
-    func = task[0]
-    params = task[1]
-    return func(*params)
-
-
-def run_multithread_func_async(log, task):
-    """Starts a multi-threaded function asynchronously.
-
-    Args:
-        log: log object.
-        task: a task to be executed in parallel.
-
-    Returns:
-        Future object representing the execution of the task.
-    """
-    executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
-    try:
-        future_object = executor.submit(task_wrapper, task)
-    except Exception as e:
-        log.error("Exception error %s", e)
-        raise
-    return future_object
-
-
-def run_multithread_func(log, tasks):
-    """Run multi-thread functions and return results.
-
-    Args:
-        log: log object.
-        tasks: a list of tasks to be executed in parallel.
-
-    Returns:
-        results for tasks.
-    """
-    MAX_NUMBER_OF_WORKERS = 10
-    number_of_workers = min(MAX_NUMBER_OF_WORKERS, len(tasks))
-    executor = concurrent.futures.ThreadPoolExecutor(
-        max_workers=number_of_workers)
-    if not log: log = logging
-    try:
-        results = list(executor.map(task_wrapper, tasks))
-    except Exception as e:
-        log.error("Exception error %s", e)
-        raise
-    executor.shutdown()
-    if log:
-        log.info("multithread_func %s result: %s",
-                 [task[0].__name__ for task in tasks], results)
-    return results
-
-
-def multithread_func(log, tasks):
-    """Multi-thread function wrapper.
-
-    Args:
-        log: log object.
-        tasks: tasks to be executed in parallel.
-
-    Returns:
-        True if all tasks return True.
-        False if any task return False.
-    """
-    results = run_multithread_func(log, tasks)
-    for r in results:
-        if not r:
-            return False
-    return True
-
-
-def multithread_func_and_check_results(log, tasks, expected_results):
-    """Multi-thread function wrapper.
-
-    Args:
-        log: log object.
-        tasks: tasks to be executed in parallel.
-        expected_results: check if the results from tasks match expected_results.
-
-    Returns:
-        True if expected_results are met.
-        False if expected_results are not met.
-    """
-    return_value = True
-    results = run_multithread_func(log, tasks)
-    log.info("multithread_func result: %s, expecting %s", results,
-             expected_results)
-    for task, result, expected_result in zip(tasks, results, expected_results):
-        if result != expected_result:
-            logging.info("Result for task %s is %s, expecting %s", task[0],
-                         result, expected_result)
-            return_value = False
-    return return_value
diff --git a/src/antlion/libs/utils/timer.py b/src/antlion/libs/utils/timer.py
deleted file mode 100644
index 2350aa9..0000000
--- a/src/antlion/libs/utils/timer.py
+++ /dev/null
@@ -1,132 +0,0 @@
-"""A simple timer class to keep record of the elapsed time."""
-
-import time
-
-
-class TimeRecorder(object):
-    """Main class to keep time records.
-
-    A timer record contains an ID, a start timestamp, and an optional stop
-    timestamps. The elapsed time calculated as stop - start.
-    If the stop timestamp is not set, current system time will be used.
-
-    Example usage:
-    >>> timer = TimeRecorder()
-    >>> # start a single timer, ID = 'lunch'
-    >>> timer.start_timer('lunch')
-    >>> # start two timers at the same time
-    >>> timer.start_timer(['salad', 'dessert'])
-    >>> # stop a single timer
-    >>> timer.stop_timer('salad')
-    >>> # get elapsed time of all timers
-    >>> timer.elapsed()
-    """
-
-    def __init__(self):
-        self.recorder = dict()
-
-    def start_timer(self, record_ids='Default', force=False):
-        """Start one or more timer.
-
-        Starts one or more timer at current system time with the record ID
-        specified in record_ids. Will overwrite/restart existing timer.
-
-        Args:
-            record_ids: timer record IDs. Can be a string or a list of strings.
-                        If the record ID is a list, will start multiple timers
-                        at the same time.
-            force: Force update the timer's start time if the specified timer
-                   has already started. By default we won't update started timer
-                   again.
-
-        Returns:
-            Number of timer started.
-        """
-        if isinstance(record_ids, str):
-            record_ids = [record_ids]
-        start_time = time.time()
-        for rec in record_ids:
-            if force or rec not in self.recorder:
-                self.recorder[rec] = [start_time, None]
-        return len(record_ids)
-
-    def stop_timer(self, record_ids=None, force=False):
-        """Stop one or more timer.
-
-        Stops one or more timer at current system time.
-
-        Args:
-            record_ids: timer record IDs. Can be a string or a list of strings.
-                        If the record ID is a list, will stop multiple timers at
-                        the same time. By default, it will stop all timers.
-            force: Force update the timer's stop time if the specified timer has
-                   already stopped. By default we won't update stopped timer
-                   again.
-
-        Returns:
-            Number of timer stopped.
-        """
-        # stop all record if id is not provided.
-        if record_ids is None:
-            record_ids = self.recorder.keys()
-        elif isinstance(record_ids, str):
-            record_ids = [record_ids]
-        stop_time = time.time()
-        num_rec = 0
-        for rec in record_ids:
-            if rec in self.recorder:
-                if force or self.recorder[rec][1] is None:
-                    self.recorder[rec][1] = stop_time
-                    num_rec += 1
-        return num_rec
-
-    def elapsed(self, record_ids=None):
-        """Return elapsed time in seconds.
-
-        For records with no stop time, will calculate based on the current
-        system time.
-
-        Args:
-            record_ids: timer record IDs. Can be a string or a list of strings.
-                        If the record ID is a list, will compute the elapsed
-                        time for all specified timers. Default value (None)
-                        calculates elapsed time for all existing timers.
-
-        Returns:
-            The elapsed time. If the record_ids is a string, will return the
-            time in seconds as float type. If the record_ids is a list or
-            default (None), will return a dict of the <record id, elapsed time>.
-        """
-        single_record = False
-        if record_ids is None:
-            record_ids = self.recorder.keys()
-        elif isinstance(record_ids, str):
-            record_ids = [record_ids]
-            single_record = True
-        results = dict()
-        curr_time = time.time()
-        for rec in record_ids:
-            if rec in self.recorder:
-                if self.recorder[rec][1] is not None:
-                    results[rec] = self.recorder[rec][1] - self.recorder[rec][0]
-                else:
-                    results[rec] = curr_time - self.recorder[rec][0]
-        if not results:  # no valid record found
-            return None
-        elif single_record and len(record_ids) == 1:
-            # only 1 record is requested, return results directly
-            return results[record_ids[0]]
-        else:
-            return results  # multiple records, return a dict.
-
-    def clear(self, record_ids=None):
-        """Clear existing time records."""
-        if record_ids is None:
-            self.recorder = dict()
-            return
-
-        if isinstance(record_ids, str):
-            record_ids = [record_ids]
-        for rec in record_ids:
-            if rec in self.recorder:
-                del self.recorder[rec]
diff --git a/src/antlion/libs/version_selector.py b/src/antlion/libs/version_selector.py
deleted file mode 100644
index 2e55eb5..0000000
--- a/src/antlion/libs/version_selector.py
+++ /dev/null
@@ -1,291 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import bisect
-from collections import namedtuple
-import inspect
-import numbers
-
-
-def _fully_qualified_name(func):
-    """Returns the fully-qualified name of a function.
-
-    Note: __qualname__ is not the fully qualified name. It is the the fully
-          qualified name without the module name.
-
-    See: https://www.python.org/dev/peps/pep-3155/#naming-choice
-    """
-    return '%s:%s' % (func.__module__, func.__qualname__)
-
-
-_FrameInfo = namedtuple('_FrameInfo', ['frame', 'filename', 'lineno',
-                                       'function', 'code_context', 'index'])
-
-
-def _inspect_stack():
-    """Returns named tuple for each tuple returned by inspect.stack().
-
-    For Python3.4 and earlier, which returns unnamed tuples for inspect.stack().
-
-    Returns:
-        list of _FrameInfo named tuples representing stack frame info.
-    """
-    return [_FrameInfo(*info) for info in inspect.stack()]
-
-
-def set_version(get_version_func, min_version, max_version):
-    """Returns a decorator returning a VersionSelector containing all versions
-    of the decorated func.
-
-    Args:
-        get_version_func: The lambda that returns the version level based on the
-                          arguments sent to versioned_func
-        min_version: The minimum API level for calling versioned_func.
-        max_version: The maximum API level for calling versioned_func.
-
-    Raises:
-        SyntaxError if get_version_func is different between versioned funcs.
-
-    Returns:
-        A VersionSelector containing all versioned calls to the decorated func.
-    """
-    func_owner_variables = None
-    for frame_info in _inspect_stack():
-        if frame_info.function == '<module>':
-            # We've reached the end of the most recently imported module in our
-            # stack without finding a class first. This indicates that the
-            # decorator is on a module-level function.
-            func_owner_variables = frame_info.frame.f_locals
-            break
-        elif '__qualname__' in frame_info.frame.f_locals:
-            # __qualname__ appears in stack frames of objects that have
-            # yet to be interpreted. Here we can guarantee that the object in
-            # question is the innermost class that contains the function.
-            func_owner_variables = frame_info.frame.f_locals
-            break
-
-    def decorator(func):
-        if isinstance(func, (staticmethod, classmethod)):
-            raise SyntaxError('@staticmethod and @classmethod decorators must '
-                              'be placed before the versioning decorator.')
-        func_name = func.__name__
-
-        if func_name in func_owner_variables:
-            # If the function already exists within the class/module, get it.
-            version_selector = func_owner_variables[func_name]
-            if isinstance(version_selector, (staticmethod, classmethod)):
-                # If the function was also decorated with @staticmethod or
-                # @classmethod, the version_selector will be stored in __func__.
-                version_selector = version_selector.__func__
-            if not isinstance(version_selector, _VersionSelector):
-                raise SyntaxError('The previously defined function "%s" is not '
-                                  'decorated with a versioning decorator.' %
-                                  version_selector.__qualname__)
-            if (version_selector.comparison_func_name !=
-                    _fully_qualified_name(get_version_func)):
-                raise SyntaxError('Functions of the same name must be decorated'
-                                  ' with the same versioning decorator.')
-        else:
-            version_selector = _VersionSelector(get_version_func)
-
-        version_selector.add_fn(func, min_version, max_version)
-        return version_selector
-
-    return decorator
-
-
-class _VersionSelector(object):
-    """A class that maps API levels to versioned functions for that API level.
-
-    Attributes:
-        entry_list: A sorted list of Entries that define which functions to call
-                    for a given API level.
-    """
-
-    class ListWrap(object):
-        """This class wraps a list of VersionSelector.Entry objects.
-
-        This is required to make the bisect functions work, since the underlying
-        implementation of those functions do not use __cmp__, __lt__, __gt__,
-        etc. because they are not implemented in Python.
-
-        See: https://docs.python.org/3/library/bisect.html#other-examples
-        """
-
-        def __init__(self, entry_list):
-            self.list = entry_list
-
-        def __len__(self):
-            return len(self.list)
-
-        def __getitem__(self, index):
-            return self.list[index].level
-
-    class Entry(object):
-        def __init__(self, level, func, direction):
-            """Creates an Entry object.
-
-            Args:
-                level: The API level for this point.
-                func: The function to call.
-                direction: (-1, 0 or 1) the  direction the ray from this level
-                           points towards.
-            """
-            self.level = level
-            self.func = func
-            self.direction = direction
-
-    def __init__(self, version_func):
-        """Creates a VersionSelector object.
-
-        Args:
-            version_func: The function that converts the arguments into an
-                          integer that represents the API level.
-        """
-        self.entry_list = list()
-        self.get_version = version_func
-        self.instance = None
-        self.comparison_func_name = _fully_qualified_name(version_func)
-
-    def __name__(self):
-        if len(self.entry_list) > 0:
-            return self.entry_list[0].func.__name__
-        return '%s<%s>' % (self.__class__.__name__, self.get_version.__name__)
-
-    def print_ranges(self):
-        """Returns all ranges as a string.
-
-        The string is formatted as '[min_a, max_a], [min_b, max_b], ...'
-        """
-        ranges = []
-        min_boundary = None
-        for entry in self.entry_list:
-            if entry.direction == 1:
-                min_boundary = entry.level
-            elif entry.direction == 0:
-                ranges.append(str([entry.level, entry.level]))
-            else:
-                ranges.append(str([min_boundary, entry.level]))
-        return ', '.join(ranges)
-
-    def add_fn(self, fn, min_version, max_version):
-        """Adds a function to the VersionSelector for the given API range.
-
-        Args:
-            fn: The function to call when the API level is met.
-            min_version: The minimum version level for calling this function.
-            max_version: The maximum version level for calling this function.
-
-        Raises:
-            ValueError if min_version > max_version or another versioned
-                       function overlaps this new range.
-        """
-        if min_version > max_version:
-            raise ValueError('The minimum API level must be greater than the'
-                             'maximum API level.')
-        insertion_index = bisect.bisect_left(
-            _VersionSelector.ListWrap(self.entry_list), min_version)
-        if insertion_index != len(self.entry_list):
-            right_neighbor = self.entry_list[insertion_index]
-            if not (min_version <= max_version < right_neighbor.level and
-                    right_neighbor.direction != -1):
-                raise ValueError('New range overlaps another API level. '
-                                 'New range: %s, Existing ranges: %s' %
-                                 ([min_version, max_version],
-                                  self.print_ranges()))
-        if min_version == max_version:
-            new_entry = _VersionSelector.Entry(min_version, fn, direction=0)
-            self.entry_list.insert(insertion_index, new_entry)
-        else:
-            # Inserts the 2 entries into the entry list at insertion_index.
-            self.entry_list[insertion_index:insertion_index] = [
-                _VersionSelector.Entry(min_version, fn, direction=1),
-                _VersionSelector.Entry(max_version, fn, direction=-1)]
-
-    def __call__(self, *args, **kwargs):
-        """Calls the proper versioned function for the given API level.
-
-        This is a magic python function that gets called whenever parentheses
-        immediately follow the attribute access (e.g. obj.version_selector()).
-
-        Args:
-            *args, **kwargs: The arguments passed into this call. These
-                             arguments are intended for the decorated function.
-
-        Returns:
-            The result of the called function.
-        """
-        if self.instance is not None:
-            # When the versioned function is a classmethod, the class is passed
-            # into __call__ as the first argument.
-            level = self.get_version(self.instance, *args, **kwargs)
-        else:
-            level = self.get_version(*args, **kwargs)
-        if not isinstance(level, numbers.Number):
-            kwargs_out = []
-            for key, value in kwargs.items():
-                kwargs_out.append('%s=%s' % (key, str(value)))
-            args_out = str(list(args))[1:-1]
-            kwargs_out = ', '.join(kwargs_out)
-            raise ValueError(
-                'The API level the function %s returned %s for the arguments '
-                '(%s). This function must return a number.' %
-                (self.get_version.__qualname__, repr(level),
-                 ', '.join(i for i in [args_out, kwargs_out] if i)))
-
-        index = bisect.bisect_left(_VersionSelector.ListWrap(self.entry_list),
-                                   level)
-
-        # Check to make sure the function being called is within the API range
-        if index == len(self.entry_list):
-            raise NotImplementedError('No function %s exists for API level %s'
-                                      % (self.entry_list[0].func.__qualname__,
-                                         level))
-        closest_entry = self.entry_list[index]
-        if (closest_entry.direction == 0 and closest_entry.level != level or
-                closest_entry.direction == 1 and closest_entry.level > level or
-                closest_entry.direction == -1 and closest_entry.level < level):
-            raise NotImplementedError('No function %s exists for API level %s'
-                                      % (self.entry_list[0].func.__qualname__,
-                                         level))
-
-        func = self.entry_list[index].func
-        if self.instance is None:
-            # __get__ was not called, so the function is module-level.
-            return func(*args, **kwargs)
-
-        return func(self.instance, *args, **kwargs)
-
-    def __get__(self, instance, owner):
-        """Gets the instance and owner whenever this function is obtained.
-
-        These arguments will be used to pass in the self to instance methods.
-        If the function is marked with @staticmethod or @classmethod, those
-        decorators will handle removing self or getting the class, respectively.
-
-        Note that this function will NOT be called on module-level functions.
-
-        Args:
-            instance: The instance of the object this function is being called
-                      from. If this function is static or a classmethod,
-                      instance will be None.
-            owner: The object that owns this function. This is the class object
-                   that defines the function.
-
-        Returns:
-            self, this VersionSelector instance.
-        """
-        self.instance = instance
-        return self
diff --git a/src/antlion/libs/yaml_writer.py b/src/antlion/libs/yaml_writer.py
deleted file mode 100644
index 8c710e1..0000000
--- a/src/antlion/libs/yaml_writer.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import yaml
-
-# Allow yaml to dump OrderedDict
-yaml.add_representer(collections.OrderedDict,
-                     lambda dumper, data: dumper.represent_dict(data),
-                     Dumper=yaml.SafeDumper)
-
-
-def _str_representer(dumper, data):
-    if len(data.splitlines()) > 1:
-        data = '\n'.join(line.replace('\t', '    ').rstrip()
-                         for line in data.splitlines())
-        return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
-    return dumper.represent_scalar('tag:yaml.org,2002:str', data)
-
-
-# Automatically convert multiline strings into block literals
-yaml.add_representer(str, _str_representer, Dumper=yaml.SafeDumper)
-
-
-_DUMP_KWARGS = dict(explicit_start=True, allow_unicode=True, indent=4)
-if yaml.__version__ >= '5.1':
-    _DUMP_KWARGS.update(sort_keys=False)
-
-
-def safe_dump(content, file):
-    """Calls yaml.safe_dump to write content to the file, with additional
-    parameters from _DUMP_KWARGS."""
-    yaml.safe_dump(content, file, **_DUMP_KWARGS)
diff --git a/src/antlion/logger.py b/src/antlion/logger.py
deleted file mode 100755
index 599e08b..0000000
--- a/src/antlion/logger.py
+++ /dev/null
@@ -1,317 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import logging
-import os
-import re
-
-from copy import copy
-
-from antlion import tracelogger
-from antlion.libs.logging import log_stream
-from antlion.libs.logging.log_stream import LogStyles
-
-log_line_format = "%(asctime)s.%(msecs).03d %(levelname)s %(message)s"
-# The micro seconds are added by the format string above,
-# so the time format does not include ms.
-log_line_time_format = "%Y-%m-%d %H:%M:%S"
-log_line_timestamp_len = 23
-
-logline_timestamp_re = re.compile("\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d\d\d")
-
-
-# yapf: disable
-class Style:
-    RESET  = '\033[0m'
-    BRIGHT = '\033[1m'
-    DIM    = '\033[2m'
-    NORMAL = '\033[22m'
-
-
-class Fore:
-    BLACK   = '\033[30m'
-    RED     = '\033[31m'
-    GREEN   = '\033[32m'
-    YELLOW  = '\033[33m'
-    BLUE    = '\033[34m'
-    MAGENTA = '\033[35m'
-    CYAN    = '\033[36m'
-    WHITE   = '\033[37m'
-    RESET   = '\033[39m'
-
-
-class Back:
-    BLACK   = '\033[40m'
-    RED     = '\033[41m'
-    GREEN   = '\033[42m'
-    YELLOW  = '\033[43m'
-    BLUE    = '\033[44m'
-    MAGENTA = '\033[45m'
-    CYAN    = '\033[46m'
-    WHITE   = '\033[47m'
-    RESET   = '\033[49m'
-
-
-LOG_LEVELS = {
-  'DEBUG':     {'level': 10, 'style': Fore.GREEN + Style.BRIGHT},
-  'CASE':      {'level': 11, 'style': Back.BLUE + Fore.WHITE + Style.BRIGHT},
-  'SUITE':     {'level': 12, 'style': Back.MAGENTA + Fore.WHITE + Style.BRIGHT},
-  'INFO':      {'level': 20, 'style': Style.NORMAL},
-  'STEP':      {'level': 15, 'style': Fore.WHITE + Style.BRIGHT},
-  'WARNING':   {'level': 30, 'style': Fore.YELLOW + Style.BRIGHT},
-  'ERROR':     {'level': 40, 'style': Fore.RED + Style.BRIGHT},
-  'EXCEPTION': {'level': 45, 'style': Back.RED + Fore.WHITE + Style.BRIGHT},
-  'DEVICE':    {'level': 51, 'style': Fore.CYAN + Style.BRIGHT},
-}
-# yapf: enable
-
-
-class ColoredLogFormatter(logging.Formatter):
-    def format(self, record):
-        colored_record = copy(record)
-        level_name = colored_record.levelname
-        style = LOG_LEVELS[level_name]['style']
-        formatted_level_name = '%s%s%s' % (style, level_name, Style.RESET)
-        colored_record.levelname = formatted_level_name
-        return super().format(colored_record)
-
-
-def _parse_logline_timestamp(t):
-    """Parses a logline timestamp into a tuple.
-
-    Args:
-        t: Timestamp in logline format.
-
-    Returns:
-        An iterable of date and time elements in the order of month, day, hour,
-        minute, second, microsecond.
-    """
-    date, time = t.split(' ')
-    year, month, day = date.split('-')
-    h, m, s = time.split(':')
-    s, ms = s.split('.')
-    return year, month, day, h, m, s, ms
-
-
-def is_valid_logline_timestamp(timestamp):
-    if len(timestamp) == log_line_timestamp_len:
-        if logline_timestamp_re.match(timestamp):
-            return True
-    return False
-
-
-def logline_timestamp_comparator(t1, t2):
-    """Comparator for timestamps in logline format.
-
-    Args:
-        t1: Timestamp in logline format.
-        t2: Timestamp in logline format.
-
-    Returns:
-        -1 if t1 < t2; 1 if t1 > t2; 0 if t1 == t2.
-    """
-    dt1 = _parse_logline_timestamp(t1)
-    dt2 = _parse_logline_timestamp(t2)
-    for u1, u2 in zip(dt1, dt2):
-        if u1 < u2:
-            return -1
-        elif u1 > u2:
-            return 1
-    return 0
-
-
-def _get_timestamp(time_format, delta=None):
-    t = datetime.datetime.now()
-    if delta:
-        t = t + datetime.timedelta(seconds=delta)
-    return t.strftime(time_format)[:-3]
-
-
-def epoch_to_log_line_timestamp(epoch_time):
-    """Converts an epoch timestamp in ms to log line timestamp format, which
-    is readable for humans.
-
-    Args:
-        epoch_time: integer, an epoch timestamp in ms.
-
-    Returns:
-        A string that is the corresponding timestamp in log line timestamp
-        format.
-    """
-    s, ms = divmod(epoch_time, 1000)
-    d = datetime.datetime.fromtimestamp(s)
-    return d.strftime("%Y-%m-%d %H:%M:%S.") + str(ms)
-
-
-def get_log_line_timestamp(delta=None):
-    """Returns a timestamp in the format used by log lines.
-
-    Default is current time. If a delta is set, the return value will be
-    the current time offset by delta seconds.
-
-    Args:
-        delta: Number of seconds to offset from current time; can be negative.
-
-    Returns:
-        A timestamp in log line format with an offset.
-    """
-    return _get_timestamp("%Y-%m-%d %H:%M:%S.%f", delta)
-
-
-def get_log_file_timestamp(delta=None):
-    """Returns a timestamp in the format used for log file names.
-
-    Default is current time. If a delta is set, the return value will be
-    the current time offset by delta seconds.
-
-    Args:
-        delta: Number of seconds to offset from current time; can be negative.
-
-    Returns:
-        A timestamp in log file name format with an offset.
-    """
-    return _get_timestamp("%Y-%m-%d_%H-%M-%S-%f", delta)
-
-
-def _setup_test_logger(log_path, prefix=None):
-    """Customizes the root logger for a test run.
-
-    The logger object has a stream handler and a file handler. The stream
-    handler logs INFO level to the terminal, the file handler logs DEBUG
-    level to files.
-
-    Args:
-        log_path: Location of the log file.
-        prefix: A prefix for each log line in terminal.
-    """
-    logging.log_path = log_path
-    log_styles = [
-        LogStyles.LOG_INFO + LogStyles.TO_STDOUT,
-        LogStyles.DEFAULT_LEVELS + LogStyles.TESTCASE_LOG
-    ]
-    terminal_format = log_line_format
-    if prefix:
-        terminal_format = "[{}] {}".format(prefix, log_line_format)
-    stream_formatter = ColoredLogFormatter(terminal_format,
-                                           log_line_time_format)
-    file_formatter = logging.Formatter(log_line_format, log_line_time_format)
-    log = log_stream.create_logger('test_run',
-                                   '',
-                                   log_styles=log_styles,
-                                   stream_format=stream_formatter,
-                                   file_format=file_formatter)
-    log.setLevel(logging.DEBUG)
-    _enable_additional_log_levels()
-
-
-def _enable_additional_log_levels():
-    """Enables logging levels used for tracing tests and debugging devices."""
-    for log_type, log_data in LOG_LEVELS.items():
-        logging.addLevelName(log_data['level'], log_type)
-
-
-def kill_test_logger(logger):
-    """Cleans up a test logger object by removing all of its handlers.
-
-    Args:
-        logger: The logging object to clean up.
-    """
-    for h in list(logger.handlers):
-        logger.removeHandler(h)
-        if isinstance(h, logging.FileHandler):
-            h.close()
-
-
-def create_latest_log_alias(actual_path):
-    """Creates a symlink to the latest test run logs.
-
-    Args:
-        actual_path: The source directory where the latest test run's logs are.
-    """
-    link_path = os.path.join(os.path.dirname(actual_path), "latest")
-    if os.path.islink(link_path):
-        os.remove(link_path)
-    try:
-        os.symlink(actual_path, link_path)
-    except OSError:
-        logging.warning('Failed to create symlink to latest logs dir.',
-                        exc_info=True)
-
-
-def setup_test_logger(log_path, prefix=None):
-    """Customizes the root logger for a test run.
-
-    Args:
-        log_path: Location of the report file.
-        prefix: A prefix for each log line in terminal.
-        filename: Name of the files. The default is the time the objects
-            are requested.
-    """
-    os.makedirs(log_path, exist_ok=True)
-    _setup_test_logger(log_path, prefix)
-    create_latest_log_alias(log_path)
-
-
-def normalize_log_line_timestamp(log_line_timestamp):
-    """Replace special characters in log line timestamp with normal characters.
-
-    Args:
-        log_line_timestamp: A string in the log line timestamp format. Obtained
-            with get_log_line_timestamp.
-
-    Returns:
-        A string representing the same time as input timestamp, but without
-        special characters.
-    """
-    norm_tp = log_line_timestamp.replace(' ', '_')
-    norm_tp = norm_tp.replace(':', '-')
-    return norm_tp
-
-
-class LoggerAdapter(logging.LoggerAdapter):
-    """A LoggerAdapter class that takes in a lambda for transforming logs."""
-    def __init__(self, logging_lambda):
-        self.logging_lambda = logging_lambda
-        super(LoggerAdapter, self).__init__(logging.getLogger(), {})
-
-    def process(self, msg, kwargs):
-        return self.logging_lambda(msg), kwargs
-
-
-def create_logger(logging_lambda=lambda message: message):
-    """Returns a logger with logging defined by a given lambda.
-
-    Args:
-        logging_lambda: A lambda of the form:
-            >>> lambda log_message: return 'string'
-    """
-    return tracelogger.TraceLogger(LoggerAdapter(logging_lambda))
-
-
-def create_tagged_trace_logger(tag=''):
-    """Returns a logger that logs each line with the given prefix.
-
-    Args:
-        tag: The tag of the log line, E.g. if tag == tag123, the output
-            line would be:
-
-            <TESTBED> <TIME> <LOG_LEVEL> [tag123] logged message
-    """
-    def logging_lambda(msg):
-        return '[%s] %s' % (tag, msg)
-
-    return create_logger(logging_lambda)
diff --git a/src/antlion/records.py b/src/antlion/records.py
deleted file mode 100644
index aee2385..0000000
--- a/src/antlion/records.py
+++ /dev/null
@@ -1,248 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module is where all the record definitions and record containers live.
-"""
-
-import collections
-import copy
-import io
-import json
-
-from antlion import logger
-from antlion.libs import yaml_writer
-
-from mobly.records import ExceptionRecord
-from mobly.records import OUTPUT_FILE_SUMMARY
-from mobly.records import TestResultEnums as MoblyTestResultEnums
-from mobly.records import TestResultRecord as MoblyTestResultRecord
-from mobly.records import TestResult as MoblyTestResult
-from mobly.records import TestSummaryEntryType
-from mobly.records import TestSummaryWriter as MoblyTestSummaryWriter
-
-
-class TestSummaryWriter(MoblyTestSummaryWriter):
-    """Writes test results to a summary file in real time. Inherits from Mobly's
-    TestSummaryWriter.
-    """
-
-    def dump(self, content, entry_type):
-        """Update Mobly's implementation of dump to work on OrderedDict.
-
-        See MoblyTestSummaryWriter.dump for documentation.
-        """
-        new_content = collections.OrderedDict(copy.deepcopy(content))
-        new_content['Type'] = entry_type.value
-        new_content.move_to_end('Type', last=False)
-        # Both user code and Mobly code can trigger this dump, hence the lock.
-        with self._lock:
-            # For Python3, setting the encoding on yaml.safe_dump does not work
-            # because Python3 file descriptors set an encoding by default, which
-            # PyYAML uses instead of the encoding on yaml.safe_dump. So, the
-            # encoding has to be set on the open call instead.
-            with io.open(self._path, 'a', encoding='utf-8') as f:
-                # Use safe_dump here to avoid language-specific tags in final
-                # output.
-                yaml_writer.safe_dump(new_content, f)
-
-
-class TestResultEnums(MoblyTestResultEnums):
-    """Enums used for TestResultRecord class. Inherits from Mobly's
-    TestResultEnums.
-
-    Includes the tokens to mark test result with, and the string names for each
-    field in TestResultRecord.
-    """
-
-    RECORD_LOG_BEGIN_TIME = "Log Begin Time"
-    RECORD_LOG_END_TIME = "Log End Time"
-
-
-class TestResultRecord(MoblyTestResultRecord):
-    """A record that holds the information of a test case execution. This class
-    inherits from Mobly's TestResultRecord class.
-
-    Attributes:
-        test_name: A string representing the name of the test case.
-        begin_time: Epoch timestamp of when the test case started.
-        end_time: Epoch timestamp of when the test case ended.
-        self.uid: Unique identifier of a test case.
-        self.result: Test result, PASS/FAIL/SKIP.
-        self.extras: User defined extra information of the test result.
-        self.details: A string explaining the details of the test case.
-    """
-
-    def __init__(self, t_name, t_class=None):
-        super().__init__(t_name, t_class)
-        self.log_begin_time = None
-        self.log_end_time = None
-
-    def test_begin(self):
-        """Call this when the test case it records begins execution.
-
-        Sets the begin_time of this record.
-        """
-        super().test_begin()
-        self.log_begin_time = logger.epoch_to_log_line_timestamp(
-            self.begin_time)
-
-    def _test_end(self, result, e):
-        """Class internal function to signal the end of a test case execution.
-
-        Args:
-            result: One of the TEST_RESULT enums in TestResultEnums.
-            e: A test termination signal (usually an exception object). It can
-                be any exception instance or of any subclass of
-                acts.signals.TestSignal.
-        """
-        super()._test_end(result, e)
-        if self.end_time:
-            self.log_end_time = logger.epoch_to_log_line_timestamp(
-                self.end_time)
-
-    def to_dict(self):
-        """Gets a dictionary representing the content of this class.
-
-        Returns:
-            A dictionary representing the content of this class.
-        """
-        d = collections.OrderedDict()
-        d[TestResultEnums.RECORD_NAME] = self.test_name
-        d[TestResultEnums.RECORD_CLASS] = self.test_class
-        d[TestResultEnums.RECORD_BEGIN_TIME] = self.begin_time
-        d[TestResultEnums.RECORD_END_TIME] = self.end_time
-        d[TestResultEnums.RECORD_LOG_BEGIN_TIME] = self.log_begin_time
-        d[TestResultEnums.RECORD_LOG_END_TIME] = self.log_end_time
-        d[TestResultEnums.RECORD_RESULT] = self.result
-        d[TestResultEnums.RECORD_UID] = self.uid
-        d[TestResultEnums.RECORD_EXTRAS] = self.extras
-        d[TestResultEnums.RECORD_DETAILS] = self.details
-        d[TestResultEnums.RECORD_EXTRA_ERRORS] = {
-            key: value.to_dict()
-            for (key, value) in self.extra_errors.items()
-        }
-        d[TestResultEnums.RECORD_STACKTRACE] = self.stacktrace
-        return d
-
-    def json_str(self):
-        """Converts this test record to a string in json format.
-
-        Format of the json string is:
-            {
-                'Test Name': <test name>,
-                'Begin Time': <epoch timestamp>,
-                'Details': <details>,
-                ...
-            }
-
-        Returns:
-            A json-format string representing the test record.
-        """
-        return json.dumps(self.to_dict())
-
-
-class TestResult(MoblyTestResult):
-    """A class that contains metrics of a test run. This class inherits from
-    Mobly's TestResult class.
-
-    This class is essentially a container of TestResultRecord objects.
-
-    Attributes:
-        self.requested: A list of strings, each is the name of a test requested
-            by user.
-        self.failed: A list of records for tests failed.
-        self.executed: A list of records for tests that were actually executed.
-        self.passed: A list of records for tests passed.
-        self.skipped: A list of records for tests skipped.
-    """
-
-    def __add__(self, r):
-        """Overrides '+' operator for TestResult class.
-
-        The add operator merges two TestResult objects by concatenating all of
-        their lists together.
-
-        Args:
-            r: another instance of TestResult to be added
-
-        Returns:
-            A TestResult instance that's the sum of two TestResult instances.
-        """
-        if not isinstance(r, MoblyTestResult):
-            raise TypeError("Operand %s of type %s is not a TestResult." %
-                            (r, type(r)))
-        sum_result = TestResult()
-        for name in sum_result.__dict__:
-            r_value = getattr(r, name)
-            l_value = getattr(self, name)
-            if isinstance(r_value, list):
-                setattr(sum_result, name, l_value + r_value)
-        return sum_result
-
-    def json_str(self):
-        """Converts this test result to a string in json format.
-
-        Format of the json string is:
-            {
-                "Results": [
-                    {<executed test record 1>},
-                    {<executed test record 2>},
-                    ...
-                ],
-                "Summary": <summary dict>
-            }
-
-        Returns:
-            A json-format string representing the test results.
-        """
-        d = collections.OrderedDict()
-        d["ControllerInfo"] = {record.controller_name: record.controller_info
-                               for record in self.controller_info}
-        d["Results"] = [record.to_dict() for record in self.executed]
-        d["Summary"] = self.summary_dict()
-        d["Error"] = self.errors_list()
-        json_str = json.dumps(d, indent=4)
-        return json_str
-
-    def summary_str(self):
-        """Gets a string that summarizes the stats of this test result.
-
-        The summary provides the counts of how many test cases fall into each
-        category, like "Passed", "Failed" etc.
-
-        Format of the string is:
-            Requested <int>, Executed <int>, ...
-
-        Returns:
-            A summary string of this test result.
-        """
-        l = ["%s %s" % (k, v) for k, v in self.summary_dict().items()]
-        msg = ", ".join(l)
-        return msg
-
-    def errors_list(self):
-        l = list()
-        for record in self.error:
-            if isinstance(record, TestResultRecord):
-                keys = [TestResultEnums.RECORD_NAME,
-                        TestResultEnums.RECORD_DETAILS,
-                        TestResultEnums.RECORD_EXTRA_ERRORS]
-            elif isinstance(record, ExceptionRecord):
-                keys = [TestResultEnums.RECORD_DETAILS,
-                        TestResultEnums.RECORD_POSITION]
-            else:
-                return []
-            l.append({k: record.to_dict()[k] for k in keys})
-        return l
diff --git a/src/antlion/signals.py b/src/antlion/signals.py
deleted file mode 100644
index a3599f4..0000000
--- a/src/antlion/signals.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module is where all the test signal classes and related utilities live.
-"""
-
-from mobly.signals import *
diff --git a/src/antlion/test_decorators.py b/src/antlion/test_decorators.py
deleted file mode 100644
index 2ec1835..0000000
--- a/src/antlion/test_decorators.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import signals
-
-
-def __select_last(test_signals, _):
-    return test_signals[-1]
-
-
-def repeated_test(num_passes, acceptable_failures=0,
-                  result_selector=__select_last):
-    """A decorator that runs a test case multiple times.
-
-    This decorator can be used to run a test multiple times and aggregate the
-    data into a single test result. By setting `result_selector`, the user can
-    access the returned result of each run, allowing them to average results,
-    return the median, or gather and return standard deviation values.
-
-    This decorator should be used on test cases, and should not be used on
-    static or class methods. The test case must take in an additional argument,
-    `attempt_number`, which returns the current attempt number, starting from
-    1.
-
-    Note that any TestSignal intended to abort or skip the test will take
-    abort or skip immediately.
-
-    Args:
-        num_passes: The number of times the test needs to pass to report the
-            test case as passing.
-        acceptable_failures: The number of failures accepted. If the failures
-            exceeds this number, the test will stop repeating. The maximum
-            number of runs is `num_passes + acceptable_failures`. If the test
-            does fail, result_selector will still be called.
-        result_selector: A lambda that takes in the list of TestSignals and
-            returns the test signal to report the test case as. Note that the
-            list also contains any uncaught exceptions from the test execution.
-    """
-    def decorator(func):
-        if not func.__name__.startswith('test_'):
-            raise ValueError('Tests must start with "test_".')
-
-        def test_wrapper(self):
-            num_failures = 0
-            num_seen_passes = 0
-            test_signals_received = []
-            for i in range(num_passes + acceptable_failures):
-                try:
-                    func(self, i + 1)
-                except (signals.TestFailure, signals.TestError,
-                        AssertionError) as signal:
-                    test_signals_received.append(signal)
-                    num_failures += 1
-                except signals.TestPass as signal:
-                    test_signals_received.append(signal)
-                    num_seen_passes += 1
-                except (signals.TestSignal, KeyboardInterrupt):
-                    raise
-                except Exception as signal:
-                    test_signals_received.append(signal)
-                    num_failures += 1
-                else:
-                    num_seen_passes += 1
-                    test_signals_received.append(signals.TestPass(
-                        'Test iteration %s of %s passed without details.' % (
-                        i, func.__name__)))
-
-                if num_failures > acceptable_failures:
-                    break
-                elif num_seen_passes == num_passes:
-                    break
-                else:
-                    self.teardown_test()
-                    self.setup_test()
-
-            raise result_selector(test_signals_received, self)
-
-        return test_wrapper
-
-    return decorator
diff --git a/src/antlion/test_runner.py b/src/antlion/test_runner.py
deleted file mode 100644
index 261d0bd..0000000
--- a/src/antlion/test_runner.py
+++ /dev/null
@@ -1,313 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import itertools
-
-import importlib
-import inspect
-import fnmatch
-import json
-import logging
-import os
-import sys
-
-from antlion import base_test
-from antlion import keys
-from antlion import logger
-from antlion import records
-from antlion import signals
-from antlion import utils
-from antlion import error
-
-from mobly.records import ExceptionRecord
-
-
-def _find_test_class():
-    """Finds the test class in a test script.
-
-    Walk through module members and find the subclass of BaseTestClass. Only
-    one subclass is allowed in a test script.
-
-    Returns:
-        The test class in the test module.
-    """
-    test_classes = []
-    main_module_members = sys.modules['__main__']
-    for _, module_member in main_module_members.__dict__.items():
-        if inspect.isclass(module_member):
-            if issubclass(module_member, base_test.BaseTestClass):
-                test_classes.append(module_member)
-    if len(test_classes) != 1:
-        logging.error('Expected 1 test class per file, found %s.',
-                      [t.__name__ for t in test_classes])
-        sys.exit(1)
-    return test_classes[0]
-
-
-def execute_one_test_class(test_class, test_config, test_identifier):
-    """Executes one specific test class.
-
-    You could call this function in your own cli test entry point if you choose
-    not to use act.py.
-
-    Args:
-        test_class: A subclass of acts.base_test.BaseTestClass that has the test
-                    logic to be executed.
-        test_config: A dict representing one set of configs for a test run.
-        test_identifier: A list of tuples specifying which test cases to run in
-                         the test class.
-
-    Returns:
-        True if all tests passed without any error, False otherwise.
-
-    Raises:
-        If signals.TestAbortAll is raised by a test run, pipe it through.
-    """
-    tr = TestRunner(test_config, test_identifier)
-    try:
-        tr.run(test_class)
-        return tr.results.is_all_pass
-    except signals.TestAbortAll:
-        raise
-    except:
-        logging.exception('Exception when executing %s.', tr.testbed_name)
-    finally:
-        tr.stop()
-
-
-class TestRunner(object):
-    """The class that instantiates test classes, executes test cases, and
-    report results.
-
-    Attributes:
-        test_run_config: The TestRunConfig object specifying what tests to run.
-        id: A string that is the unique identifier of this test run.
-        log: The logger object used throughout this test run.
-        test_classes: A dictionary where we can look up the test classes by name
-            to instantiate. Supports unix shell style wildcards.
-        run_list: A list of tuples specifying what tests to run.
-        results: The test result object used to record the results of this test
-            run.
-        running: A boolean signifies whether this test run is ongoing or not.
-    """
-
-    def __init__(self, test_configs, run_list):
-        self.test_run_config = test_configs
-        self.testbed_name = self.test_run_config.testbed_name
-        start_time = logger.get_log_file_timestamp()
-        self.id = '{}@{}'.format(self.testbed_name, start_time)
-        self.test_run_config.log_path = os.path.abspath(
-            os.path.join(self.test_run_config.log_path, self.testbed_name,
-                         start_time))
-        logger.setup_test_logger(self.log_path, self.testbed_name)
-        self.log = logging.getLogger()
-        self.test_run_config.summary_writer = records.TestSummaryWriter(
-            os.path.join(self.log_path, records.OUTPUT_FILE_SUMMARY))
-        self.run_list = run_list
-        self.dump_config()
-        self.results = records.TestResult()
-        self.running = False
-
-    @property
-    def log_path(self):
-        """The path to write logs of this test run to."""
-        return self.test_run_config.log_path
-
-    @property
-    def summary_writer(self):
-        """The object responsible for writing summary and results data."""
-        return self.test_run_config.summary_writer
-
-    def import_test_modules(self, test_paths):
-        """Imports test classes from test scripts.
-
-        1. Locate all .py files under test paths.
-        2. Import the .py files as modules.
-        3. Find the module members that are test classes.
-        4. Categorize the test classes by name.
-
-        Args:
-            test_paths: A list of directory paths where the test files reside.
-
-        Returns:
-            A dictionary where keys are test class name strings, values are
-            actual test classes that can be instantiated.
-        """
-
-        def is_testfile_name(name, ext):
-            if ext == '.py':
-                if name.endswith('Test') or name.endswith('_test'):
-                    return True
-            return False
-
-        file_list = utils.find_files(test_paths, is_testfile_name)
-        test_classes = {}
-        for path, name, _ in file_list:
-            sys.path.append(path)
-            try:
-                with utils.SuppressLogOutput(
-                        log_levels=[logging.INFO, logging.ERROR]):
-                    module = importlib.import_module(name)
-            except Exception as e:
-                logging.debug('Failed to import %s: %s', path, str(e))
-                for test_cls_name, _ in self.run_list:
-                    alt_name = name.replace('_', '').lower()
-                    alt_cls_name = test_cls_name.lower()
-                    # Only block if a test class on the run list causes an
-                    # import error. We need to check against both naming
-                    # conventions: AaaBbb and aaa_bbb.
-                    if name == test_cls_name or alt_name == alt_cls_name:
-                        msg = ('Encountered error importing test class %s, '
-                               'abort.') % test_cls_name
-                        # This exception is logged here to help with debugging
-                        # under py2, because "raise X from Y" syntax is only
-                        # supported under py3.
-                        self.log.exception(msg)
-                        raise ValueError(msg)
-                continue
-            for member_name in dir(module):
-                if not member_name.startswith('__'):
-                    if member_name.endswith('Test'):
-                        test_class = getattr(module, member_name)
-                        if inspect.isclass(test_class):
-                            test_classes[member_name] = test_class
-        return test_classes
-
-    def run_test_class(self, test_cls_name, test_cases=None):
-        """Instantiates and executes a test class.
-
-        If test_cases is None, the test cases listed by self.tests will be
-        executed instead. If self.tests is empty as well, no test case in this
-        test class will be executed.
-
-        Args:
-            test_cls_name: Name of the test class to execute.
-            test_cases: List of test case names to execute within the class.
-
-        Raises:
-            ValueError is raised if the requested test class could not be found
-            in the test_paths directories.
-        """
-        matches = fnmatch.filter(self.test_classes.keys(), test_cls_name)
-        if not matches:
-            self.log.info(
-                'Cannot find test class %s or classes matching pattern, '
-                'skipping for now.' % test_cls_name)
-            record = records.TestResultRecord('*all*', test_cls_name)
-            record.test_skip(signals.TestSkip('Test class does not exist.'))
-            self.results.add_record(record)
-            return
-        if matches != [test_cls_name]:
-            self.log.info('Found classes matching pattern %s: %s',
-                          test_cls_name, matches)
-
-        for test_cls_name_match in matches:
-            test_cls = self.test_classes[test_cls_name_match]
-            test_cls_instance = test_cls(self.test_run_config)
-            try:
-                cls_result = test_cls_instance.run(test_cases)
-                self.results += cls_result
-            except signals.TestAbortAll as e:
-                self.results += e.results
-                raise e
-
-    def run(self, test_class=None):
-        """Executes test cases.
-
-        This will instantiate controller and test classes, and execute test
-        classes. This can be called multiple times to repeatedly execute the
-        requested test cases.
-
-        A call to TestRunner.stop should eventually happen to conclude the life
-        cycle of a TestRunner.
-
-        Args:
-            test_class: The python module of a test class. If provided, run this
-                        class; otherwise, import modules in under test_paths
-                        based on run_list.
-        """
-        if not self.running:
-            self.running = True
-
-        if test_class:
-            self.test_classes = {test_class.__name__: test_class}
-        else:
-            t_paths = self.test_run_config.controller_configs[
-                keys.Config.key_test_paths.value]
-            self.test_classes = self.import_test_modules(t_paths)
-        self.log.debug('Executing run list %s.', self.run_list)
-        for test_cls_name, test_case_names in self.run_list:
-            if not self.running:
-                break
-
-            if test_case_names:
-                self.log.debug('Executing test cases %s in test class %s.',
-                               test_case_names, test_cls_name)
-            else:
-                self.log.debug('Executing test class %s', test_cls_name)
-
-            try:
-                self.run_test_class(test_cls_name, test_case_names)
-            except error.ActsError as e:
-                self.results.error.append(ExceptionRecord(e))
-                self.log.error('Test Runner Error: %s' % e.details)
-            except signals.TestAbortAll as e:
-                self.log.warning(
-                    'Abort all subsequent test classes. Reason: %s', e)
-                raise
-
-    def stop(self):
-        """Releases resources from test run. Should always be called after
-        TestRunner.run finishes.
-
-        This function concludes a test run and writes out a test report.
-        """
-        if self.running:
-            msg = '\nSummary for test run %s: %s\n' % (
-                self.id, self.results.summary_str())
-            self._write_results_to_file()
-            self.log.info(msg.strip())
-            logger.kill_test_logger(self.log)
-            self.running = False
-
-    def _write_results_to_file(self):
-        """Writes test results to file(s) in a serializable format."""
-        # Old JSON format
-        path = os.path.join(self.log_path, 'test_run_summary.json')
-        with open(path, 'w') as f:
-            f.write(self.results.json_str())
-        # New YAML format
-        self.summary_writer.dump(self.results.summary_dict(),
-                                 records.TestSummaryEntryType.SUMMARY)
-
-    def dump_config(self):
-        """Writes the test config to a JSON file under self.log_path"""
-        config_path = os.path.join(self.log_path, 'test_configs.json')
-        with open(config_path, 'a') as f:
-            json.dump(dict(
-                itertools.chain(
-                    self.test_run_config.user_params.items(),
-                    self.test_run_config.controller_configs.items())),
-                      f,
-                      skipkeys=True,
-                      indent=4)
-
-    def write_test_campaign(self):
-        """Log test campaign file."""
-        path = os.path.join(self.log_path, 'test_campaign.log')
-        with open(path, 'w') as f:
-            for test_class, test_cases in self.run_list:
-                f.write('%s:\n%s' % (test_class, ',\n'.join(test_cases)))
-                f.write('\n\n')
diff --git a/src/antlion/test_utils/abstract_devices/bluetooth_device.py b/src/antlion/test_utils/abstract_devices/bluetooth_device.py
deleted file mode 100644
index c0cb29e..0000000
--- a/src/antlion/test_utils/abstract_devices/bluetooth_device.py
+++ /dev/null
@@ -1,1468 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import inspect
-import logging
-
-from queue import Empty
-
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_modes
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import gatt_event
-from antlion.test_utils.bt.bt_constants import scan_result
-from antlion.test_utils.bt.bt_gatt_utils import GattTestUtilsError
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_connection
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-
-import antlion.test_utils.bt.bt_test_utils as bt_test_utils
-
-
-def create_bluetooth_device(hardware_device):
-    """Creates a generic Bluetooth device based on type of device that is sent
-    to the functions.
-
-    Args:
-        hardware_device: A Bluetooth hardware device that is supported by ACTS.
-    """
-    if isinstance(hardware_device, FuchsiaDevice):
-        return FuchsiaBluetoothDevice(hardware_device)
-    elif isinstance(hardware_device, AndroidDevice):
-        return AndroidBluetoothDevice(hardware_device)
-    else:
-        raise ValueError('Unable to create BluetoothDevice for type %s' %
-                         type(hardware_device))
-
-
-class BluetoothDevice(object):
-    """Class representing a generic Bluetooth device.
-
-    Each object of this class represents a generic Bluetooth device.
-    Android device and Fuchsia devices are the currently supported devices.
-
-    Attributes:
-        device: A generic Bluetooth device.
-    """
-
-    def __init__(self, device):
-        self.device = device
-        self.log = logging
-
-    def a2dp_initiate_open_stream(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_profile_a2dp_sink(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def stop_profile_a2dp_sink(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_pairing_helper(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def set_discoverable(self, is_discoverable):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def bluetooth_toggle_state(self, state):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_discover_characteristic_by_uuid(self, peer_identifier,
-                                                    uuid):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def initialize_bluetooth_controller(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def get_pairing_pin(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def input_pairing_pin(self, pin):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def get_bluetooth_local_address(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_connect(self, peer_identifier, transport, autoconnect):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_write_characteristic_without_response_by_handle(
-            self, peer_identifier, handle, value):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_write_characteristic_by_handle(self, peer_identifier,
-                                                   handle, offset, value):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_characteristic_by_handle(self, peer_identifier,
-                                                  handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_characteristic_by_uuid(self, peer_identifier, uuid):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_long_characteristic_by_handle(self, peer_identifier,
-                                                       handle, offset,
-                                                       max_bytes):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_enable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_disable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_descriptor_by_handle(self, peer_identifier, handle):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_write_descriptor_by_handle(self, peer_identifier, handle,
-                                               offset, value):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_long_read_descriptor_by_handle(self, peer_identifier,
-                                                   handle, offset, max_bytes):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_disconnect(self, peer_identifier):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_refresh(self, peer_identifier):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def le_scan_with_name_filter(self, name, timeout):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def log_info(self, log):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def reset_bluetooth(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_add_search(self, attribute_list, profile_id):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_add_service(self, sdp_record):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_clean_up(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_init(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def sdp_remove_service(self, service_id):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_le_advertisement(self, adv_data, scan_response, adv_interval,
-                               connectable):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def stop_le_advertisement(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def set_bluetooth_local_name(self, name):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def setup_gatt_server(self, database):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def close_gatt_server(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def unbond_device(self, peer_identifier):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def unbond_all_known_devices(self):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def init_pair(self, peer_identifier, security_level, non_bondable,
-                  transport):
-        """Base generic Bluetooth interface. Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-
-class AndroidBluetoothDevice(BluetoothDevice):
-    """Class wrapper for an Android Bluetooth device.
-
-    Each object of this class represents a generic Bluetooth device.
-    Android device and Fuchsia devices are the currently supported devices/
-
-    Attributes:
-        android_device: An Android Bluetooth device.
-    """
-
-    def __init__(self, android_device):
-        super().__init__(android_device)
-        self.gatt_timeout = 10
-        self.peer_mapping = {}
-        self.discovered_services_index = None
-
-    def _client_wait(self, gatt_event, gatt_callback):
-        return self._timed_pop(gatt_event, gatt_callback)
-
-    def _timed_pop(self, gatt_event, gatt_callback):
-        expected_event = gatt_event["evt"].format(gatt_callback)
-        try:
-            return self.device.ed.pop_event(expected_event, self.gatt_timeout)
-        except Empty as emp:
-            raise AssertionError(gatt_event["err"].format(expected_event))
-
-    def _setup_discovered_services_index(self, bluetooth_gatt):
-        """ Sets the discovered services index for the gatt connection
-        related to the Bluetooth GATT callback object.
-
-        Args:
-            bluetooth_gatt: The BluetoothGatt callback id
-        """
-        if not self.discovered_services_index:
-            self.device.droid.gattClientDiscoverServices(bluetooth_gatt)
-            expected_event = gatt_cb_strings['gatt_serv_disc'].format(
-                self.gatt_callback)
-            event = self.dut.ed.pop_event(expected_event, self.gatt_timeout)
-            self.discovered_services_index = event['data']['ServicesIndex']
-
-    def a2dp_initiate_open_stream(self):
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_profile_a2dp_sink(self):
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def stop_profile_a2dp_sink(self):
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def bluetooth_toggle_state(self, state):
-        self.device.droid.bluetoothToggleState(state)
-
-    def set_discoverable(self, is_discoverable):
-        """ Sets the device's discoverability.
-
-        Args:
-            is_discoverable: True if discoverable, false if not discoverable
-        """
-        if is_discoverable:
-            self.device.droid.bluetoothMakeDiscoverable()
-        else:
-            self.device.droid.bluetoothMakeUndiscoverable()
-
-    def initialize_bluetooth_controller(self):
-        """ Just pass for Android as there is no concept of initializing
-        a Bluetooth controller.
-        """
-
-    def start_pairing_helper(self):
-        """ Starts the Android pairing helper.
-        """
-        self.device.droid.bluetoothStartPairingHelper(True)
-
-    def gatt_client_write_characteristic_without_response_by_handle(
-            self, peer_identifier, handle, value):
-        """ Perform a GATT Client write Characteristic without response to
-        remote peer GATT server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The characteristic handle (or instance id).
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer idenifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return False
-        self._setup_discovered_services_index()
-        self.device.droid.gattClientWriteCharacteristicByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle, value)
-        try:
-            event = self._client_wait(gatt_event['char_write'],
-                                      peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to write Characteristic: {}".format(err))
-        return True
-
-    def gatt_client_write_characteristic_by_handle(self, peer_identifier,
-                                                   handle, offset, value):
-        """ Perform a GATT Client write Characteristic without response to
-        remote peer GATT server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The characteristic handle (or instance id).
-            offset: Not used yet.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer idenifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return False
-        self._setup_discovered_services_index()
-        self.device.droid.gattClientWriteCharacteristicByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle, value)
-        try:
-            event = self._client_wait(gatt_event['char_write'],
-                                      peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to write Characteristic: {}".format(err))
-        return True
-
-    def gatt_client_read_characteristic_by_handle(self, peer_identifier,
-                                                  handle):
-        """ Perform a GATT Client read Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The characteristic handle (or instance id).
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer idenifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return False
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientReadCharacteristicByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle)
-        try:
-            event = self._client_wait(gatt_event['char_read'],
-                                      peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to read Characteristic: {}".format(err))
-
-        return event['data']['CharacteristicValue']
-
-    def gatt_client_read_long_characteristic_by_handle(self, peer_identifier,
-                                                       handle, offset,
-                                                       max_bytes):
-        """ Perform a GATT Client read Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            offset: Not used yet.
-            handle: The characteristic handle (or instance id).
-            max_bytes: Not used yet.
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer idenifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return False
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientReadCharacteristicByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle)
-        try:
-            event = self._client_wait(gatt_event['char_read'],
-                                      peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to read Characteristic: {}".format(err))
-
-        return event['data']['CharacteristicValue']
-
-    def gatt_client_enable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """ Perform a GATT Client enable Characteristic notification to remote
-        peer GATT server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The characteristic handle.
-        Returns:
-            True is success, False if failure.
-        """
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_disable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """ Perform a GATT Client disable Characteristic notification to remote
-        peer GATT server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The characteristic handle.
-        Returns:
-            True is success, False if failure.
-        """
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def gatt_client_read_descriptor_by_handle(self, peer_identifier, handle):
-        """ Perform a GATT Client read Descriptor to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The Descriptor handle (or instance id).
-        Returns:
-            Value of Descriptor if success, None if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer idenifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return False
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientReadDescriptorByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle)
-        try:
-            event = self._client_wait(gatt_event['desc_read'],
-                                      peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to read Descriptor: {}".format(err))
-        # TODO: Implement sending Descriptor value in SL4A such that the data
-        # can be represented by: event['data']['DescriptorValue']
-        return ""
-
-    def gatt_client_write_descriptor_by_handle(self, peer_identifier, handle,
-                                               offset, value):
-        """ Perform a GATT Client write Descriptor to the remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The mac address associated with the GATT connection
-            handle: The Descriptor handle (or instance id).
-            offset: Not used yet
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "Peer idenifier {} not currently connected or unknown.".format(
-                    peer_identifier))
-            return False
-        self._setup_discovered_services_index()
-        self.device.droid.gattClientWriteDescriptorByInstanceId(
-            peer_info.get('bluetooth_gatt'), self.discovered_services_index,
-            handle, value)
-        try:
-            event = self._client_wait(gatt_event['desc_write'],
-                                      peer_info.get('gatt_callback'))
-        except AssertionError as err:
-            self.log.error("Failed to write Characteristic: {}".format(err))
-        return True
-
-    def gatt_connect(self, peer_identifier, transport, autoconnect=False):
-        """ Perform a GATT connection to a perihperal.
-
-        Args:
-            peer_identifier: The mac address to connect to.
-            transport: Which transport to use.
-            autoconnect: Set autocnnect to True or False.
-        Returns:
-            True if success, False if failure.
-        """
-        try:
-            bluetooth_gatt, gatt_callback = setup_gatt_connection(
-                self.device, peer_identifier, autoconnect, transport)
-            self.peer_mapping[peer_identifier] = {
-                "bluetooth_gatt": bluetooth_gatt,
-                "gatt_callback": gatt_callback
-            }
-        except GattTestUtilsError as err:
-            self.log.error(err)
-            return False
-        return True
-
-    def gatt_disconnect(self, peer_identifier):
-        """ Perform a GATT disconnect from a perihperal.
-
-        Args:
-            peer_identifier: The peer to disconnect from.
-        Returns:
-            True if success, False if failure.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "No previous connections made to {}".format(peer_identifier))
-            return False
-
-        try:
-            disconnect_gatt_connection(self.device,
-                                       peer_info.get("bluetooth_gatt"),
-                                       peer_info.get("gatt_callback"))
-            self.device.droid.gattClientClose(peer_info.get("bluetooth_gatt"))
-        except GattTestUtilsError as err:
-            self.log.error(err)
-            return False
-        self.device.droid.gattClientClose(peer_info.get("bluetooth_gatt"))
-
-    def gatt_client_refresh(self, peer_identifier):
-        """ Perform a GATT Client Refresh of a perihperal.
-
-        Clears the internal cache and forces a refresh of the services from the
-        remote device.
-
-        Args:
-            peer_identifier: The peer to refresh.
-        """
-        peer_info = self.peer_mapping.get(peer_identifier)
-        if not peer_info:
-            self.log.error(
-                "No previous connections made to {}".format(peer_identifier))
-            return False
-        self.device.droid.gattClientRefresh(peer_info["bluetooth_gatt"])
-
-    def le_scan_with_name_filter(self, name, timeout):
-        """ Scan over LE for a specific device name.
-
-         Args:
-            name: The name filter to set.
-            timeout: The timeout to wait to find the advertisement.
-        Returns:
-            Discovered mac address or None
-        """
-        self.device.droid.bleSetScanSettingsScanMode(
-            ble_scan_settings_modes['low_latency'])
-        filter_list = self.device.droid.bleGenFilterList()
-        scan_settings = self.device.droid.bleBuildScanSetting()
-        scan_callback = self.device.droid.bleGenScanCallback()
-        self.device.droid.bleSetScanFilterDeviceName(name)
-        self.device.droid.bleBuildScanFilter(filter_list)
-        self.device.droid.bleSetScanFilterDeviceName(self.name)
-        self.device.droid.bleStartBleScan(filter_list, scan_settings,
-                                          scan_callback)
-        try:
-            event = self.device.ed.pop_event(scan_result.format(scan_callback),
-                                             timeout)
-            return event['data']['Result']['deviceInfo']['address']
-        except Empty as err:
-            self.log.info("Scanner did not find advertisement {}".format(err))
-            return None
-
-    def log_info(self, log):
-        """ Log directly onto the device.
-
-        Args:
-            log: The informative log.
-        """
-        self.device.droid.log.logI(log)
-
-    def set_bluetooth_local_name(self, name):
-        """ Sets the Bluetooth controller's local name
-        Args:
-            name: The name to set.
-        """
-        self.device.droid.bluetoothSetLocalName(name)
-
-    def get_local_bluetooth_address(self):
-        """ Returns the Bluetooth local address.
-        """
-        return self.device.droid.bluetoothGetLocalAddress()
-
-    def reset_bluetooth(self):
-        """ Resets Bluetooth on the Android Device.
-        """
-        bt_test_utils.reset_bluetooth([self.device])
-
-    def sdp_add_search(self, attribute_list, profile_id):
-        """Adds an SDP search record.
-        Args:
-            attribute_list: The list of attributes to set
-            profile_id: The profile ID to set.
-        """
-        # Android devices currently have no hooks to modify the SDP record.
-
-    def sdp_add_service(self, sdp_record):
-        """Adds an SDP service record.
-        Args:
-            sdp_record: The dictionary representing the search record to add.
-        Returns:
-            service_id: The service id to track the service record published.
-                None if failed.
-        """
-        # Android devices currently have no hooks to modify the SDP record.
-
-    def sdp_clean_up(self):
-        """Cleans up all objects related to SDP.
-        """
-        self.device.sl4f.sdp_lib.cleanUp()
-
-    def sdp_init(self):
-        """Initializes SDP on the device.
-        """
-        # Android devices currently have no hooks to modify the SDP record.
-
-    def sdp_remove_service(self, service_id):
-        """Removes a service based on an input id.
-        Args:
-            service_id: The service ID to remove.
-        """
-        # Android devices currently have no hooks to modify the SDP record.
-
-    def unbond_all_known_devices(self):
-        """ Unbond all known remote devices.
-        """
-        self.device.droid.bluetoothFactoryReset()
-
-    def unbond_device(self, peer_identifier):
-        """ Unbond peer identifier.
-
-        Args:
-            peer_identifier: The mac address for the peer to unbond.
-
-        """
-        self.device.droid.bluetoothUnbond(peer_identifier)
-
-    def init_pair(self, peer_identifier, security_level, non_bondable,
-                  transport):
-        """ Send an outgoing pairing request the input peer_identifier.
-
-        Android currently does not support setting various security levels or
-        bondable modes. Making them available for other bluetooth_device
-        variants. Depending on the Address type, Android will figure out the
-        transport to pair automatically.
-
-        Args:
-            peer_identifier: A string representing the device id.
-            security_level: Not yet implemented. See Fuchsia device impl.
-            non_bondable: Not yet implemented. See Fuchsia device impl.
-            transport: Not yet implemented. See Fuchsia device impl.
-
-        """
-        self.dut.droid.bluetoothBond(self.peer_identifier)
-
-
-class FuchsiaBluetoothDevice(BluetoothDevice):
-    """Class wrapper for an Fuchsia Bluetooth device.
-
-    Each object of this class represents a generic luetooth device.
-    Android device and Fuchsia devices are the currently supported devices/
-
-    Attributes:
-        fuchsia_device: A Fuchsia Bluetooth device.
-    """
-
-    def __init__(self, fuchsia_device):
-        super().__init__(fuchsia_device)
-
-    def a2dp_initiate_open_stream(self):
-        raise NotImplementedError("{} not yet implemented.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def start_profile_a2dp_sink(self):
-        """ Starts the A2DP sink profile.
-        """
-        self.device.start_v1_component("bt-a2dp-sink")
-
-    def stop_profile_a2dp_sink(self):
-        """ Stops the A2DP sink profile.
-        """
-        self.device.stop_v1_component("bt-a2dp-sink")
-
-    def start_pairing_helper(self):
-        self.device.sl4f.bts_lib.acceptPairing()
-
-    def bluetooth_toggle_state(self, state):
-        """Stub for Fuchsia implementation."""
-
-    def set_discoverable(self, is_discoverable):
-        """ Sets the device's discoverability.
-
-        Args:
-            is_discoverable: True if discoverable, false if not discoverable
-        """
-        self.device.sl4f.bts_lib.setDiscoverable(is_discoverable)
-
-    def get_pairing_pin(self):
-        """ Get the pairing pin from the active pairing delegate.
-        """
-        return self.device.sl4f.bts_lib.getPairingPin()['result']
-
-    def input_pairing_pin(self, pin):
-        """ Input pairing pin to active pairing delegate.
-
-        Args:
-            pin: The pin to input.
-        """
-        self.device.sl4f.bts_lib.inputPairingPin(pin)
-
-    def initialize_bluetooth_controller(self):
-        """ Initialize Bluetooth controller for first time use.
-        """
-        self.device.sl4f.bts_lib.initBluetoothSys()
-
-    def get_local_bluetooth_address(self):
-        """ Returns the Bluetooth local address.
-        """
-        return self.device.sl4f.bts_lib.getActiveAdapterAddress().get("result")
-
-    def set_bluetooth_local_name(self, name):
-        """ Sets the Bluetooth controller's local name
-        Args:
-            name: The name to set.
-        """
-        self.device.sl4f.bts_lib.setName(name)
-
-    def gatt_client_write_characteristic_without_response_by_handle(
-            self, peer_identifier, handle, value):
-        """ Perform a GATT Client write Characteristic without response to
-        remote peer GATT server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.writeCharByIdWithoutResponse(
-            handle, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_write_characteristic_by_handle(self, peer_identifier,
-                                                   handle, offset, value):
-        """ Perform a GATT Client write Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            offset: The offset to start writing to.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.writeCharById(
-            handle, offset, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_write_long_characteristic_by_handle(
-            self, peer_identifier, handle, offset, value, reliable_mode=False):
-        """ Perform a GATT Client write long Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            offset: The offset to start writing to.
-            value: The list of bytes to write.
-            reliable_mode: A bool value representing a reliable write or not.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.error(
-                "Unable to find handle {} in GATT server db.".format(handle))
-            return False
-        result = self.device.sl4f.gattc_lib.writeLongCharById(
-            handle, offset, value, reliable_mode)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write long characteristic handle {} with err: {}".
-                format(peer_identifier, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_write_long_descriptor_by_handle(self, peer_identifier,
-                                                    handle, offset, value):
-        """ Perform a GATT Client write long Descriptor to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The descriptor handle.
-            offset: The offset to start writing to.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.error(
-                "Unable to find handle {} in GATT server db.".format(handle))
-            return False
-        result = self.device.sl4f.gattc_lib.writeLongDescById(
-            handle, offset, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write long descriptor handle {} with err: {}".
-                format(peer_identifier, result.get("error")))
-            return False
-        return True
-
-    def gatt_client_read_characteristic_by_handle(self, peer_identifier,
-                                                  handle):
-        """ Perform a GATT Client read Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.readCharacteristicById(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_read_characteristic_by_uuid(self, peer_identifier, uuid):
-        """ Perform a GATT Client read Characteristic by uuid to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            uuid: The characteristic uuid.
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, uuid, uuid=True)):
-            self.log.warn(
-                "Unable to find uuid {} in GATT server db.".format(uuid))
-        result = self.device.sl4f.gattc_lib.readCharacteristicByType(uuid)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read characteristic uuid {} with err: {}".format(
-                    uuid, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_read_long_characteristic_by_handle(self, peer_identifier,
-                                                       handle, offset,
-                                                       max_bytes):
-        """ Perform a GATT Client read Characteristic to remote peer GATT
-        server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-            offset: The offset to start reading.
-            max_bytes: The max bytes to return for each read.
-        Returns:
-            Value of Characteristic if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.readLongCharacteristicById(
-            handle, offset, max_bytes)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read characteristic handle {} with err: {}".format(
-                    handle, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_enable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """ Perform a GATT Client enable Characteristic notification to remote
-        peer GATT server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-        Returns:
-            True is success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.enableNotifyCharacteristic(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to enable characteristic notifications for handle {} "
-                "with err: {}".format(handle, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_disable_notifiy_characteristic_by_handle(
-            self, peer_identifier, handle):
-        """ Perform a GATT Client disable Characteristic notification to remote
-        peer GATT server database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The characteristic handle.
-        Returns:
-            True is success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.disableNotifyCharacteristic(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to disable characteristic notifications for handle {} "
-                "with err: {}".format(peer_identifier, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_read_descriptor_by_handle(self, peer_identifier, handle):
-        """ Perform a GATT Client read Descriptor to remote peer GATT server
-        database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The Descriptor handle.
-        Returns:
-            Value of Descriptor if success, None if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.readDescriptorById(handle)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to read descriptor for handle {} with err: {}".format(
-                    peer_identifier, result.get("error")))
-            return None
-        return result.get("result")
-
-    def gatt_client_write_descriptor_by_handle(self, peer_identifier, handle,
-                                               offset, value):
-        """ Perform a GATT Client write Descriptor to remote peer GATT server
-        database.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            handle: The Descriptor handle.
-            offset: The offset to start writing at.
-            value: The list of bytes to write.
-        Returns:
-            True if success, False if failure.
-        """
-        if (not self._find_service_id_and_connect_to_service_for_handle(
-                peer_identifier, handle)):
-            self.log.warn(
-                "Unable to find handle {} in GATT server db.".format(handle))
-        result = self.device.sl4f.gattc_lib.writeDescriptorById(
-            handle, offset, value)
-        if result.get("error") is not None:
-            self.log.error(
-                "Failed to write descriptor for handle {} with err: {}".format(
-                    peer_identifier, result.get("error")))
-            return None
-        return True
-
-    def gatt_connect(self, peer_identifier, transport, autoconnect):
-        """ Perform a GATT connection to a perihperal.
-
-        Args:
-            peer_identifier: The peer to connect to.
-            transport: Not implemented.
-            autoconnect: Not implemented.
-        Returns:
-            True if success, False if failure.
-        """
-        connection_result = self.device.sl4f.gattc_lib.bleConnectToPeripheral(
-            peer_identifier)
-        if connection_result.get("error") is not None:
-            self.log.error("Failed to connect to peer id {}: {}".format(
-                peer_identifier, connection_result.get("error")))
-            return False
-        return True
-
-    def gatt_client_refresh(self, peer_identifier):
-        """ Perform a GATT Client Refresh of a perihperal.
-
-        Clears the internal cache and forces a refresh of the services from the
-        remote device. In Fuchsia there is no FIDL api to automatically do this
-        yet. Therefore just read all Characteristics which satisfies the same
-        requirements.
-
-        Args:
-            peer_identifier: The peer to refresh.
-        """
-        self._read_all_characteristics(peer_identifier)
-
-    def gatt_client_discover_characteristic_by_uuid(self, peer_identifier,
-                                                    uuid):
-        """ Perform a GATT Client Refresh of a perihperal.
-
-        Clears the internal cache and forces a refresh of the services from the
-        remote device. In Fuchsia there is no FIDL api to automatically do this
-        yet. Therefore just read all Characteristics which satisfies the same
-        requirements.
-
-        Args:
-            peer_identifier: The peer to refresh.
-        """
-        self._read_all_characteristics(peer_identifier, uuid)
-
-    def gatt_disconnect(self, peer_identifier):
-        """ Perform a GATT disconnect from a perihperal.
-
-        Args:
-            peer_identifier: The peer to disconnect from.
-        Returns:
-            True if success, False if failure.
-        """
-        disconnect_result = self.device.sl4f.gattc_lib.bleDisconnectPeripheral(
-            peer_identifier)
-        if disconnect_result.get("error") is not None:
-            self.log.error("Failed to disconnect from peer id {}: {}".format(
-                peer_identifier, disconnect_result.get("error")))
-            return False
-        return True
-
-    def reset_bluetooth(self):
-        """Stub for Fuchsia implementation."""
-
-    def sdp_add_search(self, attribute_list, profile_id):
-        """Adds an SDP search record.
-        Args:
-            attribute_list: The list of attributes to set
-            profile_id: The profile ID to set.
-        """
-        return self.device.sl4f.sdp_lib.addSearch(attribute_list, profile_id)
-
-    def sdp_add_service(self, sdp_record):
-        """Adds an SDP service record.
-        Args:
-            sdp_record: The dictionary representing the search record to add.
-        """
-        return self.device.sl4f.sdp_lib.addService(sdp_record)
-
-    def sdp_clean_up(self):
-        """Cleans up all objects related to SDP.
-        """
-        return self.device.sl4f.sdp_lib.cleanUp()
-
-    def sdp_init(self):
-        """Initializes SDP on the device.
-        """
-        return self.device.sl4f.sdp_lib.init()
-
-    def sdp_remove_service(self, service_id):
-        """Removes a service based on an input id.
-        Args:
-            service_id: The service ID to remove.
-        """
-        return self.device.sl4f.sdp_lib.init()
-
-    def start_le_advertisement(self, adv_data, scan_response, adv_interval,
-                               connectable):
-        """ Starts an LE advertisement
-
-        Args:
-            adv_data: Advertisement data.
-            adv_interval: Advertisement interval.
-        """
-        self.device.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, adv_interval, connectable)
-
-    def stop_le_advertisement(self):
-        """ Stop active LE advertisement.
-        """
-        self.device.sl4f.ble_lib.bleStopBleAdvertising()
-
-    def setup_gatt_server(self, database):
-        """ Sets up an input GATT server.
-
-        Args:
-            database: A dictionary representing the GATT database to setup.
-        """
-        self.device.sl4f.gatts_lib.publishServer(database)
-
-    def close_gatt_server(self):
-        """ Closes an existing GATT server.
-        """
-        self.device.sl4f.gatts_lib.closeServer()
-
-    def le_scan_with_name_filter(self, name, timeout):
-        """ Scan over LE for a specific device name.
-
-        Args:
-            name: The name filter to set.
-            timeout: The timeout to wait to find the advertisement.
-        Returns:
-            Discovered device id or None
-        """
-        partial_match = True
-        return le_scan_for_device_by_name(self.device, self.device.log, name,
-                                          timeout, partial_match)
-
-    def log_info(self, log):
-        """ Log directly onto the device.
-
-        Args:
-            log: The informative log.
-        """
-        self.device.sl4f.logging_lib.logI(log)
-
-    def unbond_all_known_devices(self):
-        """ Unbond all known remote devices.
-        """
-        try:
-            device_list = self.device.sl4f.bts_lib.getKnownRemoteDevices(
-            )['result']
-            for device_info in device_list:
-                device = device_list[device_info]
-                if device['bonded']:
-                    self.device.sl4f.bts_lib.forgetDevice(device['id'])
-        except Exception as err:
-            self.log.err("Unable to unbond all devices: {}".format(err))
-
-    def unbond_device(self, peer_identifier):
-        """ Unbond peer identifier.
-
-        Args:
-            peer_identifier: The peer identifier for the peer to unbond.
-
-        """
-        self.device.sl4f.bts_lib.forgetDevice(peer_identifier)
-
-    def _find_service_id_and_connect_to_service_for_handle(
-            self, peer_identifier, handle, uuid=False):
-        fail_err = "Failed to find handle {} in Peer database."
-        if uuid:
-            handle = handle.lower()
-        try:
-            services = self.device.sl4f.gattc_lib.listServices(peer_identifier)
-            for service in services['result']:
-                service_id = service['id']
-                self.device.sl4f.gattc_lib.connectToService(
-                    peer_identifier, service_id)
-                chars = self.device.sl4f.gattc_lib.discoverCharacteristics()
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    if uuid:
-                        char_id = char['uuid_type']
-                    if handle == char_id:
-                        return True
-                    descriptors = char['descriptors']
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        if uuid:
-                            desc_id = desc['uuid_type']
-                        if handle == desc_id:
-                            return True
-        except Exception as err:
-            self.log.error(fail_err.format(err))
-            return False
-
-    def _read_all_characteristics(self, peer_identifier, uuid=None):
-        fail_err = "Failed to read all characteristics with: {}"
-        try:
-            services = self.device.sl4f.gattc_lib.listServices(peer_identifier)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.device.sl4f.gattc_lib.connectToService(
-                    peer_identifier, service_id)
-                chars = self.device.sl4f.gattc_lib.discoverCharacteristics()
-                self.log.info(
-                    "Reading chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    if uuid and uuid.lower() not in char_uuid.lower():
-                        continue
-                    try:
-                        read_val =  \
-                            self.device.sl4f.gattc_lib.readCharacteristicById(
-                                char_id)
-                        self.log.info(
-                            "\tCharacteristic uuid / Value: {} / {}".format(
-                                char_uuid, read_val['result']))
-                        str_value = ""
-                        for val in read_val['result']:
-                            str_value += chr(val)
-                        self.log.info("\t\tstr val: {}".format(str_value))
-                    except Exception as err:
-                        self.log.error(err)
-        except Exception as err:
-            self.log.error(fail_err.forma(err))
-
-    def _perform_read_all_descriptors(self, peer_identifier):
-        fail_err = "Failed to read all characteristics with: {}"
-        try:
-            services = self.device.sl4f.gattc_lib.listServices(peer_identifier)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.device.sl4f.gattc_lib.connectToService(
-                    peer_identifier, service_id)
-                chars = self.device.sl4f.gattc_lib.discoverCharacteristics()
-                self.log.info(
-                    "Reading descs in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    descriptors = char['descriptors']
-                    self.log.info(
-                        "\tReading descs in char uuid: {}".format(char_uuid))
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        desc_uuid = desc["uuid_type"]
-                    try:
-                        read_val = self.device.sl4f.gattc_lib.readDescriptorById(
-                            desc_id)
-                        self.log.info(
-                            "\t\tDescriptor uuid / Value: {} / {}".format(
-                                desc_uuid, read_val['result']))
-                    except Exception as err:
-                        pass
-        except Exception as err:
-            self.log.error(fail_err.format(err))
-
-    def init_pair(self, peer_identifier, security_level, non_bondable,
-                  transport):
-        """ Send an outgoing pairing request the input peer_identifier.
-
-        Android currently does not support setting various security levels or
-        bondable modes. Making them available for other bluetooth_device
-        variants. Depending on the Address type, Android will figure out the
-        transport to pair automatically.
-
-        Args:
-            peer_identifier: A string representing the device id.
-            security_level: The security level required for this pairing request
-                represented as a u64. (Only for LE pairing)
-                Available Values
-                1 - ENCRYPTED: Encrypted without MITM protection
-                    (unauthenticated)
-                2 - AUTHENTICATED: Encrypted with MITM protection
-                    (authenticated)
-                None: No pairing security level.
-            non_bondable: A bool representing whether the pairing mode is
-                bondable or not. None is also accepted. False if bondable, True
-                if non-bondable
-            transport: A u64 representing the transport type.
-                Available Values
-                1 - BREDR: Classic BR/EDR transport
-                2 - LE: Bluetooth Low Energy Transport
-        Returns:
-            True if successful, False if failed.
-        """
-        try:
-            self.device.sl4f.bts_lib.pair(peer_identifier, security_level,
-                                          non_bondable, transport)
-            return True
-        except Exception as err:
-            fail_err = "Failed to pair to peer_identifier {} with: {}".format(
-                peer_identifier)
-            self.log.error(fail_err.format(err))
diff --git a/src/antlion/test_utils/abstract_devices/bluetooth_handsfree_abstract_device.py b/src/antlion/test_utils/abstract_devices/bluetooth_handsfree_abstract_device.py
deleted file mode 100644
index 2367b94..0000000
--- a/src/antlion/test_utils/abstract_devices/bluetooth_handsfree_abstract_device.py
+++ /dev/null
@@ -1,340 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import inspect
-import time
-from antlion import asserts
-from antlion.controllers.buds_lib.dev_utils import apollo_sink_events
-from antlion.test_utils.bt.bt_constants import bt_default_timeout
-
-
-
-def validate_controller(controller, abstract_device_class):
-    """Ensure controller has all methods in abstract_device_class.
-    Also checks method signatures to ensure parameters are satisfied.
-
-    Args:
-        controller: instance of a device controller.
-        abstract_device_class: class definition of an abstract_device interface.
-    Raises:
-         NotImplementedError: if controller is missing one or more methods.
-    """
-    ctlr_methods = inspect.getmembers(controller, predicate=callable)
-    reqd_methods = inspect.getmembers(
-        abstract_device_class, predicate=inspect.ismethod)
-    expected_func_names = {method[0] for method in reqd_methods}
-    controller_func_names = {method[0] for method in ctlr_methods}
-
-    if not controller_func_names.issuperset(expected_func_names):
-        raise NotImplementedError(
-            'Controller {} is missing the following functions: {}'.format(
-                controller.__class__.__name__,
-                repr(expected_func_names - controller_func_names)))
-
-    for func_name in expected_func_names:
-        controller_func = getattr(controller, func_name)
-        required_func = getattr(abstract_device_class, func_name)
-        required_signature = inspect.signature(required_func)
-        if inspect.signature(controller_func) != required_signature:
-            raise NotImplementedError(
-                'Method {} must have the signature {}{}.'.format(
-                    controller_func.__qualname__, controller_func.__name__,
-                    required_signature))
-
-
-class BluetoothHandsfreeAbstractDevice:
-    """Base class for all Bluetooth handsfree abstract devices.
-
-    Desired controller classes should have a corresponding Bluetooth handsfree
-    abstract device class defined in this module.
-    """
-
-    @property
-    def mac_address(self):
-        raise NotImplementedError
-
-    def accept_call(self):
-        raise NotImplementedError()
-
-    def end_call(self):
-        raise NotImplementedError()
-
-    def enter_pairing_mode(self):
-        raise NotImplementedError()
-
-    def next_track(self):
-        raise NotImplementedError()
-
-    def pause(self):
-        raise NotImplementedError()
-
-    def play(self):
-        raise NotImplementedError()
-
-    def power_off(self):
-        raise NotImplementedError()
-
-    def power_on(self):
-        raise NotImplementedError()
-
-    def previous_track(self):
-        raise NotImplementedError()
-
-    def reject_call(self):
-        raise NotImplementedError()
-
-    def volume_down(self):
-        raise NotImplementedError()
-
-    def volume_up(self):
-        raise NotImplementedError()
-
-
-class PixelBudsBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-
-    CMD_EVENT = 'EvtHex'
-
-    def __init__(self, pixel_buds_controller):
-        self.pixel_buds_controller = pixel_buds_controller
-
-    def format_cmd(self, cmd_name):
-        return self.CMD_EVENT + ' ' + apollo_sink_events.SINK_EVENTS[cmd_name]
-
-    @property
-    def mac_address(self):
-        return self.pixel_buds_controller.bluetooth_address
-
-    def accept_call(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAnswer'))
-
-    def end_call(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrCancelEnd'))
-
-    def enter_pairing_mode(self):
-        return self.pixel_buds_controller.set_pairing_mode()
-
-    def next_track(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpSkipForward'))
-
-    def pause(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpPause'))
-
-    def play(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpPlay'))
-
-    def power_off(self):
-        return self.pixel_buds_controller.power('Off')
-
-    def power_on(self):
-        return self.pixel_buds_controller.power('On')
-
-    def previous_track(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrAvrcpSkipBackward'))
-
-    def reject_call(self):
-        return self.pixel_buds_controller.cmd(
-            self.format_cmd('EventUsrReject'))
-
-    def volume_down(self):
-        return self.pixel_buds_controller.volume('Down')
-
-    def volume_up(self):
-        return self.pixel_buds_controller.volume('Up')
-
-
-class EarstudioReceiverBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-    def __init__(self, earstudio_controller):
-        self.earstudio_controller = earstudio_controller
-
-    @property
-    def mac_address(self):
-        return self.earstudio_controller.mac_address
-
-    def accept_call(self):
-        return self.earstudio_controller.press_accept_call()
-
-    def end_call(self):
-        return self.earstudio_controller.press_end_call()
-
-    def enter_pairing_mode(self):
-        return self.earstudio_controller.enter_pairing_mode()
-
-    def next_track(self):
-        return self.earstudio_controller.press_next()
-
-    def pause(self):
-        return self.earstudio_controller.press_play_pause()
-
-    def play(self):
-        return self.earstudio_controller.press_play_pause()
-
-    def power_off(self):
-        return self.earstudio_controller.power_off()
-
-    def power_on(self):
-        return self.earstudio_controller.power_on()
-
-    def previous_track(self):
-        return self.earstudio_controller.press_previous()
-
-    def reject_call(self):
-        return self.earstudio_controller.press_reject_call()
-
-    def volume_down(self):
-        return self.earstudio_controller.press_volume_down()
-
-    def volume_up(self):
-        return self.earstudio_controller.press_volume_up()
-
-
-class JaybirdX3EarbudsBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-    def __init__(self, jaybird_controller):
-        self.jaybird_controller = jaybird_controller
-
-    @property
-    def mac_address(self):
-        return self.jaybird_controller.mac_address
-
-    def accept_call(self):
-        return self.jaybird_controller.press_accept_call()
-
-    def end_call(self):
-        return self.jaybird_controller.press_reject_call()
-
-    def enter_pairing_mode(self):
-        return self.jaybird_controller.enter_pairing_mode()
-
-    def next_track(self):
-        return self.jaybird_controller.press_next()
-
-    def pause(self):
-        return self.jaybird_controller.press_play_pause()
-
-    def play(self):
-        return self.jaybird_controller.press_play_pause()
-
-    def power_off(self):
-        return self.jaybird_controller.power_off()
-
-    def power_on(self):
-        return self.jaybird_controller.power_on()
-
-    def previous_track(self):
-        return self.jaybird_controller.press_previous()
-
-    def reject_call(self):
-        return self.jaybird_controller.press_reject_call()
-
-    def volume_down(self):
-        return self.jaybird_controller.press_volume_down()
-
-    def volume_up(self):
-        return self.jaybird_controller.press_volume_up()
-
-
-class AndroidHeadsetBluetoothHandsfreeAbstractDevice(
-        BluetoothHandsfreeAbstractDevice):
-    def __init__(self, ad_controller):
-        self.ad_controller = ad_controller
-
-    @property
-    def mac_address(self):
-        """Getting device mac with more stability ensurance.
-
-        Sometime, getting mac address is flaky that it returns None. Adding a
-        loop to add more ensurance of getting correct mac address.
-        """
-        device_mac = None
-        start_time = time.time()
-        end_time = start_time + bt_default_timeout
-        while not device_mac and time.time() < end_time:
-            device_mac = self.ad_controller.droid.bluetoothGetLocalAddress()
-        asserts.assert_true(device_mac, 'Can not get the MAC address')
-        return device_mac
-
-    def accept_call(self):
-        return self.ad_controller.droid.telecomAcceptRingingCall(None)
-
-    def end_call(self):
-        return self.ad_controller.droid.telecomEndCall()
-
-    def enter_pairing_mode(self):
-        self.ad_controller.droid.bluetoothStartPairingHelper(True)
-        return self.ad_controller.droid.bluetoothMakeDiscoverable()
-
-    def next_track(self):
-        return (self.ad_controller.droid.bluetoothMediaPassthrough("skipNext"))
-
-    def pause(self):
-        return self.ad_controller.droid.bluetoothMediaPassthrough("pause")
-
-    def play(self):
-        return self.ad_controller.droid.bluetoothMediaPassthrough("play")
-
-    def power_off(self):
-        return self.ad_controller.droid.bluetoothToggleState(False)
-
-    def power_on(self):
-        return self.ad_controller.droid.bluetoothToggleState(True)
-
-    def previous_track(self):
-        return (self.ad_controller.droid.bluetoothMediaPassthrough("skipPrev"))
-
-    def reject_call(self):
-        return self.ad_controller.droid.telecomCallDisconnect(
-            self.ad_controller.droid.telecomCallGetCallIds()[0])
-
-    def reset(self):
-        return self.ad_controller.droid.bluetoothFactoryReset()
-
-    def volume_down(self):
-        target_step = self.ad_controller.droid.getMediaVolume() - 1
-        target_step = max(target_step, 0)
-        return self.ad_controller.droid.setMediaVolume(target_step)
-
-    def volume_up(self):
-        target_step = self.ad_controller.droid.getMediaVolume() + 1
-        max_step = self.ad_controller.droid.getMaxMediaVolume()
-        target_step = min(target_step, max_step)
-        return self.ad_controller.droid.setMediaVolume(target_step)
-
-
-class BluetoothHandsfreeAbstractDeviceFactory:
-    """Generates a BluetoothHandsfreeAbstractDevice for any device controller.
-    """
-
-    _controller_abstract_devices = {
-        'EarstudioReceiver': EarstudioReceiverBluetoothHandsfreeAbstractDevice,
-        'JaybirdX3Earbuds': JaybirdX3EarbudsBluetoothHandsfreeAbstractDevice,
-        'ParentDevice': PixelBudsBluetoothHandsfreeAbstractDevice,
-        'AndroidDevice': AndroidHeadsetBluetoothHandsfreeAbstractDevice
-    }
-
-    def generate(self, controller):
-        class_name = controller.__class__.__name__
-        if class_name in self._controller_abstract_devices:
-            return self._controller_abstract_devices[class_name](controller)
-        else:
-            validate_controller(controller, BluetoothHandsfreeAbstractDevice)
-            return controller
diff --git a/src/antlion/test_utils/abstract_devices/wlan_device.py b/src/antlion/test_utils/abstract_devices/wlan_device.py
deleted file mode 100644
index 2d11a44..0000000
--- a/src/antlion/test_utils/abstract_devices/wlan_device.py
+++ /dev/null
@@ -1,551 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import inspect
-import logging
-import time
-
-import antlion.test_utils.wifi.wifi_test_utils as awutils
-from antlion.utils import adb_shell_ping
-
-from antlion import asserts
-from antlion.controllers import iperf_client
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.android_device import AndroidDevice
-
-FUCHSIA_VALID_SECURITY_TYPES = {"none", "wep", "wpa", "wpa2", "wpa3"}
-
-
-def create_wlan_device(hardware_device):
-    """Creates a generic WLAN device based on type of device that is sent to
-    the functions.
-
-    Args:
-        hardware_device: A WLAN hardware device that is supported by ACTS.
-    """
-    if isinstance(hardware_device, FuchsiaDevice):
-        return FuchsiaWlanDevice(hardware_device)
-    elif isinstance(hardware_device, AndroidDevice):
-        return AndroidWlanDevice(hardware_device)
-    else:
-        raise ValueError('Unable to create WlanDevice for type %s' %
-                         type(hardware_device))
-
-
-class WlanDevice(object):
-    """Class representing a generic WLAN device.
-
-    Each object of this class represents a generic WLAN device.
-    Android device and Fuchsia devices are the currently supported devices/
-
-    Attributes:
-        device: A generic WLAN device.
-    """
-
-    def __init__(self, device):
-        self.device = device
-        self.log = logging
-        self.identifier = None
-
-    def wifi_toggle_state(self, state):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def reset_wifi(self):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def take_bug_report(self, test_name=None, begin_time=None):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def get_log(self, test_name, begin_time):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def turn_location_off_and_scan_toggle_off(self):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def associate(self,
-                  target_ssid,
-                  target_pwd=None,
-                  check_connectivity=True,
-                  hidden=False,
-                  target_security=None):
-        """Base generic WLAN interface.  Only called if not overriden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def disconnect(self):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def get_wlan_interface_id_list(self):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def get_default_wlan_test_interface(self):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def destroy_wlan_interface(self, iface_id):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def send_command(self, command):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def is_connected(self, ssid=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def can_ping(self,
-                 dest_ip,
-                 count=3,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=25,
-             additional_ping_params=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def hard_power_cycle(self, pdus=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def save_network(self, ssid):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def clear_saved_networks(self):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def create_iperf_client(self, test_interface=None):
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-    def feature_is_present(self, feature: str) -> bool:
-        raise NotImplementedError("{} must be defined.".format(
-            inspect.currentframe().f_code.co_name))
-
-
-class AndroidWlanDevice(WlanDevice):
-    """Class wrapper for an Android WLAN device.
-
-    Each object of this class represents a generic WLAN device.
-    Android device and Fuchsia devices are the currently supported devices/
-
-    Attributes:
-        android_device: An Android WLAN device.
-    """
-
-    def __init__(self, android_device):
-        super().__init__(android_device)
-        self.identifier = android_device.serial
-
-    def wifi_toggle_state(self, state):
-        awutils.wifi_toggle_state(self.device, state)
-
-    def reset_wifi(self):
-        awutils.reset_wifi(self.device)
-
-    def take_bug_report(self, test_name=None, begin_time=None):
-        self.device.take_bug_report(test_name, begin_time)
-
-    def get_log(self, test_name, begin_time):
-        self.device.cat_adb_log(test_name, begin_time)
-
-    def turn_location_off_and_scan_toggle_off(self):
-        awutils.turn_location_off_and_scan_toggle_off(self.device)
-
-    def associate(self,
-                  target_ssid,
-                  target_pwd=None,
-                  key_mgmt=None,
-                  check_connectivity=True,
-                  hidden=False,
-                  target_security=None):
-        """Function to associate an Android WLAN device.
-
-        Args:
-            target_ssid: SSID to associate to.
-            target_pwd: Password for the SSID, if necessary.
-            key_mgmt: The hostapd wpa_key_mgmt value, distinguishes wpa3 from
-                wpa2 for android tests.
-            check_connectivity: Whether to check for internet connectivity.
-            hidden: Whether the network is hidden.
-        Returns:
-            True if successfully connected to WLAN, False if not.
-        """
-        network = {'SSID': target_ssid, 'hiddenSSID': hidden}
-        if target_pwd:
-            network['password'] = target_pwd
-        if key_mgmt:
-            network['security'] = key_mgmt
-        try:
-            awutils.connect_to_wifi_network(
-                self.device,
-                network,
-                check_connectivity=check_connectivity,
-                hidden=hidden)
-            return True
-        except Exception as e:
-            self.device.log.info('Failed to associated (%s)' % e)
-            return False
-
-    def disconnect(self):
-        awutils.turn_location_off_and_scan_toggle_off(self.device)
-
-    def get_wlan_interface_id_list(self):
-        pass
-
-    def get_default_wlan_test_interface(self):
-        return 'wlan0'
-
-    def destroy_wlan_interface(self, iface_id):
-        pass
-
-    def send_command(self, command):
-        return self.device.adb.shell(str(command))
-
-    def is_connected(self, ssid=None):
-        wifi_info = self.device.droid.wifiGetConnectionInfo()
-        if ssid:
-            return 'BSSID' in wifi_info and wifi_info['SSID'] == ssid
-        return 'BSSID' in wifi_info
-
-    def can_ping(self,
-                 dest_ip,
-                 count=3,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
-        return adb_shell_ping(self.device,
-                              dest_ip=dest_ip,
-                              count=count,
-                              timeout=timeout)
-
-    def ping(self, dest_ip, count=3, interval=1000, timeout=1000, size=25):
-        pass
-
-    def hard_power_cycle(self, pdus):
-        pass
-
-    def save_network(self, ssid):
-        pass
-
-    def clear_saved_networks(self):
-        pass
-
-    def create_iperf_client(self, test_interface=None):
-        """ Returns an iperf client on the Android, without requiring a
-        specific config.
-
-        Args:
-            test_interface: optional, string, name of test interface.
-
-        Returns:
-            IPerfClient object
-        """
-        if not test_interface:
-            test_interface = self.get_default_wlan_test_interface()
-
-        return iperf_client.IPerfClientOverAdb(
-            android_device_or_serial=self.device,
-            test_interface=test_interface)
-
-    def feature_is_present(self, feature: str) -> bool:
-        pass
-
-
-class FuchsiaWlanDevice(WlanDevice):
-    """Class wrapper for an Fuchsia WLAN device.
-
-    Each object of this class represents a generic WLAN device.
-    Android device and Fuchsia devices are the currently supported devices/
-
-    Attributes:
-        fuchsia_device: A Fuchsia WLAN device.
-    """
-
-    def __init__(self, fuchsia_device):
-        super().__init__(fuchsia_device)
-        self.identifier = fuchsia_device.ip
-        self.device.configure_wlan()
-
-    def wifi_toggle_state(self, state):
-        """Stub for Fuchsia implementation."""
-
-    def reset_wifi(self):
-        """Stub for Fuchsia implementation."""
-
-    def take_bug_report(self, test_name=None, begin_time=None):
-        """Stub for Fuchsia implementation."""
-        self.device.take_bug_report(test_name, begin_time)
-
-    def get_log(self, test_name, begin_time):
-        """Stub for Fuchsia implementation."""
-
-    def turn_location_off_and_scan_toggle_off(self):
-        """Stub for Fuchsia implementation."""
-
-    def associate(self,
-                  target_ssid,
-                  target_pwd=None,
-                  key_mgmt=None,
-                  check_connectivity=True,
-                  hidden=False,
-                  target_security=None):
-        """Function to associate a Fuchsia WLAN device.
-
-        Args:
-            target_ssid: SSID to associate to.
-            target_pwd: Password for the SSID, if necessary.
-            key_mgmt: the hostapd wpa_key_mgmt, if specified.
-            check_connectivity: Whether to check for internet connectivity.
-            hidden: Whether the network is hidden.
-            target_security: string, target security for network, used to
-                save the network in policy connects (see wlan_policy_lib)
-        Returns:
-            True if successfully connected to WLAN, False if not.
-        """
-        if self.device.association_mechanism == 'drivers':
-            bss_scan_response = self.device.sl4f.wlan_lib.wlanScanForBSSInfo()
-            if bss_scan_response.get('error'):
-                self.log.error('Scan for BSS info failed. Err: %s' %
-                               bss_scan_response['error'])
-                return False
-
-            bss_descs_for_ssid = bss_scan_response['result'].get(
-                target_ssid, None)
-            if not bss_descs_for_ssid or len(bss_descs_for_ssid) < 1:
-                self.log.error(
-                    'Scan failed to find a BSS description for target_ssid %s'
-                    % target_ssid)
-                return False
-
-            connection_response = self.device.sl4f.wlan_lib.wlanConnectToNetwork(
-                target_ssid, bss_descs_for_ssid[0], target_pwd=target_pwd)
-            return self.device.check_connect_response(connection_response)
-        else:
-            return self.device.wlan_policy_controller.save_and_connect(
-                target_ssid, target_security, password=target_pwd)
-
-    def disconnect(self):
-        """Function to disconnect from a Fuchsia WLAN device.
-           Asserts if disconnect was not successful.
-        """
-        if self.device.association_mechanism == 'drivers':
-            disconnect_response = self.device.sl4f.wlan_lib.wlanDisconnect()
-            return self.device.check_disconnect_response(disconnect_response)
-        else:
-            return self.device.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections(
-            )
-
-    def status(self):
-        return self.device.sl4f.wlan_lib.wlanStatus()
-
-    def can_ping(self,
-                 dest_ip,
-                 count=3,
-                 interval=1000,
-                 timeout=1000,
-                 size=25,
-                 additional_ping_params=None):
-        return self.device.can_ping(
-            dest_ip,
-            count=count,
-            interval=interval,
-            timeout=timeout,
-            size=size,
-            additional_ping_params=additional_ping_params)
-
-    def ping(self,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=25,
-             additional_ping_params=None):
-        return self.device.ping(dest_ip,
-                                count=count,
-                                interval=interval,
-                                timeout=timeout,
-                                size=size,
-                                additional_ping_params=additional_ping_params)
-
-    def get_wlan_interface_id_list(self):
-        """Function to list available WLAN interfaces.
-
-        Returns:
-            A list of wlan interface IDs.
-        """
-        return self.device.sl4f.wlan_lib.wlanGetIfaceIdList().get('result')
-
-    def get_default_wlan_test_interface(self):
-        """Returns name of the WLAN client interface"""
-        return self.device.wlan_client_test_interface_name
-
-    def destroy_wlan_interface(self, iface_id):
-        """Function to associate a Fuchsia WLAN device.
-
-        Args:
-            target_ssid: SSID to associate to.
-            target_pwd: Password for the SSID, if necessary.
-            check_connectivity: Whether to check for internet connectivity.
-            hidden: Whether the network is hidden.
-        Returns:
-            True if successfully destroyed wlan interface, False if not.
-        """
-        result = self.device.sl4f.wlan_lib.wlanDestroyIface(iface_id)
-        if result.get('error') is None:
-            return True
-        else:
-            self.log.error("Failed to destroy interface with: {}".format(
-                result.get('error')))
-            return False
-
-    def send_command(self, command):
-        return self.device.ssh.run(str(command)).stdout
-
-    def is_connected(self, ssid=None):
-        """ Determines if wlan_device is connected to wlan network.
-
-        Args:
-            ssid (optional): string, to check if device is connect to a specific
-                network.
-
-        Returns:
-            True, if connected to a network or to the correct network when SSID
-                is provided.
-            False, if not connected or connect to incorrect network when SSID is
-                provided.
-        """
-        response = self.status()
-        if response.get('error'):
-            raise ConnectionError(
-                'Failed to get client network connection status')
-        result = response.get('result')
-        if isinstance(result, dict):
-            connected_to = result.get('Connected')
-            # TODO(https://fxbug.dev/85938): Remove backwards compatibility once
-            # ACTS is versioned with Fuchsia.
-            if not connected_to:
-                connected_to = result.get('connected_to')
-            if not connected_to:
-                return False
-
-            if ssid:
-                # Replace encoding errors instead of raising an exception.
-                # Since `ssid` is a string, this will not affect the test
-                # for equality.
-                connected_ssid = bytearray(connected_to['ssid']).decode(
-                    encoding='utf-8', errors='replace')
-                return ssid == connected_ssid
-            return True
-        return False
-
-    def hard_power_cycle(self, pdus):
-        self.device.reboot(reboot_type='hard', testbed_pdus=pdus)
-
-    def save_network(self, target_ssid, security_type=None, target_pwd=None):
-        if self.device.association_mechanism == 'drivers':
-            raise EnvironmentError(
-                'Cannot save network using the drivers. Saved networks are a '
-                'policy layer concept.')
-        if security_type and security_type not in FUCHSIA_VALID_SECURITY_TYPES:
-            raise TypeError('Invalid security type: %s' % security_type)
-        if not self.device.wlan_policy_controller.save_network(
-                target_ssid, security_type, password=target_pwd):
-            raise EnvironmentError('Failed to save network: %s' % target_ssid)
-
-    def clear_saved_networks(self):
-        if self.device.association_mechanism == 'drivers':
-            raise EnvironmentError(
-                'Cannot clear saved network using the drivers. Saved networks '
-                'are a policy layer concept.')
-        if not self.device.wlan_policy_controller.remove_all_networks():
-            raise EnvironmentError('Failed to clear saved networks')
-
-    def create_iperf_client(self, test_interface=None):
-        """ Returns an iperf client on the FuchsiaDevice, without requiring a
-        specific config.
-
-        Args:
-            test_interface: optional, string, name of test interface. Defaults
-                to first found wlan client interface.
-
-        Returns:
-            IPerfClient object
-        """
-        if not test_interface:
-            test_interface = self.get_default_wlan_test_interface()
-
-        # A package server is necessary to acquire the iperf3 client for
-        # some builds.
-        self.device.start_package_server()
-
-        return iperf_client.IPerfClientOverSsh(
-            {
-                'user': 'fuchsia',
-                'host': self.device.ip,
-                'ssh_config': self.device.ssh_config
-            },
-            ssh_provider=self.device.ssh,
-            test_interface=test_interface)
-
-    def feature_is_present(self, feature: str) -> bool:
-        return feature in self.device.wlan_features
diff --git a/src/antlion/test_utils/abstract_devices/wmm_transceiver.py b/src/antlion/test_utils/abstract_devices/wmm_transceiver.py
deleted file mode 100644
index c8b61cc..0000000
--- a/src/antlion/test_utils/abstract_devices/wmm_transceiver.py
+++ /dev/null
@@ -1,665 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import multiprocessing
-import time
-
-from datetime import datetime
-from uuid import uuid4
-
-from antlion import signals
-from antlion import tracelogger
-from antlion import utils
-from antlion.controllers import iperf_client
-from antlion.controllers import iperf_server
-
-AC_VO = 'AC_VO'
-AC_VI = 'AC_VI'
-AC_BE = 'AC_BE'
-AC_BK = 'AC_BK'
-
-# TODO(fxb/61421): Add tests to check all DSCP classes are mapped to the correct
-# AC (there are many that aren't included here). Requires implementation of
-# sniffer.
-DEFAULT_AC_TO_TOS_TAG_MAP = {
-    AC_VO: '0xC0',
-    AC_VI: '0x80',
-    AC_BE: '0x0',
-    AC_BK: '0x20'
-}
-UDP = 'udp'
-TCP = 'tcp'
-DEFAULT_IPERF_PORT = 5201
-DEFAULT_STREAM_TIME = 10
-DEFAULT_IP_ADDR_TIMEOUT = 15
-PROCESS_JOIN_TIMEOUT = 60
-AVAILABLE = True
-UNAVAILABLE = False
-
-
-class WmmTransceiverError(signals.ControllerError):
-    pass
-
-
-def create(config, identifier=None, wlan_devices=None, access_points=None):
-    """Creates a WmmTransceiver from a config.
-
-    Args:
-        config: dict, config parameters for the transceiver. Contains:
-            - iperf_config: dict, the config to use for creating IPerfClients
-                and IPerfServers (excluding port).
-            - port_range_start: int, the lower bound of the port range to use
-                for creating IPerfServers. Defaults to 5201.
-            - wlan_device: string, the identifier of the wlan_device used for
-                this WmmTransceiver (optional)
-
-        identifier: string, identifier for the WmmTransceiver. Must be provided
-            either as arg or in the config.
-        wlan_devices: list of WlanDevice objects from which to get the
-            wlan_device, if any, used as this transceiver
-        access_points: list of AccessPoint objects from which to get the
-            access_point, if any, used as this transceiver
-    """
-    try:
-        # If identifier is not provided as func arg, it must be provided via
-        # config file.
-        if not identifier:
-            identifier = config['identifier']
-        iperf_config = config['iperf_config']
-
-    except KeyError as err:
-        raise WmmTransceiverError(
-            'Parameter not provided as func arg, nor found in config: %s' %
-            err)
-
-    if wlan_devices is None:
-        wlan_devices = []
-
-    if access_points is None:
-        access_points = []
-
-    port_range_start = config.get('port_range_start', DEFAULT_IPERF_PORT)
-
-    wd = None
-    ap = None
-    if 'wlan_device' in config:
-        wd = _find_wlan_device(config['wlan_device'], wlan_devices)
-    elif 'access_point' in config:
-        ap = _find_access_point(config['access_point'], access_points)
-
-    return WmmTransceiver(iperf_config,
-                          identifier,
-                          wlan_device=wd,
-                          access_point=ap,
-                          port_range_start=port_range_start)
-
-
-def _find_wlan_device(wlan_device_identifier, wlan_devices):
-    """Returns WlanDevice based on string identifier (e.g. ip, serial, etc.)
-
-    Args:
-        wlan_device_identifier: string, identifier for the desired WlanDevice
-        wlan_devices: list, WlanDevices to search through
-
-    Returns:
-        WlanDevice, with identifier matching wlan_device_identifier
-
-    Raises:
-        WmmTransceiverError, if no WlanDevice matches identifier
-    """
-    for wd in wlan_devices:
-        if wlan_device_identifier == wd.identifier:
-            return wd
-    raise WmmTransceiverError('No WlanDevice with identifier: %s' %
-                              wlan_device_identifier)
-
-
-def _find_access_point(access_point_ip, access_points):
-    """Returns AccessPoint based on string ip address
-
-    Args:
-        access_point_ip: string, control plane ip addr of the desired AP,
-        access_points: list, AccessPoints to search through
-
-    Returns:
-        AccessPoint, with hostname matching access_point_ip
-
-    Raises:
-        WmmTransceiverError, if no AccessPoint matches ip"""
-    for ap in access_points:
-        if ap.ssh_settings.hostname == access_point_ip:
-            return ap
-    raise WmmTransceiverError('No AccessPoint with ip: %s' % access_point_ip)
-
-
-class WmmTransceiver(object):
-    """Object for handling WMM tagged streams between devices"""
-    def __init__(self,
-                 iperf_config,
-                 identifier,
-                 wlan_device=None,
-                 access_point=None,
-                 port_range_start=5201):
-
-        self.identifier = identifier
-        self.log = tracelogger.TraceLogger(
-            WmmTransceiverLoggerAdapter(logging.getLogger(),
-                                        {'identifier': self.identifier}))
-        # WlanDevice or AccessPoint, that is used as the transceiver. Only one
-        # will be set. This helps consolodate association, setup, teardown, etc.
-        self.wlan_device = wlan_device
-        self.access_point = access_point
-
-        # Parameters used to create IPerfClient and IPerfServer objects on
-        # device
-        self._iperf_config = iperf_config
-        self._test_interface = self._iperf_config.get('test_interface')
-        self._port_range_start = port_range_start
-        self._next_server_port = port_range_start
-
-        # Maps IPerfClients, used for streams from this device, to True if
-        # available, False if reserved
-        self._iperf_clients = {}
-
-        # Maps IPerfServers, used to receive streams from other devices, to True
-        # if available, False if reserved
-        self._iperf_servers = {}
-
-        # Maps ports of servers, which are provided to other transceivers, to
-        # the actual IPerfServer objects
-        self._iperf_server_ports = {}
-
-        # Maps stream UUIDs to IPerfClients reserved for that streams use
-        self._reserved_clients = {}
-
-        # Maps stream UUIDs to (WmmTransceiver, IPerfServer) tuples, where the
-        # server is reserved on the transceiver for that streams use
-        self._reserved_servers = {}
-
-        # Maps with shared memory functionality to be used across the parallel
-        # streams. active_streams holds UUIDs of streams that are currently
-        # running on this device (mapped to True, since there is no
-        # multiprocessing set). stream_results maps UUIDs of streams completed
-        # on this device to IPerfResult results for that stream.
-        self._manager = multiprocessing.Manager()
-        self._active_streams = self._manager.dict()
-        self._stream_results = self._manager.dict()
-
-        # Holds parameters for streams that are prepared to run asynchronously
-        # (i.e. resources have been allocated). Maps UUIDs of the future streams
-        # to a dict, containing the stream parameters.
-        self._pending_async_streams = {}
-
-        # Set of UUIDs of asynchronous streams that have at least started, but
-        # have not had their resources reclaimed yet
-        self._ran_async_streams = set()
-
-        # Set of stream parallel process, which can be joined if completed
-        # successfully, or  terminated and joined in the event of an error
-        self._running_processes = set()
-
-    def run_synchronous_traffic_stream(self, stream_parameters, subnet):
-        """Runs a traffic stream with IPerf3 between two WmmTransceivers and
-        saves the results.
-
-        Args:
-            stream_parameters: dict, containing parameters to used for the
-                stream. See _parse_stream_parameters for details.
-            subnet: string, the subnet of the network to use for the stream
-
-        Returns:
-            uuid: UUID object, identifier of the stream
-        """
-        (receiver, access_category, bandwidth,
-         stream_time) = self._parse_stream_parameters(stream_parameters)
-        uuid = uuid4()
-
-        (client, server_ip,
-         server_port) = self._get_stream_resources(uuid, receiver, subnet)
-
-        self._validate_server_address(server_ip, uuid)
-
-        self.log.info('Running synchronous stream to %s WmmTransceiver' %
-                      receiver.identifier)
-        self._run_traffic(uuid,
-                          client,
-                          server_ip,
-                          server_port,
-                          self._active_streams,
-                          self._stream_results,
-                          access_category=access_category,
-                          bandwidth=bandwidth,
-                          stream_time=stream_time)
-
-        self._return_stream_resources(uuid)
-        return uuid
-
-    def prepare_asynchronous_stream(self, stream_parameters, subnet):
-        """Reserves resources and saves configs for upcoming asynchronous
-        traffic streams, so they can be started more simultaneously.
-
-        Args:
-            stream_parameters: dict, containing parameters to used for the
-                stream. See _parse_stream_parameters for details.
-            subnet: string, the subnet of the network to use for the stream
-
-        Returns:
-            uuid: UUID object, identifier of the stream
-        """
-        (receiver, access_category, bandwidth,
-         time) = self._parse_stream_parameters(stream_parameters)
-        uuid = uuid4()
-
-        (client, server_ip,
-         server_port) = self._get_stream_resources(uuid, receiver, subnet)
-
-        self._validate_server_address(server_ip, uuid)
-
-        pending_stream_config = {
-            'client': client,
-            'server_ip': server_ip,
-            'server_port': server_port,
-            'access_category': access_category,
-            'bandwidth': bandwidth,
-            'time': time
-        }
-
-        self._pending_async_streams[uuid] = pending_stream_config
-        self.log.info('Stream to %s WmmTransceiver prepared.' %
-                      receiver.identifier)
-        return uuid
-
-    def start_asynchronous_streams(self, start_time=None):
-        """Starts pending asynchronous streams between two WmmTransceivers as
-        parallel processes.
-
-        Args:
-            start_time: float, time, seconds since epoch, at which to start the
-                stream (for better synchronicity). If None, start immediately.
-        """
-        for uuid in self._pending_async_streams:
-            pending_stream_config = self._pending_async_streams[uuid]
-            client = pending_stream_config['client']
-            server_ip = pending_stream_config['server_ip']
-            server_port = pending_stream_config['server_port']
-            access_category = pending_stream_config['access_category']
-            bandwidth = pending_stream_config['bandwidth']
-            time = pending_stream_config['time']
-
-            process = multiprocessing.Process(target=self._run_traffic,
-                                              args=[
-                                                  uuid, client, server_ip,
-                                                  server_port,
-                                                  self._active_streams,
-                                                  self._stream_results
-                                              ],
-                                              kwargs={
-                                                  'access_category':
-                                                  access_category,
-                                                  'bandwidth': bandwidth,
-                                                  'stream_time': time,
-                                                  'start_time': start_time
-                                              })
-
-            # This needs to be set here to ensure its marked active before
-            # it even starts.
-            self._active_streams[uuid] = True
-            process.start()
-            self._ran_async_streams.add(uuid)
-            self._running_processes.add(process)
-
-        self._pending_async_streams.clear()
-
-    def cleanup_asynchronous_streams(self, timeout=PROCESS_JOIN_TIMEOUT):
-        """Releases reservations on resources (IPerfClients and IPerfServers)
-        that were held for asynchronous streams, both pending and finished.
-        Attempts to join any running processes, logging an error if timeout is
-        exceeded.
-
-        Args:
-            timeout: time, in seconds, to wait for each running process, if any,
-                to join
-        """
-        self.log.info('Cleaning up any asynchronous streams.')
-
-        # Releases resources for any streams that were prepared, but no run
-        for uuid in self._pending_async_streams:
-            self.log.error(
-                'Pending asynchronous stream %s never ran. Cleaning.' % uuid)
-            self._return_stream_resources(uuid)
-        self._pending_async_streams.clear()
-
-        # Attempts to join any running streams, terminating them after timeout
-        # if necessary.
-        while self._running_processes:
-            process = self._running_processes.pop()
-            process.join(timeout)
-            if process.is_alive():
-                self.log.error(
-                    'Stream process failed to join in %s seconds. Terminating.'
-                    % timeout)
-                process.terminate()
-                process.join()
-        self._active_streams.clear()
-
-        # Release resources for any finished streams
-        while self._ran_async_streams:
-            uuid = self._ran_async_streams.pop()
-            self._return_stream_resources(uuid)
-
-    def get_results(self, uuid):
-        """Retrieves a streams IPerfResults from stream_results
-
-        Args:
-            uuid: UUID object, identifier of the stream
-        """
-        return self._stream_results.get(uuid, None)
-
-    def destroy_resources(self):
-        for server in self._iperf_servers:
-            server.stop()
-        self._iperf_servers.clear()
-        self._iperf_server_ports.clear()
-        self._iperf_clients.clear()
-        self._next_server_port = self._port_range_start
-        self._stream_results.clear()
-
-    @property
-    def has_active_streams(self):
-        return bool(self._active_streams)
-
-    # Helper Functions
-
-    def _run_traffic(self,
-                     uuid,
-                     client,
-                     server_ip,
-                     server_port,
-                     active_streams,
-                     stream_results,
-                     access_category=None,
-                     bandwidth=None,
-                     stream_time=DEFAULT_STREAM_TIME,
-                     start_time=None):
-        """Runs an iperf3 stream.
-
-        1. Adds stream UUID to active_streams
-        2. Runs stream
-        3. Saves results to stream_results
-        4. Removes stream UUID from active_streams
-
-        Args:
-            uuid: UUID object, identifier for stream
-            client: IPerfClient object on device
-            server_ip: string, ip address of IPerfServer for stream
-            server_port: int, port of the IPerfServer for stream
-            active_streams: multiprocessing.Manager.dict, which holds stream
-                UUIDs of active streams on the device
-            stream_results: multiprocessing.Manager.dict, which maps stream
-                UUIDs of streams to IPerfResult objects
-            access_category: string, WMM access category to use with iperf
-                (AC_BK, AC_BE, AC_VI, AC_VO). Unset if None.
-            bandwidth: int, bandwidth in mbps to use with iperf. Implies UDP.
-                Unlimited if None.
-            stream_time: int, time in seconds, to run iperf stream
-            start_time: float, time, seconds since epoch, at which to start the
-                stream (for better synchronicity). If None, start immediately.
-        """
-        active_streams[uuid] = True
-        # SSH sessions must be started within the process that is going to
-        # use it.
-        if type(client) == iperf_client.IPerfClientOverSsh:
-            with utils.SuppressLogOutput():
-                client.start_ssh()
-
-        ac_flag = ''
-        bandwidth_flag = ''
-        time_flag = '-t %s' % stream_time
-
-        if access_category:
-            ac_flag = ' -S %s' % DEFAULT_AC_TO_TOS_TAG_MAP[access_category]
-
-        if bandwidth:
-            bandwidth_flag = ' -u -b %sM' % bandwidth
-
-        iperf_flags = '-p %s -i 1 %s%s%s -J' % (server_port, time_flag,
-                                                ac_flag, bandwidth_flag)
-        if not start_time:
-            start_time = time.time()
-        time_str = datetime.fromtimestamp(start_time).strftime('%H:%M:%S.%f')
-        self.log.info(
-            'At %s, starting %s second stream to %s:%s with (AC: %s, Bandwidth: %s)'
-            % (time_str, stream_time, server_ip, server_port, access_category,
-               bandwidth if bandwidth else 'Unlimited'))
-
-        # If present, wait for stream start time
-        if start_time:
-            current_time = time.time()
-            while current_time < start_time:
-                current_time = time.time()
-        path = client.start(server_ip, iperf_flags, '%s' % uuid)
-        stream_results[uuid] = iperf_server.IPerfResult(
-            path, reporting_speed_units='mbps')
-
-        if type(client) == iperf_client.IPerfClientOverSsh:
-            client.close_ssh()
-        active_streams.pop(uuid)
-
-    def _get_stream_resources(self, uuid, receiver, subnet):
-        """Reserves an IPerfClient and IPerfServer for a stream.
-
-        Args:
-            uuid: UUID object, identifier of the stream
-            receiver: WmmTransceiver object, which will be the streams receiver
-            subnet: string, subnet of test network, to retrieve the appropriate
-                server address
-
-        Returns:
-            (IPerfClient, string, int) representing the client, server address,
-            and server port to use for the stream
-        """
-        client = self._get_client(uuid)
-        server_ip, server_port = self._get_server(receiver, uuid, subnet)
-        return (client, server_ip, server_port)
-
-    def _return_stream_resources(self, uuid):
-        """Releases reservations on a streams IPerfClient and IPerfServer, so
-        they can be used by a future stream.
-
-        Args:
-            uuid: UUID object, identifier of the stream
-        """
-        if uuid in self._active_streams:
-            raise EnvironmentError('Resource still being used by stream %s' %
-                                   uuid)
-        (receiver, server_port) = self._reserved_servers.pop(uuid)
-        receiver._release_server(server_port)
-        client = self._reserved_clients.pop(uuid)
-        self._iperf_clients[client] = AVAILABLE
-
-    def _get_client(self, uuid):
-        """Retrieves and reserves IPerfClient for use in a stream. If none are
-        available, a new one is created.
-
-        Args:
-            uuid: UUID object, identifier for stream, used to link client to
-                stream for teardown
-
-        Returns:
-            IPerfClient on device
-        """
-        reserved_client = None
-        for client in self._iperf_clients:
-            if self._iperf_clients[client] == AVAILABLE:
-                reserved_client = client
-                break
-        else:
-            reserved_client = iperf_client.create([self._iperf_config])[0]
-            # Due to the nature of multiprocessing, ssh connections must
-            # be started inside the parallel processes, so it must be closed
-            # here.
-            if type(reserved_client) == iperf_client.IPerfClientOverSsh:
-                reserved_client.close_ssh()
-
-        self._iperf_clients[reserved_client] = UNAVAILABLE
-        self._reserved_clients[uuid] = reserved_client
-        return reserved_client
-
-    def _get_server(self, receiver, uuid, subnet):
-        """Retrieves the address and port of a reserved IPerfServer object from
-        the receiver object for use in a stream.
-
-        Args:
-            receiver: WmmTransceiver, to get an IPerfServer from
-            uuid: UUID, identifier for stream, used to link server to stream
-                for teardown
-            subnet: string, subnet of test network, to retrieve the appropriate
-                server address
-
-        Returns:
-            (string, int) representing the IPerfServer address and port
-        """
-        (server_ip, server_port) = receiver._reserve_server(subnet)
-        self._reserved_servers[uuid] = (receiver, server_port)
-        return (server_ip, server_port)
-
-    def _reserve_server(self, subnet):
-        """Reserves an available IPerfServer for use in a stream from another
-        WmmTransceiver. If none are available, a new one is created.
-
-        Args:
-            subnet: string, subnet of test network, to retrieve the appropriate
-                server address
-
-        Returns:
-            (string, int) representing the IPerfServer address and port
-        """
-        reserved_server = None
-        for server in self._iperf_servers:
-            if self._iperf_servers[server] == AVAILABLE:
-                reserved_server = server
-                break
-        else:
-            iperf_server_config = self._iperf_config
-            iperf_server_config.update({'port': self._next_server_port})
-            self._next_server_port += 1
-            reserved_server = iperf_server.create([iperf_server_config])[0]
-            self._iperf_server_ports[reserved_server.port] = reserved_server
-
-        self._iperf_servers[reserved_server] = UNAVAILABLE
-        reserved_server.start()
-        end_time = time.time() + DEFAULT_IP_ADDR_TIMEOUT
-        while time.time() < end_time:
-            if self.wlan_device:
-                addresses = utils.get_interface_ip_addresses(
-                    self.wlan_device.device, self._test_interface)
-            else:
-                addresses = reserved_server.get_interface_ip_addresses(
-                    self._test_interface)
-            for addr in addresses['ipv4_private']:
-                if utils.ip_in_subnet(addr, subnet):
-                    return (addr, reserved_server.port)
-        raise AttributeError(
-            'Reserved server has no ipv4 address in the %s subnet' % subnet)
-
-    def _release_server(self, server_port):
-        """Releases reservation on IPerfServer, which was held for a stream
-        from another WmmTransceiver.
-
-        Args:
-            server_port: int, the port of the IPerfServer being returned (since)
-                it is the identifying characteristic
-        """
-        server = self._iperf_server_ports[server_port]
-        server.stop()
-        self._iperf_servers[server] = AVAILABLE
-
-    def _validate_server_address(self, server_ip, uuid, timeout=60):
-        """ Verifies server address can be pinged before attempting to run
-        traffic, since iperf is unforgiving when the server is unreachable.
-
-        Args:
-            server_ip: string, ip address of the iperf server
-            uuid: string, uuid of the stream to use this server
-            timeout: int, time in seconds to wait for server to respond to pings
-
-        Raises:
-            WmmTransceiverError, if, after timeout, server ip is unreachable.
-        """
-        self.log.info('Verifying server address (%s) is reachable.' %
-                      server_ip)
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            if self.can_ping(server_ip):
-                break
-            else:
-                self.log.debug(
-                    'Could not ping server address (%s). Retrying in 1 second.'
-                    % (server_ip))
-                time.sleep(1)
-        else:
-            self._return_stream_resources(uuid)
-            raise WmmTransceiverError('IPerfServer address (%s) unreachable.' %
-                                      server_ip)
-
-    def can_ping(self, dest_ip):
-        """ Utilizes can_ping function in wlan_device or access_point device to
-        ping dest_ip
-
-        Args:
-            dest_ip: string, ip address to ping
-
-        Returns:
-            True, if dest address is reachable
-            False, otherwise
-        """
-        if self.wlan_device:
-            return self.wlan_device.can_ping(dest_ip)
-        else:
-            return self.access_point.can_ping(dest_ip)
-
-    def _parse_stream_parameters(self, stream_parameters):
-        """Parses stream_parameters from dictionary.
-
-        Args:
-            stream_parameters: dict of stream parameters
-                'receiver': WmmTransceiver, the receiver for the stream
-                'access_category': String, the access category to use for the
-                    stream. Unset if None.
-                'bandwidth': int, bandwidth in mbps for the stream. If set,
-                    implies UDP. If unset, implies TCP and unlimited bandwidth.
-                'time': int, time in seconds to run stream.
-
-        Returns:
-            (receiver, access_category, bandwidth, time) as
-            (WmmTransceiver, String, int, int)
-        """
-        receiver = stream_parameters['receiver']
-        access_category = stream_parameters.get('access_category', None)
-        bandwidth = stream_parameters.get('bandwidth', None)
-        time = stream_parameters.get('time', DEFAULT_STREAM_TIME)
-        return (receiver, access_category, bandwidth, time)
-
-
-class WmmTransceiverLoggerAdapter(logging.LoggerAdapter):
-    def process(self, msg, kwargs):
-        if self.extra['identifier']:
-            log_identifier = ' | %s' % self.extra['identifier']
-        else:
-            log_identifier = ''
-        msg = "[WmmTransceiver%s] %s" % (log_identifier, msg)
-        return (msg, kwargs)
diff --git a/src/antlion/test_utils/audio_analysis_lib/__init__.py b/src/antlion/test_utils/audio_analysis_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/audio_analysis_lib/audio_analysis.py b/src/antlion/test_utils/audio_analysis_lib/audio_analysis.py
deleted file mode 100644
index a4273c5..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/audio_analysis.py
+++ /dev/null
@@ -1,669 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module provides utilities to do audio data analysis."""
-
-import logging
-import numpy
-import soundfile
-from scipy.signal import blackmanharris
-from scipy.signal import iirnotch
-from scipy.signal import lfilter
-
-# The default block size of pattern matching.
-ANOMALY_DETECTION_BLOCK_SIZE = 120
-
-# Only peaks with coefficient greater than 0.01 of the first peak should be
-# considered. Note that this correspond to -40dB in the spectrum.
-DEFAULT_MIN_PEAK_RATIO = 0.01
-
-# The minimum RMS value of meaningful audio data.
-MEANINGFUL_RMS_THRESHOLD = 0.001
-
-# The minimal signal norm value.
-_MINIMUM_SIGNAL_NORM = 0.001
-
-# The default pattern mathing threshold. By experiment, this threshold
-# can tolerate normal noise of 0.3 amplitude when sine wave signal
-# amplitude is 1.
-PATTERN_MATCHING_THRESHOLD = 0.85
-
-# The default number of samples within the analysis step size that the
-# difference between two anomaly time values can be to be grouped together.
-ANOMALY_GROUPING_TOLERANCE = 1.0
-
-# Window size for peak detection.
-PEAK_WINDOW_SIZE_HZ = 20
-
-
-class RMSTooSmallError(Exception):
-    """Error when signal RMS is too small."""
-
-
-class EmptyDataError(Exception):
-    """Error when signal is empty."""
-
-
-def normalize_signal(signal, saturate_value):
-    """Normalizes the signal with respect to the saturate value.
-
-    Args:
-        signal: A list for one-channel PCM data.
-        saturate_value: The maximum value that the PCM data might be.
-
-    Returns:
-        A numpy array containing normalized signal. The normalized signal has
-            value -1 and 1 when it saturates.
-
-    """
-    signal = numpy.array(signal)
-    return signal / float(saturate_value)
-
-
-def spectral_analysis(signal,
-                      rate,
-                      min_peak_ratio=DEFAULT_MIN_PEAK_RATIO,
-                      peak_window_size_hz=PEAK_WINDOW_SIZE_HZ):
-    """Gets the dominant frequencies by spectral analysis.
-
-    Args:
-        signal: A list of numbers for one-channel PCM data. This should be
-                   normalized to [-1, 1] so the function can check if signal RMS
-                   is too small to be meaningful.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        min_peak_ratio: The minimum peak_i/peak_0 ratio such that the
-                           peaks other than the greatest one should be
-                           considered.
-                           This is to ignore peaks that are too small compared
-                           to the first peak peak_0.
-        peak_window_size_hz: The window size in Hz to find the peaks.
-                                The minimum differences between found peaks will
-                                be half of this value.
-
-    Returns:
-        A list of tuples:
-              [(peak_frequency_0, peak_coefficient_0),
-               (peak_frequency_1, peak_coefficient_1),
-               (peak_frequency_2, peak_coefficient_2), ...]
-              where the tuples are sorted by coefficients. The last
-              peak_coefficient will be no less than peak_coefficient *
-              min_peak_ratio. If RMS is less than MEANINGFUL_RMS_THRESHOLD,
-              returns [(0, 0)].
-
-    """
-    # Checks the signal is meaningful.
-    if len(signal) == 0:
-        raise EmptyDataError('Signal data is empty')
-
-    signal_rms = numpy.linalg.norm(signal) / numpy.sqrt(len(signal))
-    logging.debug('signal RMS = %s', signal_rms)
-
-    # If RMS is too small, set dominant frequency and coefficient to 0.
-    if signal_rms < MEANINGFUL_RMS_THRESHOLD:
-        logging.warning(
-            'RMS %s is too small to be meaningful. Set frequency to 0.',
-            signal_rms)
-        return [(0, 0)]
-
-    logging.debug('Doing spectral analysis ...')
-
-    # First, pass signal through a window function to mitigate spectral leakage.
-    y_conv_w = signal * numpy.hanning(len(signal))
-
-    length = len(y_conv_w)
-
-    # x_f is the frequency in Hz, y_f is the transformed coefficient.
-    x_f = _rfft_freq(length, rate)
-    y_f = 2.0 / length * numpy.fft.rfft(y_conv_w)
-
-    # y_f is complex so consider its absolute value for magnitude.
-    abs_y_f = numpy.abs(y_f)
-    threshold = max(abs_y_f) * min_peak_ratio
-
-    # Suppresses all coefficients that are below threshold.
-    for i in range(len(abs_y_f)):
-        if abs_y_f[i] < threshold:
-            abs_y_f[i] = 0
-
-    # Gets the peak detection window size in indice.
-    # x_f[1] is the frequency difference per index.
-    peak_window_size = int(peak_window_size_hz / x_f[1])
-
-    # Detects peaks.
-    peaks = peak_detection(abs_y_f, peak_window_size)
-
-    # Transform back the peak location from index to frequency.
-    results = []
-    for index, value in peaks:
-        results.append((x_f[int(index)], value))
-    return results
-
-
-def _rfft_freq(length, rate):
-    """Gets the frequency at each index of real FFT.
-
-    Args:
-        length: The window length of FFT.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-
-    Returns:
-        A numpy array containing frequency corresponding to numpy.fft.rfft
-            result at each index.
-
-    """
-    # The difference in Hz between each index.
-    val = rate / float(length)
-    # Only care half of frequencies for FFT on real signal.
-    result_length = length // 2 + 1
-    return numpy.linspace(0, (result_length - 1) * val, result_length)
-
-
-def peak_detection(array, window_size):
-    """Detects peaks in an array.
-
-    A point (i, array[i]) is a peak if array[i] is the maximum among
-    array[i - half_window_size] to array[i + half_window_size].
-    If array[i - half_window_size] to array[i + half_window_size] are all equal,
-    then there is no peak in this window.
-    Note that we only consider peak with value greater than 0.
-
-    Args:
-        array: The input array to detect peaks in. Array is a list of
-        absolute values of the magnitude of transformed coefficient.
-
-        window_size: The window to detect peaks.
-
-    Returns:
-        A list of tuples:
-              [(peak_index_1, peak_value_1), (peak_index_2, peak_value_2), ...]
-              where the tuples are sorted by peak values.
-
-    """
-    half_window_size = window_size / 2
-    length = len(array)
-
-    def mid_is_peak(array, mid, left, right):
-        """Checks if value at mid is the largest among left to right in array.
-
-        Args:
-            array: A list of numbers.
-            mid: The mid index.
-            left: The left index.
-            rigth: The right index.
-
-        Returns:
-            A tuple (is_peak, next_candidate)
-                  is_peak is True if array[index] is the maximum among numbers
-                  in array between index [left, right] inclusively.
-                  next_candidate is the index of next candidate for peak if
-                  is_peak is False. It is the index of maximum value in
-                  [mid + 1, right]. If is_peak is True, next_candidate is
-                  right + 1.
-
-        """
-        value_mid = array[int(mid)]
-        is_peak = True
-        next_peak_candidate_index = None
-
-        # Check the left half window.
-        for index in range(int(left), int(mid)):
-            if array[index] >= value_mid:
-                is_peak = False
-                break
-
-        # Mid is at the end of array.
-        if mid == right:
-            return is_peak, right + 1
-
-        # Check the right half window and also record next candidate.
-        # Favor the larger index for next_peak_candidate_index.
-        for index in range(int(right), int(mid), -1):
-            if (next_peak_candidate_index is None
-                    or array[index] > array[next_peak_candidate_index]):
-                next_peak_candidate_index = index
-
-        if array[next_peak_candidate_index] >= value_mid:
-            is_peak = False
-
-        if is_peak:
-            next_peak_candidate_index = right + 1
-
-        return is_peak, next_peak_candidate_index
-
-    results = []
-    mid = 0
-    next_candidate_idx = None
-    while mid < length:
-        left = max(0, mid - half_window_size)
-        right = min(length - 1, mid + half_window_size)
-
-        # Only consider value greater than 0.
-        if array[int(mid)] == 0:
-            mid = mid + 1
-            continue
-
-        is_peak, next_candidate_idx = mid_is_peak(array, mid, left, right)
-
-        if is_peak:
-            results.append((mid, array[int(mid)]))
-
-        # Use the next candidate found in [mid + 1, right], or right + 1.
-        mid = next_candidate_idx
-
-    # Sort the peaks by values.
-    return sorted(results, key=lambda x: x[1], reverse=True)
-
-
-def anomaly_detection(signal,
-                      rate,
-                      freq,
-                      block_size=ANOMALY_DETECTION_BLOCK_SIZE,
-                      threshold=PATTERN_MATCHING_THRESHOLD):
-    """Detects anomaly in a sine wave signal.
-
-    This method detects anomaly in a sine wave signal by matching
-    patterns of each block.
-    For each moving window of block in the test signal, checks if there
-    is any block in golden signal that is similar to this block of test signal.
-    If there is such a block in golden signal, then this block of test
-    signal is matched and there is no anomaly in this block of test signal.
-    If there is any block in test signal that is not matched, then this block
-    covers an anomaly.
-    The block of test signal starts from index 0, and proceeds in steps of
-    half block size. The overlapping of test signal blocks makes sure there must
-    be at least one block covering the transition from sine wave to anomaly.
-
-    Args:
-        signal: A 1-D array-like object for 1-channel PCM data.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        freq: The expected frequency of signal.
-        block_size: The block size in samples to detect anomaly.
-        threshold: The threshold of correlation index to be judge as matched.
-
-    Returns:
-        A list containing time markers in seconds that have an anomaly within
-            block_size samples.
-
-    """
-    if len(signal) == 0:
-        raise EmptyDataError('Signal data is empty')
-
-    golden_y = _generate_golden_pattern(rate, freq, block_size)
-
-    results = []
-
-    for start in range(0, len(signal), int(block_size / 2)):
-        end = start + block_size
-        test_signal = signal[start:end]
-        matched = _moving_pattern_matching(golden_y, test_signal, threshold)
-        if not matched:
-            results.append(start)
-
-    results = [float(x) / rate for x in results]
-
-    return results
-
-
-def get_anomaly_durations(signal,
-                          rate,
-                          freq,
-                          block_size=ANOMALY_DETECTION_BLOCK_SIZE,
-                          threshold=PATTERN_MATCHING_THRESHOLD,
-                          tolerance=ANOMALY_GROUPING_TOLERANCE):
-    """Detect anomalies in a sine wav and return their start and end times.
-
-    Run anomaly_detection function and parse resulting array of time values into
-    discrete anomalies defined by a start and end time tuple. Time values are
-    judged to be part of the same anomaly if they lie within a given tolerance
-    of half the block_size number of samples of each other.
-
-    Args:
-        signal: A 1-D array-like object for 1-channel PCM data.
-        rate (int): Sampling rate in samples per second.
-            Example inputs: 44100, 48000
-        freq (int): The expected frequency of signal.
-        block_size (int): The block size in samples to detect anomaly.
-        threshold (float): The threshold of correlation index to be judge as
-            matched.
-        tolerance (float): The number of samples greater than block_size / 2
-            that the sample distance between two anomaly time values can be and
-            still be grouped as the same anomaly.
-    Returns:
-        bounds (list): a list of (start, end) tuples where start and end are the
-            boundaries in seconds of the detected anomaly.
-    """
-    bounds = []
-    anoms = anomaly_detection(signal, rate, freq, block_size, threshold)
-    if len(anoms) == 0:
-        return bounds
-    end = anoms[0]
-    start = anoms[0]
-    for i in range(len(anoms) - 1):
-        end = anoms[i]
-        sample_diff = abs(anoms[i] - anoms[i + 1]) * rate
-        # We require a tolerance because sample_diff may be slightly off due to
-        # float rounding errors in Python.
-        if sample_diff > block_size / 2 + tolerance:
-            bounds.append((start, end))
-            start = anoms[i + 1]
-    bounds.append((start, end))
-    return bounds
-
-
-def _generate_golden_pattern(rate, freq, block_size):
-    """Generates a golden pattern of certain frequency.
-
-    The golden pattern must cover all the possibilities of waveforms in a
-    block. So, we need a golden pattern covering 1 period + 1 block size,
-    such that the test block can start anywhere in a period, and extends
-    a block size.
-
-    |period |1 bk|
-    |       |    |
-     . .     . .
-    .   .   .   .
-         . .     .
-
-    Args:
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        freq: The frequency of golden pattern.
-        block_size: The block size in samples to detect anomaly.
-
-    Returns:
-        A 1-D array for golden pattern.
-
-    """
-    samples_in_a_period = int(rate / freq) + 1
-    samples_in_golden_pattern = samples_in_a_period + block_size
-    golden_x = numpy.linspace(0.0,
-                              (samples_in_golden_pattern - 1) * 1.0 / rate,
-                              samples_in_golden_pattern)
-    golden_y = numpy.sin(freq * 2.0 * numpy.pi * golden_x)
-    return golden_y
-
-
-def _moving_pattern_matching(golden_signal, test_signal, threshold):
-    """Checks if test_signal is similar to any block of golden_signal.
-
-    Compares test signal with each block of golden signal by correlation
-    index. If there is any block of golden signal that is similar to
-    test signal, then it is matched.
-
-    Args:
-        golden_signal: A 1-D array for golden signal.
-        test_signal: A 1-D array for test signal.
-        threshold: The threshold of correlation index to be judge as matched.
-
-    Returns:
-        True if there is a match. False otherwise.
-
-        ValueError: if test signal is longer than golden signal.
-
-    """
-    if len(golden_signal) < len(test_signal):
-        raise ValueError('Test signal is longer than golden signal')
-
-    block_length = len(test_signal)
-    number_of_movings = len(golden_signal) - block_length + 1
-    correlation_indices = []
-    for moving_index in range(number_of_movings):
-        # Cuts one block of golden signal from start index.
-        # The block length is the same as test signal.
-        start = moving_index
-        end = start + block_length
-        golden_signal_block = golden_signal[start:end]
-        try:
-            correlation_index = _get_correlation_index(golden_signal_block,
-                                                       test_signal)
-        except TestSignalNormTooSmallError:
-            logging.info(
-                'Caught one block of test signal that has no meaningful norm')
-            return False
-        correlation_indices.append(correlation_index)
-
-    # Checks if the maximum correlation index is high enough.
-    max_corr = max(correlation_indices)
-    if max_corr < threshold:
-        logging.debug('Got one unmatched block with max_corr: %s', max_corr)
-        return False
-    return True
-
-
-class GoldenSignalNormTooSmallError(Exception):
-    """Exception when golden signal norm is too small."""
-
-
-class TestSignalNormTooSmallError(Exception):
-    """Exception when test signal norm is too small."""
-
-
-def _get_correlation_index(golden_signal, test_signal):
-    """Computes correlation index of two signal of same length.
-
-    Args:
-        golden_signal: An 1-D array-like object.
-        test_signal: An 1-D array-like object.
-
-    Raises:
-        ValueError: if two signal have different lengths.
-        GoldenSignalNormTooSmallError: if golden signal norm is too small
-        TestSignalNormTooSmallError: if test signal norm is too small.
-
-    Returns:
-        The correlation index.
-    """
-    if len(golden_signal) != len(test_signal):
-        raise ValueError('Only accepts signal of same length: %s, %s' %
-                         (len(golden_signal), len(test_signal)))
-
-    norm_golden = numpy.linalg.norm(golden_signal)
-    norm_test = numpy.linalg.norm(test_signal)
-    if norm_golden <= _MINIMUM_SIGNAL_NORM:
-        raise GoldenSignalNormTooSmallError(
-            'No meaningful data as norm is too small.')
-    if norm_test <= _MINIMUM_SIGNAL_NORM:
-        raise TestSignalNormTooSmallError(
-            'No meaningful data as norm is too small.')
-
-    # The 'valid' cross correlation result of two signals of same length will
-    # contain only one number.
-    correlation = numpy.correlate(golden_signal, test_signal, 'valid')[0]
-    return correlation / (norm_golden * norm_test)
-
-
-def fundamental_freq(signal, rate):
-    """Return fundamental frequency of signal by finding max in freq domain.
-    """
-    dft = numpy.fft.rfft(signal)
-    fund_freq = rate * (numpy.argmax(numpy.abs(dft)) / len(signal))
-    return fund_freq
-
-
-def rms(array):
-    """Return the root mean square of array.
-    """
-    return numpy.sqrt(numpy.mean(numpy.absolute(array)**2))
-
-
-def THDN(signal, rate, q, freq):
-    """Measure the THD+N for a signal and return the results.
-    Subtract mean to center signal around 0, remove fundamental frequency from
-    dft using notch filter and transform back into signal to get noise. Compute
-    ratio of RMS of noise signal to RMS of entire signal.
-
-    Args:
-        signal: array of values representing an audio signal.
-        rate: sample rate in Hz of the signal.
-        q: quality factor for the notch filter.
-        freq: fundamental frequency of the signal. All other frequencies
-            are noise. If not specified, will be calculated using FFT.
-    Returns:
-        THDN: THD+N ratio calculated from the ratio of RMS of pure harmonics
-            and noise signal to RMS of original signal.
-    """
-    # Normalize and window signal.
-    signal -= numpy.mean(signal)
-    windowed = signal * blackmanharris(len(signal))
-    # Find fundamental frequency to remove if not specified.
-    freq = freq or fundamental_freq(windowed, rate)
-    # Create notch filter to isolate noise.
-    w0 = freq / (rate / 2.0)
-    b, a = iirnotch(w0, q)
-    noise = lfilter(b, a, windowed)
-    # Calculate THD+N.
-    THDN = rms(noise) / rms(windowed)
-    return THDN
-
-
-def max_THDN(signal, rate, step_size, window_size, q, freq):
-    """Analyze signal with moving window and find maximum THD+N value.
-    Args:
-        signal: array representing the signal
-        rate: sample rate of the signal.
-        step_size: how many samples to move the window by for each analysis.
-        window_size: how many samples to analyze each time.
-        q: quality factor for the notch filter.
-        freq: fundamental frequency of the signal. All other frequencies
-            are noise. If not specified, will be calculated using FFT.
-    Returns:
-        greatest_THDN: the greatest THD+N value found across all windows
-    """
-    greatest_THDN = 0
-    cur = 0
-    while cur + window_size < len(signal):
-        window = signal[cur:cur + window_size]
-        res = THDN(window, rate, q, freq)
-        cur += step_size
-        if res > greatest_THDN:
-            greatest_THDN = res
-    return greatest_THDN
-
-
-def get_file_THDN(filename, q, freq=None):
-    """Get THD+N values for each channel of an audio file.
-
-    Args:
-        filename (str): path to the audio file.
-          (supported file types: http://www.mega-nerd.com/libsndfile/#Features)
-        q (float): quality factor for the notch filter.
-        freq (int|float): fundamental frequency of the signal. All other
-            frequencies are noise. If None, will be calculated with FFT.
-    Returns:
-        channel_results (list): THD+N value for each channel's signal.
-            List index corresponds to channel index.
-    """
-    audio_file = soundfile.SoundFile(filename)
-    channel_results = []
-    if audio_file.channels == 1:
-        channel_results.append(
-            THDN(signal=audio_file.read(),
-                 rate=audio_file.samplerate,
-                 q=q,
-                 freq=freq))
-    else:
-        for ch_no, channel in enumerate(audio_file.read().transpose()):
-            channel_results.append(
-                THDN(signal=channel,
-                     rate=audio_file.samplerate,
-                     q=q,
-                     freq=freq))
-    return channel_results
-
-
-def get_file_max_THDN(filename, step_size, window_size, q, freq=None):
-    """Get max THD+N value across analysis windows for each channel of file.
-
-    Args:
-        filename (str): path to the audio file.
-          (supported file types: http://www.mega-nerd.com/libsndfile/#Features)
-        step_size: how many samples to move the window by for each analysis.
-        window_size: how many samples to analyze each time.
-        q (float): quality factor for the notch filter.
-        freq (int|float): fundamental frequency of the signal. All other
-            frequencies are noise. If None, will be calculated with FFT.
-    Returns:
-        channel_results (list): max THD+N value for each channel's signal.
-            List index corresponds to channel index.
-    """
-    audio_file = soundfile.SoundFile(filename)
-    channel_results = []
-    if audio_file.channels == 1:
-        channel_results.append(
-            max_THDN(signal=audio_file.read(),
-                     rate=audio_file.samplerate,
-                     step_size=step_size,
-                     window_size=window_size,
-                     q=q,
-                     freq=freq))
-    else:
-        for ch_no, channel in enumerate(audio_file.read().transpose()):
-            channel_results.append(
-                max_THDN(signal=channel,
-                         rate=audio_file.samplerate,
-                         step_size=step_size,
-                         window_size=window_size,
-                         q=q,
-                         freq=freq))
-    return channel_results
-
-
-def get_file_anomaly_durations(filename,
-                               freq=None,
-                               block_size=ANOMALY_DETECTION_BLOCK_SIZE,
-                               threshold=PATTERN_MATCHING_THRESHOLD,
-                               tolerance=ANOMALY_GROUPING_TOLERANCE):
-    """Get durations of anomalies for each channel of audio file.
-
-    Args:
-        filename (str): path to the audio file.
-          (supported file types: http://www.mega-nerd.com/libsndfile/#Features)
-        freq (int|float): fundamental frequency of the signal. All other
-            frequencies are noise. If None, will be calculated with FFT.
-        block_size (int): The block size in samples to detect anomaly.
-        threshold (float): The threshold of correlation index to be judge as
-            matched.
-        tolerance (float): The number of samples greater than block_size / 2
-            that the sample distance between two anomaly time values can be and
-            still be grouped as the same anomaly.
-    Returns:
-        channel_results (list): anomaly durations for each channel's signal.
-            List index corresponds to channel index.
-    """
-    audio_file = soundfile.SoundFile(filename)
-    signal = audio_file.read()
-    freq = freq or fundamental_freq(signal, audio_file.samplerate)
-    channel_results = []
-    if audio_file.channels == 1:
-        channel_results.append(
-            get_anomaly_durations(signal=signal,
-                                  rate=audio_file.samplerate,
-                                  freq=freq,
-                                  block_size=block_size,
-                                  threshold=threshold,
-                                  tolerance=tolerance))
-    else:
-        for ch_no, channel in enumerate(signal.transpose()):
-            channel_results.append(
-                get_anomaly_durations(signal=channel,
-                                      rate=audio_file.samplerate,
-                                      freq=freq,
-                                      block_size=block_size,
-                                      threshold=threshold,
-                                      tolerance=tolerance))
-    return channel_results
diff --git a/src/antlion/test_utils/audio_analysis_lib/audio_data.py b/src/antlion/test_utils/audio_analysis_lib/audio_data.py
deleted file mode 100644
index 4b74730..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/audio_data.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module provides abstraction of audio data."""
-
-import numpy
-"""The dict containing information on how to parse sample from raw data.
-
-Keys: The sample format as in aplay command.
-Values: A dict containing:
-    message: Human-readable sample format.
-    dtype_str: Data type used in numpy dtype.  Check
-               https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html
-               for supported data type.
-    size_bytes: Number of bytes for one sample.
-"""
-SAMPLE_FORMATS = dict(
-    S32_LE=dict(message='Signed 32-bit integer, little-endian',
-                dtype_str='<i',
-                size_bytes=4),
-    S16_LE=dict(message='Signed 16-bit integer, little-endian',
-                dtype_str='<i',
-                size_bytes=2))
-
-
-def get_maximum_value_from_sample_format(sample_format):
-    """Gets the maximum value from sample format.
-
-    Args:
-        sample_format: A key in SAMPLE_FORMAT.
-
-    Returns:The maximum value the sample can hold + 1.
-
-    """
-    size_bits = SAMPLE_FORMATS[sample_format]['size_bytes'] * 8
-    return 1 << (size_bits - 1)
-
-
-class AudioRawDataError(Exception):
-    """Error in AudioRawData."""
-
-
-class AudioRawData(object):
-    """The abstraction of audio raw data.
-
-    @property channel: The number of channels.
-    @property channel_data: A list of lists containing samples in each channel.
-                            E.g., The third sample in the second channel is
-                            channel_data[1][2].
-    @property sample_format: The sample format which should be one of the keys
-                             in audio_data.SAMPLE_FORMATS.
-    """
-
-    def __init__(self, binary, channel, sample_format):
-        """Initializes an AudioRawData.
-
-        Args:
-            binary: A string containing binary data. If binary is not None,
-                       The samples in binary will be parsed and be filled into
-                       channel_data.
-            channel: The number of channels.
-            sample_format: One of the keys in audio_data.SAMPLE_FORMATS.
-        """
-        self.channel = channel
-        self.channel_data = [[] for _ in range(self.channel)]
-        self.sample_format = sample_format
-        if binary:
-            self.read_binary(binary)
-
-    def read_binary(self, binary):
-        """Reads samples from binary and fills channel_data.
-
-        Reads samples of fixed width from binary string into a numpy array
-        and shapes them into each channel.
-
-        Args:
-            binary: A string containing binary data.
-        """
-        sample_format_dict = SAMPLE_FORMATS[self.sample_format]
-
-        # The data type used in numpy fromstring function. For example,
-        # <i4 for 32-bit signed int.
-        np_dtype = '%s%d' % (sample_format_dict['dtype_str'],
-                             sample_format_dict['size_bytes'])
-
-        # Reads data from a string into 1-D array.
-        np_array = numpy.fromstring(binary, dtype=np_dtype)
-
-        n_frames = len(np_array) / self.channel
-        # Reshape np_array into an array of shape (n_frames, channel).
-        np_array = np_array.reshape(int(n_frames), self.channel)
-        # Transpose np_arrya so it becomes of shape (channel, n_frames).
-        self.channel_data = np_array.transpose()
diff --git a/src/antlion/test_utils/audio_analysis_lib/audio_quality_measurement.py b/src/antlion/test_utils/audio_analysis_lib/audio_quality_measurement.py
deleted file mode 100644
index c347636..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/audio_quality_measurement.py
+++ /dev/null
@@ -1,927 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module provides utilities to detect some artifacts and measure the
-    quality of audio."""
-
-import logging
-import math
-import numpy
-
-import antlion.test_utils.audio_analysis_lib.audio_analysis as audio_analysis
-
-# The input signal should be one sine wave with fixed frequency which
-# can have silence before and/or after sine wave.
-# For example:
-#   silence      sine wave      silence
-#  -----------|VVVVVVVVVVVVV|-----------
-#     (a)           (b)           (c)
-# This module detects these artifacts:
-#   1. Detect noise in (a) and (c).
-#   2. Detect delay in (b).
-#   3. Detect burst in (b).
-# Assume the transitions between (a)(b) and (b)(c) are smooth and
-# amplitude increases/decreases linearly.
-# This module will detect artifacts in the sine wave.
-# This module also estimates the equivalent noise level by teager operator.
-# This module also detects volume changes in the sine wave. However, volume
-# changes may be affected by delay or burst.
-# Some artifacts may cause each other.
-
-# In this module, amplitude and frequency are derived from Hilbert transform.
-# Both amplitude and frequency are a function of time.
-
-# To detect each artifact, each point will be compared with
-# average amplitude of its block. The block size will be 1.5 ms.
-# Using average amplitude can mitigate the error caused by
-# Hilbert transform and noise.
-# In some case, for more accuracy, the block size may be modified
-# to other values.
-DEFAULT_BLOCK_SIZE_SECS = 0.0015
-
-# If the difference between average frequency of this block and
-# dominant frequency of full signal is less than 0.5 times of
-# dominant frequency, this block is considered to be within the
-# sine wave. In most cases, if there is no sine wave(only noise),
-# average frequency will be much greater than 5 times of
-# dominant frequency.
-# Also, for delay during playback, the frequency will be about 0
-# in perfect situation or much greater than 5 times of dominant
-# frequency if it's noised.
-DEFAULT_FREQUENCY_ERROR = 0.5
-
-# If the amplitude of some sample is less than 0.6 times of the
-# average amplitude of its left/right block, it will be considered
-# as a delay during playing.
-DEFAULT_DELAY_AMPLITUDE_THRESHOLD = 0.6
-
-# If the average amplitude of the block before or after playing
-# is more than 0.5 times to the average amplitude of the wave,
-# it will be considered as a noise artifact.
-DEFAULT_NOISE_AMPLITUDE_THRESHOLD = 0.5
-
-# In the sine wave, if the amplitude is more than 1.4 times of
-# its left side and its right side, it will be considered as
-# a burst.
-DEFAULT_BURST_AMPLITUDE_THRESHOLD = 1.4
-
-# When detecting burst, if the amplitude is lower than 0.5 times
-# average amplitude, we ignore it.
-DEFAULT_BURST_TOO_SMALL = 0.5
-
-# For a signal which is the combination of sine wave with fixed frequency f and
-# amplitude 1 and standard noise with amplitude k, the average teager value is
-# nearly linear to the noise level k.
-# Given frequency f, we simulate a sine wave with default noise level and
-# calculate its average teager value. Then, we can estimate the equivalent
-# noise level of input signal by the average teager value of input signal.
-DEFAULT_STANDARD_NOISE = 0.005
-
-# For delay, burst, volume increasing/decreasing, if two delay(
-# burst, volume increasing/decreasing) happen within
-# DEFAULT_SAME_EVENT_SECS seconds, we consider they are the
-# same event.
-DEFAULT_SAME_EVENT_SECS = 0.001
-
-# When detecting increasing/decreasing volume of signal, if the amplitude
-# is lower than 0.1 times average amplitude, we ignore it.
-DEFAULT_VOLUME_CHANGE_TOO_SMALL = 0.1
-
-# If average amplitude of right block is less/more than average
-# amplitude of left block times DEFAULT_VOLUME_CHANGE_AMPLITUDE, it will be
-# considered as decreasing/increasing on volume.
-DEFAULT_VOLUME_CHANGE_AMPLITUDE = 0.1
-
-# If the increasing/decreasing volume event is too close to the start or the end
-# of sine wave, we consider its volume change as part of rising/falling phase in
-# the start/end.
-NEAR_START_OR_END_SECS = 0.01
-
-# After applying Hilbert transform, the resulting amplitude and frequency may be
-# extremely large in the start and/or the end part. Thus, we will append zeros
-# before and after the whole wave for 0.1 secs.
-APPEND_ZEROS_SECS = 0.1
-
-# If the noise event is too close to the start or the end of the data, we
-# consider its noise as part of artifacts caused by edge effect of Hilbert
-# transform.
-# For example, originally, the data duration is 10 seconds.
-# We append 0.1 seconds of zeros in the beginning and the end of the data, so
-# the data becomes 10.2 seocnds long.
-# Then, we apply Hilbert transform to 10.2 seconds of data.
-# Near 0.1 seconds and 10.1 seconds, there will be edge effect of Hilbert
-# transform. We do not want these be treated as noise.
-# If NEAR_DATA_START_OR_END_SECS is set to 0.01, then the noise happened
-# at [0, 0.11] and [10.09, 10.1] will be ignored.
-NEAR_DATA_START_OR_END_SECS = 0.01
-
-# If the noise event is too close to the start or the end of the sine wave in
-# the data, we consider its noise as part of artifacts caused by edge effect of
-# Hilbert transform.
-# A |-------------|vvvvvvvvvvvvvvvvvvvvvvv|-------------|
-# B |ooooooooo| d |                       | d |ooooooooo|
-#
-# A is full signal. It contains a sine wave and silence before and after sine
-# wave.
-# In B, |oooo| shows the parts that we are going to check for noise before/after
-# sine wave. | d | is determined by NEAR_SINE_START_OR_END_SECS.
-NEAR_SINE_START_OR_END_SECS = 0.01
-
-
-class SineWaveNotFound(Exception):
-    """Error when there's no sine wave found in the signal"""
-
-
-def hilbert(x):
-    """Hilbert transform copied from scipy.
-
-    More information can be found here:
-    http://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.hilbert.html
-
-    Args:
-        x: Real signal data to transform.
-
-    Returns:
-        Analytic signal of x, we can further extract amplitude and
-              frequency from it.
-
-    """
-    x = numpy.asarray(x)
-    if numpy.iscomplexobj(x):
-        raise ValueError("x must be real.")
-    axis = -1
-    N = x.shape[axis]
-    if N <= 0:
-        raise ValueError("N must be positive.")
-
-    Xf = numpy.fft.fft(x, N, axis=axis)
-    h = numpy.zeros(N)
-    if N % 2 == 0:
-        h[0] = h[N // 2] = 1
-        h[1:N // 2] = 2
-    else:
-        h[0] = 1
-        h[1:(N + 1) // 2] = 2
-
-    if len(x.shape) > 1:
-        ind = [newaxis] * x.ndim
-        ind[axis] = slice(None)
-        h = h[ind]
-    x = numpy.fft.ifft(Xf * h, axis=axis)
-    return x
-
-
-def noised_sine_wave(frequency, rate, noise_level):
-    """Generates a sine wave of 2 second with specified noise level.
-
-    Args:
-        frequency: Frequency of sine wave.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        noise_level: Required noise level.
-
-    Returns:
-        A sine wave with specified noise level.
-
-    """
-    wave = []
-    for index in range(0, rate * 2):
-        sample = 2.0 * math.pi * frequency * float(index) / float(rate)
-        sine_wave = math.sin(sample)
-        noise = noise_level * numpy.random.standard_normal()
-        wave.append(sine_wave + noise)
-    return wave
-
-
-def average_teager_value(wave, amplitude):
-    """Computes the normalized average teager value.
-
-    After averaging the teager value, we will normalize the value by
-    dividing square of amplitude.
-
-    Args:
-        wave: Wave to apply teager operator.
-        amplitude: Average amplitude of given wave.
-
-    Returns:
-        Average teager value.
-
-    """
-    teager_value, length = 0, len(wave)
-    for i in range(1, length - 1):
-        ith_teager_value = abs(wave[i] * wave[i] - wave[i - 1] * wave[i + 1])
-        ith_teager_value *= max(1, abs(wave[i]))
-        teager_value += ith_teager_value
-    teager_value = (float(teager_value) / length) / (amplitude**2)
-    return teager_value
-
-
-def noise_level(amplitude, frequency, rate, teager_value_of_input):
-    """Computes the noise level compared with standard_noise.
-
-    For a signal which is the combination of sine wave with fixed frequency f
-    and amplitude 1 and standard noise with amplitude k, the average teager
-    value is nearly linear to the noise level k.
-    Thus, we can compute the average teager value of a sine wave with
-    standard_noise. Then, we can estimate the noise level of given input.
-
-    Args:
-        amplitude: Amplitude of input audio.
-        frequency: Dominant frequency of input audio.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        teager_value_of_input: Average teager value of input audio.
-
-    Returns:
-        A float value denotes the audio is equivalent to have how many times of
-            noise compared with its amplitude.For example, 0.02 denotes that the
-            wave has a noise which has standard distribution with standard
-            deviation being 0.02 times the amplitude of the wave.
-
-    """
-    standard_noise = DEFAULT_STANDARD_NOISE
-
-    # Generates the standard sine wave with stdandard_noise level of noise.
-    standard_wave = noised_sine_wave(frequency, rate, standard_noise)
-
-    # Calculates the average teager value.
-    teager_value_of_std_wave = average_teager_value(standard_wave, amplitude)
-
-    return (teager_value_of_input / teager_value_of_std_wave) * standard_noise
-
-
-def error(f1, f2):
-    """Calculates the relative error between f1 and f2.
-
-    Args:
-        f1: Exact value.
-        f2: Test value.
-
-    Returns:
-        Relative error between f1 and f2.
-
-    """
-    return abs(float(f1) - float(f2)) / float(f1)
-
-
-def hilbert_analysis(signal, rate, block_size):
-    """Finds amplitude and frequency of each time of signal by Hilbert transform.
-
-    Args:
-        signal: The wave to analyze.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        block_size: The size of block to transform.
-
-    Returns:
-        A tuple of list: (amplitude, frequency) composed of amplitude and
-            frequency of each time.
-
-    """
-    # To apply Hilbert transform, the wave will be transformed
-    # segment by segment. For each segment, its size will be
-    # block_size and we will only take middle part of it.
-    # Thus, each segment looks like: |-----|=====|=====|-----|.
-    # "=...=" part will be taken while "-...-" part will be ignored.
-    #
-    # The whole size of taken part will be half of block_size
-    # which will be hilbert_block.
-    # The size of each ignored part will be half of hilbert_block
-    # which will be half_hilbert_block.
-    hilbert_block = block_size // 2
-    half_hilbert_block = hilbert_block // 2
-    # As mentioned above, for each block, we will only take middle
-    # part of it. Thus, the whole transformation will be completed as:
-    # |=====|=====|-----|           |-----|=====|=====|-----|
-    #       |-----|=====|=====|-----|           |-----|=====|=====|
-    #                   |-----|=====|=====|-----|
-    # Specially, beginning and ending part may not have ignored part.
-    length = len(signal)
-    result = []
-    for left_border in range(0, length, hilbert_block):
-        right_border = min(length, left_border + hilbert_block)
-        temp_left_border = max(0, left_border - half_hilbert_block)
-        temp_right_border = min(length, right_border + half_hilbert_block)
-        temp = hilbert(signal[temp_left_border:temp_right_border])
-        for index in range(left_border, right_border):
-            result.append(temp[index - temp_left_border])
-    result = numpy.asarray(result)
-    amplitude = numpy.abs(result)
-    phase = numpy.unwrap(numpy.angle(result))
-    frequency = numpy.diff(phase) / (2.0 * numpy.pi) * rate
-    #frequency.append(frequency[len(frequency)-1])
-    frequecny = numpy.append(frequency, frequency[len(frequency) - 1])
-    return (amplitude, frequency)
-
-
-def find_block_average_value(arr, side_block_size, block_size):
-    """For each index, finds average value of its block, left block, right block.
-
-    It will find average value for each index in the range.
-
-    For each index, the range of its block is
-        [max(0, index - block_size / 2), min(length - 1, index + block_size / 2)]
-    For each index, the range of its left block is
-        [max(0, index - size_block_size), index]
-    For each index, the range of its right block is
-        [index, min(length - 1, index + side_block_size)]
-
-    Args:
-        arr: The array to be computed.
-        side_block_size: the size of the left_block and right_block.
-        block_size: the size of the block.
-
-    Returns:
-        A tuple of lists: (left_block_average_array,
-                                 right_block_average_array,
-                                 block_average_array)
-    """
-    length = len(arr)
-    left_border, right_border = 0, 1
-    left_block_sum = arr[0]
-    right_block_sum = arr[0]
-    left_average_array = numpy.zeros(length)
-    right_average_array = numpy.zeros(length)
-    block_average_array = numpy.zeros(length)
-    for index in range(0, length):
-        while left_border < index - side_block_size:
-            left_block_sum -= arr[left_border]
-            left_border += 1
-        while right_border < min(length, index + side_block_size):
-            right_block_sum += arr[right_border]
-            right_border += 1
-
-        left_average_value = float(left_block_sum) / (index - left_border + 1)
-        right_average_value = float(right_block_sum) / (right_border - index)
-        left_average_array[index] = left_average_value
-        right_average_array[index] = right_average_value
-
-        if index + 1 < length:
-            left_block_sum += arr[index + 1]
-        right_block_sum -= arr[index]
-    left_border, right_border = 0, 1
-    block_sum = 0
-    for index in range(0, length):
-        while left_border < index - block_size / 2:
-            block_sum -= arr[left_border]
-            left_border += 1
-        while right_border < min(length, index + block_size / 2):
-            block_sum += arr[right_border]
-            right_border += 1
-
-        average_value = float(block_sum) / (right_border - left_border)
-        block_average_array[index] = average_value
-    return (left_average_array, right_average_array, block_average_array)
-
-
-def find_start_end_index(dominant_frequency, block_frequency_delta, block_size,
-                         frequency_error_threshold):
-    """Finds start and end index of sine wave.
-
-    For each block with size of block_size, we check that whether its frequency
-    is close enough to the dominant_frequency. If yes, we will consider this
-    block to be within the sine wave.
-    Then, it will return the start and end index of sine wave indicating that
-    sine wave is between [start_index, end_index)
-    It's okay if the whole signal only contains sine wave.
-
-    Args:
-        dominant_frequency: Dominant frequency of signal.
-        block_frequency_delta: Average absolute difference between dominant
-                                  frequency and frequency of each block. For
-                                  each index, its block is
-                                  [max(0, index - block_size / 2),
-                                   min(length - 1, index + block_size / 2)]
-        block_size: Block size in samples.
-
-    Returns:
-        A tuple composed of (start_index, end_index)
-
-    """
-    length = len(block_frequency_delta)
-
-    # Finds the start/end time index of playing based on dominant frequency
-    start_index, end_index = length - 1, 0
-    for index in range(0, length):
-        left_border = max(0, index - block_size / 2)
-        right_border = min(length - 1, index + block_size / 2)
-        frequency_error = block_frequency_delta[index] / dominant_frequency
-        if frequency_error < frequency_error_threshold:
-            start_index = min(start_index, left_border)
-            end_index = max(end_index, right_border + 1)
-    return (start_index, end_index)
-
-
-def noise_detection(start_index, end_index, block_amplitude, average_amplitude,
-                    rate, noise_amplitude_threshold):
-    """Detects noise before/after sine wave.
-
-    If average amplitude of some sample's block before start of wave or after
-    end of wave is more than average_amplitude times noise_amplitude_threshold,
-    it will be considered as a noise.
-
-    Args:
-        start_index: Start index of sine wave.
-        end_index: End index of sine wave.
-        block_amplitude: An array for average amplitude of each block, where
-                            amplitude is computed from Hilbert transform.
-        average_amplitude: Average amplitude of sine wave.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        noise_amplitude_threshold: If the average amplitude of a block is
-                        higher than average amplitude of the wave times
-                        noise_amplitude_threshold, it will be considered as
-                        noise before/after playback.
-
-    Returns:
-        A tuple of lists indicating the time that noise happens:
-            (noise_before_playing, noise_after_playing).
-
-    """
-    length = len(block_amplitude)
-    amplitude_threshold = average_amplitude * noise_amplitude_threshold
-    same_event_samples = rate * DEFAULT_SAME_EVENT_SECS
-
-    # Detects noise before playing.
-    noise_time_point = []
-    last_noise_end_time_point = []
-    previous_noise_index = None
-    times = 0
-    for index in range(0, length):
-        # Ignore noise too close to the beginning or the end of sine wave.
-        # Check the docstring of NEAR_SINE_START_OR_END_SECS.
-        if ((start_index - rate * NEAR_SINE_START_OR_END_SECS) <= index
-                and (index < end_index + rate * NEAR_SINE_START_OR_END_SECS)):
-            continue
-
-        # Ignore noise too close to the beginning or the end of original data.
-        # Check the docstring of NEAR_DATA_START_OR_END_SECS.
-        if (float(index) / rate <=
-                NEAR_DATA_START_OR_END_SECS + APPEND_ZEROS_SECS):
-            continue
-        if (float(length - index) / rate <=
-                NEAR_DATA_START_OR_END_SECS + APPEND_ZEROS_SECS):
-            continue
-        if block_amplitude[index] > amplitude_threshold:
-            same_event = False
-            if previous_noise_index:
-                same_event = (index -
-                              previous_noise_index) < same_event_samples
-            if not same_event:
-                index_start_sec = float(index) / rate - APPEND_ZEROS_SECS
-                index_end_sec = float(index + 1) / rate - APPEND_ZEROS_SECS
-                noise_time_point.append(index_start_sec)
-                last_noise_end_time_point.append(index_end_sec)
-                times += 1
-            index_end_sec = float(index + 1) / rate - APPEND_ZEROS_SECS
-            last_noise_end_time_point[times - 1] = index_end_sec
-            previous_noise_index = index
-
-    noise_before_playing, noise_after_playing = [], []
-    for i in range(times):
-        duration = last_noise_end_time_point[i] - noise_time_point[i]
-        if noise_time_point[i] < float(start_index) / rate - APPEND_ZEROS_SECS:
-            noise_before_playing.append((noise_time_point[i], duration))
-        else:
-            noise_after_playing.append((noise_time_point[i], duration))
-
-    return (noise_before_playing, noise_after_playing)
-
-
-def delay_detection(start_index, end_index, block_amplitude, average_amplitude,
-                    dominant_frequency, rate, left_block_amplitude,
-                    right_block_amplitude, block_frequency_delta,
-                    delay_amplitude_threshold, frequency_error_threshold):
-    """Detects delay during playing.
-
-    For each sample, we will check whether the average amplitude of its block
-    is less than average amplitude of its left block and its right block times
-    delay_amplitude_threshold. Also, we will check whether the frequency of
-    its block is far from the dominant frequency.
-    If at least one constraint fulfilled, it will be considered as a delay.
-
-    Args:
-        start_index: Start index of sine wave.
-        end_index: End index of sine wave.
-        block_amplitude: An array for average amplitude of each block, where
-                            amplitude is computed from Hilbert transform.
-        average_amplitude: Average amplitude of sine wave.
-        dominant_frequency: Dominant frequency of signal.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        left_block_amplitude: Average amplitude of left block of each index.
-                                Ref to find_block_average_value function.
-        right_block_amplitude: Average amplitude of right block of each index.
-                                Ref to find_block_average_value function.
-        block_frequency_delta: Average absolute difference frequency to
-                                dominant frequency of block of each index.
-                                Ref to find_block_average_value function.
-        delay_amplitude_threshold: If the average amplitude of a block is
-                        lower than average amplitude of the wave times
-                        delay_amplitude_threshold, it will be considered
-                        as delay.
-        frequency_error_threshold: Ref to DEFAULT_FREQUENCY_ERROR
-
-    Returns:
-        List of delay occurrence:
-                [(time_1, duration_1), (time_2, duration_2), ...],
-              where time and duration are in seconds.
-
-    """
-    delay_time_points = []
-    last_delay_end_time_points = []
-    previous_delay_index = None
-    times = 0
-    same_event_samples = rate * DEFAULT_SAME_EVENT_SECS
-    start_time = float(start_index) / rate - APPEND_ZEROS_SECS
-    end_time = float(end_index) / rate - APPEND_ZEROS_SECS
-    for index in range(int(start_index), int(end_index)):
-        if block_amplitude[
-                index] > average_amplitude * delay_amplitude_threshold:
-            continue
-        now_time = float(index) / rate - APPEND_ZEROS_SECS
-        if abs(now_time - start_time) < NEAR_START_OR_END_SECS:
-            continue
-        if abs(now_time - end_time) < NEAR_START_OR_END_SECS:
-            continue
-        # If amplitude less than its left/right side and small enough,
-        # it will be considered as a delay.
-        amp_threshold = average_amplitude * delay_amplitude_threshold
-        left_threshold = delay_amplitude_threshold * left_block_amplitude[index]
-        amp_threshold = min(amp_threshold, left_threshold)
-        right_threshold = delay_amplitude_threshold * right_block_amplitude[
-            index]
-        amp_threshold = min(amp_threshold, right_threshold)
-
-        frequency_error = block_frequency_delta[index] / dominant_frequency
-
-        amplitude_too_small = block_amplitude[index] < amp_threshold
-        frequency_not_match = frequency_error > frequency_error_threshold
-
-        if amplitude_too_small or frequency_not_match:
-            same_event = False
-            if previous_delay_index:
-                same_event = (index -
-                              previous_delay_index) < same_event_samples
-            if not same_event:
-                index_start_sec = float(index) / rate - APPEND_ZEROS_SECS
-                index_end_sec = float(index + 1) / rate - APPEND_ZEROS_SECS
-                delay_time_points.append(index_start_sec)
-                last_delay_end_time_points.append(index_end_sec)
-                times += 1
-            previous_delay_index = index
-            index_end_sec = float(index + 1) / rate - APPEND_ZEROS_SECS
-            last_delay_end_time_points[times - 1] = index_end_sec
-
-    delay_list = []
-    for i in range(len(delay_time_points)):
-        duration = last_delay_end_time_points[i] - delay_time_points[i]
-        delay_list.append((delay_time_points[i], duration))
-    return delay_list
-
-
-def burst_detection(start_index, end_index, block_amplitude, average_amplitude,
-                    dominant_frequency, rate, left_block_amplitude,
-                    right_block_amplitude, block_frequency_delta,
-                    burst_amplitude_threshold, frequency_error_threshold):
-    """Detects burst during playing.
-
-    For each sample, we will check whether the average amplitude of its block is
-    more than average amplitude of its left block and its right block times
-    burst_amplitude_threshold. Also, we will check whether the frequency of
-    its block is not compatible to the dominant frequency.
-    If at least one constraint fulfilled, it will be considered as a burst.
-
-    Args:
-        start_index: Start index of sine wave.
-        end_index: End index of sine wave.
-        block_amplitude: An array for average amplitude of each block, where
-                            amplitude is computed from Hilbert transform.
-        average_amplitude: Average amplitude of sine wave.
-        dominant_frequency: Dominant frequency of signal.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        left_block_amplitude: Average amplitude of left block of each index.
-                                Ref to find_block_average_value function.
-        right_block_amplitude: Average amplitude of right block of each index.
-                                Ref to find_block_average_value function.
-        block_frequency_delta: Average absolute difference frequency to
-                                dominant frequency of block of each index.
-        burst_amplitude_threshold: If the amplitude is higher than average
-                            amplitude of its left block and its right block
-                            times burst_amplitude_threshold. It will be
-                            considered as a burst.
-        frequency_error_threshold: Ref to DEFAULT_FREQUENCY_ERROR
-
-    Returns:
-        List of burst occurence: [time_1, time_2, ...],
-              where time is in seconds.
-
-    """
-    burst_time_points = []
-    previous_burst_index = None
-    same_event_samples = rate * DEFAULT_SAME_EVENT_SECS
-    for index in range(int(start_index), int(end_index)):
-        # If amplitude higher than its left/right side and large enough,
-        # it will be considered as a burst.
-        if block_amplitude[
-                index] <= average_amplitude * DEFAULT_BURST_TOO_SMALL:
-            continue
-        if abs(index - start_index) < rate * NEAR_START_OR_END_SECS:
-            continue
-        if abs(index - end_index) < rate * NEAR_START_OR_END_SECS:
-            continue
-        amp_threshold = average_amplitude * DEFAULT_BURST_TOO_SMALL
-        left_threshold = burst_amplitude_threshold * left_block_amplitude[index]
-        amp_threshold = max(amp_threshold, left_threshold)
-        right_threshold = burst_amplitude_threshold * right_block_amplitude[
-            index]
-        amp_threshold = max(amp_threshold, right_threshold)
-
-        frequency_error = block_frequency_delta[index] / dominant_frequency
-
-        amplitude_too_large = block_amplitude[index] > amp_threshold
-        frequency_not_match = frequency_error > frequency_error_threshold
-
-        if amplitude_too_large or frequency_not_match:
-            same_event = False
-            if previous_burst_index:
-                same_event = index - previous_burst_index < same_event_samples
-            if not same_event:
-                burst_time_points.append(
-                    float(index) / rate - APPEND_ZEROS_SECS)
-            previous_burst_index = index
-
-    return burst_time_points
-
-
-def changing_volume_detection(start_index, end_index, average_amplitude, rate,
-                              left_block_amplitude, right_block_amplitude,
-                              volume_changing_amplitude_threshold):
-    """Finds volume changing during playback.
-
-    For each index, we will compare average amplitude of its left block and its
-    right block. If average amplitude of right block is more than average
-    amplitude of left block times (1 + DEFAULT_VOLUME_CHANGE_AMPLITUDE), it will
-    be considered as an increasing volume. If the one of right block is less
-    than that of left block times (1 - DEFAULT_VOLUME_CHANGE_AMPLITUDE), it will
-    be considered as a decreasing volume.
-
-    Args:
-        start_index: Start index of sine wave.
-        end_index: End index of sine wave.
-        average_amplitude: Average amplitude of sine wave.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        left_block_amplitude: Average amplitude of left block of each index.
-                                Ref to find_block_average_value function.
-        right_block_amplitude: Average amplitude of right block of each index.
-                                Ref to find_block_average_value function.
-        volume_changing_amplitude_threshold: If the average amplitude of right
-                                                block is higher or lower than
-                                                that of left one times this
-                                                value, it will be considered as
-                                                a volume change.
-                                                Also refer to
-                                                DEFAULT_VOLUME_CHANGE_AMPLITUDE
-
-    Returns:
-        List of volume changing composed of 1 for increasing and -1 for
-            decreasing.
-
-    """
-    length = len(left_block_amplitude)
-
-    # Detects rising and/or falling volume.
-    previous_rising_index, previous_falling_index = None, None
-    changing_time = []
-    changing_events = []
-    amplitude_threshold = average_amplitude * DEFAULT_VOLUME_CHANGE_TOO_SMALL
-    same_event_samples = rate * DEFAULT_SAME_EVENT_SECS
-    for index in range(int(start_index), int(end_index)):
-        # Skips if amplitude is too small.
-        if left_block_amplitude[index] < amplitude_threshold:
-            continue
-        if right_block_amplitude[index] < amplitude_threshold:
-            continue
-        # Skips if changing is from start or end time
-        if float(abs(start_index - index)) / rate < NEAR_START_OR_END_SECS:
-            continue
-        if float(abs(end_index - index)) / rate < NEAR_START_OR_END_SECS:
-            continue
-
-        delta_margin = volume_changing_amplitude_threshold
-        if left_block_amplitude[index] > 0:
-            delta_margin *= left_block_amplitude[index]
-
-        increasing_threshold = left_block_amplitude[index] + delta_margin
-        decreasing_threshold = left_block_amplitude[index] - delta_margin
-
-        if right_block_amplitude[index] > increasing_threshold:
-            same_event = False
-            if previous_rising_index:
-                same_event = index - previous_rising_index < same_event_samples
-            if not same_event:
-                changing_time.append(float(index) / rate - APPEND_ZEROS_SECS)
-                changing_events.append(+1)
-            previous_rising_index = index
-        if right_block_amplitude[index] < decreasing_threshold:
-            same_event = False
-            if previous_falling_index:
-                same_event = index - previous_falling_index < same_event_samples
-            if not same_event:
-                changing_time.append(float(index) / rate - APPEND_ZEROS_SECS)
-                changing_events.append(-1)
-            previous_falling_index = index
-
-    # Combines consecutive increasing/decreasing event.
-    combined_changing_events, prev = [], 0
-    for i in range(len(changing_events)):
-        if changing_events[i] == prev:
-            continue
-        combined_changing_events.append((changing_time[i], changing_events[i]))
-        prev = changing_events[i]
-    return combined_changing_events
-
-
-def quality_measurement(
-        signal,
-        rate,
-        dominant_frequency=None,
-        block_size_secs=DEFAULT_BLOCK_SIZE_SECS,
-        frequency_error_threshold=DEFAULT_FREQUENCY_ERROR,
-        delay_amplitude_threshold=DEFAULT_DELAY_AMPLITUDE_THRESHOLD,
-        noise_amplitude_threshold=DEFAULT_NOISE_AMPLITUDE_THRESHOLD,
-        burst_amplitude_threshold=DEFAULT_BURST_AMPLITUDE_THRESHOLD,
-        volume_changing_amplitude_threshold=DEFAULT_VOLUME_CHANGE_AMPLITUDE):
-    """Detects several artifacts and estimates the noise level.
-
-    This method detects artifact before playing, after playing, and delay
-    during playing. Also, it estimates the noise level of the signal.
-    To avoid the influence of noise, it calculates amplitude and frequency
-    block by block.
-
-    Args:
-        signal: A list of numbers for one-channel PCM data. The data should
-                   be normalized to [-1,1].
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        dominant_frequency: Dominant frequency of signal. Set None to
-                               recalculate the frequency in this function.
-        block_size_secs: Block size in seconds. The measurement will be done
-                            block-by-block using average amplitude and frequency
-                            in each block to avoid noise.
-        frequency_error_threshold: Ref to DEFAULT_FREQUENCY_ERROR.
-        delay_amplitude_threshold: If the average amplitude of a block is
-                                      lower than average amplitude of the wave
-                                      times delay_amplitude_threshold, it will
-                                      be considered as delay.
-                                      Also refer to delay_detection and
-                                      DEFAULT_DELAY_AMPLITUDE_THRESHOLD.
-        noise_amplitude_threshold: If the average amplitude of a block is
-                                      higher than average amplitude of the wave
-                                      times noise_amplitude_threshold, it will
-                                      be considered as noise before/after
-                                      playback.
-                                      Also refer to noise_detection and
-                                      DEFAULT_NOISE_AMPLITUDE_THRESHOLD.
-        burst_amplitude_threshold: If the average amplitude of a block is
-                                      higher than average amplitude of its left
-                                      block and its right block times
-                                      burst_amplitude_threshold. It will be
-                                      considered as a burst.
-                                      Also refer to burst_detection and
-                                      DEFAULT_BURST_AMPLITUDE_THRESHOLD.
-        volume_changing_amplitude_threshold: If the average amplitude of right
-                                                block is higher or lower than
-                                                that of left one times this
-                                                value, it will be considered as
-                                                a volume change.
-                                                Also refer to
-                                                changing_volume_detection and
-                                                DEFAULT_VOLUME_CHANGE_AMPLITUDE
-
-    Returns:
-        A dictoinary of detection/estimation:
-              {'artifacts':
-                {'noise_before_playback':
-                    [(time_1, duration_1), (time_2, duration_2), ...],
-                 'noise_after_playback':
-                    [(time_1, duration_1), (time_2, duration_2), ...],
-                 'delay_during_playback':
-                    [(time_1, duration_1), (time_2, duration_2), ...],
-                 'burst_during_playback':
-                    [time_1, time_2, ...]
-                },
-               'volume_changes':
-                 [(time_1, flag_1), (time_2, flag_2), ...],
-               'equivalent_noise_level': level
-              }
-              where durations and time points are in seconds. And,
-              equivalence_noise_level is the quotient of noise and wave which
-              refers to DEFAULT_STANDARD_NOISE. volume_changes is a list of
-              tuples containing time stamps and decreasing/increasing flags for
-              volume change events.
-
-    """
-    # Calculates the block size, from seconds to samples.
-    block_size = int(block_size_secs * rate)
-
-    signal = numpy.concatenate(
-        (numpy.zeros(int(rate * APPEND_ZEROS_SECS)), signal,
-         numpy.zeros(int(rate * APPEND_ZEROS_SECS))))
-    signal = numpy.array(signal, dtype=float)
-    length = len(signal)
-
-    # Calculates the amplitude and frequency.
-    amplitude, frequency = hilbert_analysis(signal, rate, block_size)
-
-    # Finds the dominant frequency.
-    if not dominant_frequency:
-        dominant_frequency = audio_analysis.spectral_analysis(signal,
-                                                              rate)[0][0]
-
-    # Finds the array which contains absolute difference between dominant
-    # frequency and frequency at each time point.
-    frequency_delta = abs(frequency - dominant_frequency)
-
-    # Computes average amplitude of each type of block
-    res = find_block_average_value(amplitude, block_size * 2, block_size)
-    left_block_amplitude, right_block_amplitude, block_amplitude = res
-
-    # Computes average absolute difference of frequency and dominant frequency
-    # of the block of each index
-    _, _, block_frequency_delta = find_block_average_value(
-        frequency_delta, block_size * 2, block_size)
-
-    # Finds start and end index of sine wave.
-    start_index, end_index = find_start_end_index(dominant_frequency,
-                                                  block_frequency_delta,
-                                                  block_size,
-                                                  frequency_error_threshold)
-
-    if start_index > end_index:
-        raise SineWaveNotFound('No sine wave found in signal')
-
-    logging.debug('Found sine wave: start: %s, end: %s',
-                  float(start_index) / rate - APPEND_ZEROS_SECS,
-                  float(end_index) / rate - APPEND_ZEROS_SECS)
-
-    sum_of_amplitude = float(sum(amplitude[int(start_index):int(end_index)]))
-    # Finds average amplitude of sine wave.
-    average_amplitude = sum_of_amplitude / (end_index - start_index)
-
-    # Finds noise before and/or after playback.
-    noise_before_playing, noise_after_playing = noise_detection(
-        start_index, end_index, block_amplitude, average_amplitude, rate,
-        noise_amplitude_threshold)
-
-    # Finds delay during playback.
-    delays = delay_detection(start_index, end_index, block_amplitude,
-                             average_amplitude, dominant_frequency, rate,
-                             left_block_amplitude, right_block_amplitude,
-                             block_frequency_delta, delay_amplitude_threshold,
-                             frequency_error_threshold)
-
-    # Finds burst during playback.
-    burst_time_points = burst_detection(
-        start_index, end_index, block_amplitude, average_amplitude,
-        dominant_frequency, rate, left_block_amplitude, right_block_amplitude,
-        block_frequency_delta, burst_amplitude_threshold,
-        frequency_error_threshold)
-
-    # Finds volume changing during playback.
-    volume_changes = changing_volume_detection(
-        start_index, end_index, average_amplitude, rate, left_block_amplitude,
-        right_block_amplitude, volume_changing_amplitude_threshold)
-
-    # Calculates the average teager value.
-    teager_value = average_teager_value(
-        signal[int(start_index):int(end_index)], average_amplitude)
-
-    # Finds out the noise level.
-    noise = noise_level(average_amplitude, dominant_frequency, rate,
-                        teager_value)
-
-    return {
-        'artifacts': {
-            'noise_before_playback': noise_before_playing,
-            'noise_after_playback': noise_after_playing,
-            'delay_during_playback': delays,
-            'burst_during_playback': burst_time_points
-        },
-        'volume_changes': volume_changes,
-        'equivalent_noise_level': noise
-    }
diff --git a/src/antlion/test_utils/audio_analysis_lib/check_quality.py b/src/antlion/test_utils/audio_analysis_lib/check_quality.py
deleted file mode 100644
index 0eef51b..0000000
--- a/src/antlion/test_utils/audio_analysis_lib/check_quality.py
+++ /dev/null
@@ -1,548 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Audio Analysis tool to analyze wave file and detect artifacts."""
-
-import collections
-import json
-import logging
-import numpy
-import pprint
-import subprocess
-import tempfile
-import wave
-
-import antlion.test_utils.audio_analysis_lib.audio_analysis as audio_analysis
-import antlion.test_utils.audio_analysis_lib.audio_data as audio_data
-import antlion.test_utils.audio_analysis_lib.audio_quality_measurement as \
- audio_quality_measurement
-
-# Holder for quality parameters used in audio_quality_measurement module.
-QualityParams = collections.namedtuple('QualityParams', [
-    'block_size_secs', 'frequency_error_threshold',
-    'delay_amplitude_threshold', 'noise_amplitude_threshold',
-    'burst_amplitude_threshold'
-])
-
-DEFAULT_QUALITY_BLOCK_SIZE_SECS = 0.0015
-DEFAULT_BURST_AMPLITUDE_THRESHOLD = 1.4
-DEFAULT_DELAY_AMPLITUDE_THRESHOLD = 0.6
-DEFAULT_FREQUENCY_ERROR_THRESHOLD = 0.5
-DEFAULT_NOISE_AMPLITUDE_THRESHOLD = 0.5
-
-
-class WaveFileException(Exception):
-    """Error in WaveFile."""
-
-
-class WaveFormatExtensibleException(Exception):
-    """Wave file is in WAVE_FORMAT_EXTENSIBLE format which is not supported."""
-
-
-class WaveFile(object):
-    """Class which handles wave file reading.
-
-    Properties:
-        raw_data: audio_data.AudioRawData object for data in wave file.
-        rate: sampling rate.
-
-    """
-
-    def __init__(self, filename):
-        """Inits a wave file.
-
-        Args:
-            filename: file name of the wave file.
-
-        """
-        self.raw_data = None
-        self.rate = None
-
-        self._wave_reader = None
-        self._n_channels = None
-        self._sample_width_bits = None
-        self._n_frames = None
-        self._binary = None
-
-        try:
-            self._read_wave_file(filename)
-        except WaveFormatExtensibleException:
-            logging.warning(
-                'WAVE_FORMAT_EXTENSIBLE is not supproted. '
-                'Try command "sox in.wav -t wavpcm out.wav" to convert '
-                'the file to WAVE_FORMAT_PCM format.')
-            self._convert_and_read_wav_file(filename)
-
-    def _convert_and_read_wav_file(self, filename):
-        """Converts the wav file and read it.
-
-        Converts the file into WAVE_FORMAT_PCM format using sox command and
-        reads its content.
-
-        Args:
-            filename: The wave file to be read.
-
-        Raises:
-            RuntimeError: sox is not installed.
-
-        """
-        # Checks if sox is installed.
-        try:
-            subprocess.check_output(['sox', '--version'])
-        except:
-            raise RuntimeError('sox command is not installed. '
-                               'Try sudo apt-get install sox')
-
-        with tempfile.NamedTemporaryFile(suffix='.wav') as converted_file:
-            command = ['sox', filename, '-t', 'wavpcm', converted_file.name]
-            logging.debug('Convert the file using sox: %s', command)
-            subprocess.check_call(command)
-            self._read_wave_file(converted_file.name)
-
-    def _read_wave_file(self, filename):
-        """Reads wave file header and samples.
-
-        Args:
-            filename: The wave file to be read.
-
-        @raises WaveFormatExtensibleException: Wave file is in
-                                               WAVE_FORMAT_EXTENSIBLE format.
-        @raises WaveFileException: Wave file format is not supported.
-
-        """
-        try:
-            self._wave_reader = wave.open(filename, 'r')
-            self._read_wave_header()
-            self._read_wave_binary()
-        except wave.Error as e:
-            if 'unknown format: 65534' in str(e):
-                raise WaveFormatExtensibleException()
-            else:
-                logging.exception('Unsupported wave format')
-                raise WaveFileException()
-        finally:
-            if self._wave_reader:
-                self._wave_reader.close()
-
-    def _read_wave_header(self):
-        """Reads wave file header.
-
-        @raises WaveFileException: wave file is compressed.
-
-        """
-        # Header is a tuple of
-        # (nchannels, sampwidth, framerate, nframes, comptype, compname).
-        header = self._wave_reader.getparams()
-        logging.debug('Wave header: %s', header)
-
-        self._n_channels = header[0]
-        self._sample_width_bits = header[1] * 8
-        self.rate = header[2]
-        self._n_frames = header[3]
-        comptype = header[4]
-        compname = header[5]
-
-        if comptype != 'NONE' or compname != 'not compressed':
-            raise WaveFileException('Can not support compressed wav file.')
-
-    def _read_wave_binary(self):
-        """Reads in samples in wave file."""
-        self._binary = self._wave_reader.readframes(self._n_frames)
-        format_str = 'S%d_LE' % self._sample_width_bits
-        self.raw_data = audio_data.AudioRawData(binary=self._binary,
-                                                channel=self._n_channels,
-                                                sample_format=format_str)
-
-
-class QualityCheckerError(Exception):
-    """Error in QualityChecker."""
-
-
-class CompareFailure(QualityCheckerError):
-    """Exception when frequency comparison fails."""
-
-
-class QualityFailure(QualityCheckerError):
-    """Exception when quality check fails."""
-
-
-class QualityChecker(object):
-    """Quality checker controls the flow of checking quality of raw data."""
-
-    def __init__(self, raw_data, rate):
-        """Inits a quality checker.
-
-        Args:
-            raw_data: An audio_data.AudioRawData object.
-            rate: Sampling rate in samples per second. Example inputs: 44100,
-            48000
-
-        """
-        self._raw_data = raw_data
-        self._rate = rate
-        self._spectrals = []
-        self._quality_result = []
-
-    def do_spectral_analysis(self, ignore_high_freq, check_quality,
-                             quality_params):
-        """Gets the spectral_analysis result.
-
-        Args:
-            ignore_high_freq: Ignore high frequencies above this threshold.
-            check_quality: Check quality of each channel.
-            quality_params: A QualityParams object for quality measurement.
-
-        """
-        self.has_data()
-        for channel_idx in range(self._raw_data.channel):
-            signal = self._raw_data.channel_data[channel_idx]
-            max_abs = max(numpy.abs(signal))
-            logging.debug('Channel %d max abs signal: %f', channel_idx,
-                          max_abs)
-            if max_abs == 0:
-                logging.info('No data on channel %d, skip this channel',
-                             channel_idx)
-                continue
-
-            saturate_value = audio_data.get_maximum_value_from_sample_format(
-                self._raw_data.sample_format)
-            normalized_signal = audio_analysis.normalize_signal(
-                signal, saturate_value)
-            logging.debug('saturate_value: %f', saturate_value)
-            logging.debug('max signal after normalized: %f',
-                          max(normalized_signal))
-            spectral = audio_analysis.spectral_analysis(
-                normalized_signal, self._rate)
-
-            logging.debug('Channel %d spectral:\n%s', channel_idx,
-                          pprint.pformat(spectral))
-
-            # Ignore high frequencies above the threshold.
-            spectral = [(f, c) for (f, c) in spectral if f < ignore_high_freq]
-
-            logging.info(
-                'Channel %d spectral after ignoring high frequencies '
-                'above %f:\n%s', channel_idx, ignore_high_freq,
-                pprint.pformat(spectral))
-
-            try:
-                if check_quality:
-                    quality = audio_quality_measurement.quality_measurement(
-                        signal=normalized_signal,
-                        rate=self._rate,
-                        dominant_frequency=spectral[0][0],
-                        block_size_secs=quality_params.block_size_secs,
-                        frequency_error_threshold=quality_params.
-                        frequency_error_threshold,
-                        delay_amplitude_threshold=quality_params.
-                        delay_amplitude_threshold,
-                        noise_amplitude_threshold=quality_params.
-                        noise_amplitude_threshold,
-                        burst_amplitude_threshold=quality_params.
-                        burst_amplitude_threshold)
-
-                    logging.debug('Channel %d quality:\n%s', channel_idx,
-                                  pprint.pformat(quality))
-                    self._quality_result.append(quality)
-                self._spectrals.append(spectral)
-            except Exception as error:
-                logging.warning(
-                    "Failed to analyze channel {} with error: {}".format(
-                        channel_idx, error))
-
-    def has_data(self):
-        """Checks if data has been set.
-
-        Raises:
-            QualityCheckerError: if data or rate is not set yet.
-
-        """
-        if not self._raw_data or not self._rate:
-            raise QualityCheckerError('Data and rate is not set yet')
-
-    def check_freqs(self, expected_freqs, freq_threshold):
-        """Checks the dominant frequencies in the channels.
-
-        Args:
-            expected_freq: A list of frequencies. If frequency is 0, it
-                              means this channel should be ignored.
-            freq_threshold: The difference threshold to compare two
-                               frequencies.
-
-        """
-        logging.debug('expected_freqs: %s', expected_freqs)
-        for idx, expected_freq in enumerate(expected_freqs):
-            if expected_freq == 0:
-                continue
-            if not self._spectrals[idx]:
-                raise CompareFailure(
-                    'Failed at channel %d: no dominant frequency' % idx)
-            dominant_freq = self._spectrals[idx][0][0]
-            if abs(dominant_freq - expected_freq) > freq_threshold:
-                raise CompareFailure(
-                    'Failed at channel %d: %f is too far away from %f' %
-                    (idx, dominant_freq, expected_freq))
-
-    def check_quality(self):
-        """Checks the quality measurement results on each channel.
-
-        Raises:
-            QualityFailure when there is artifact.
-
-        """
-        error_msgs = []
-
-        for idx, quality_res in enumerate(self._quality_result):
-            artifacts = quality_res['artifacts']
-            if artifacts['noise_before_playback']:
-                error_msgs.append('Found noise before playback: %s' %
-                                  (artifacts['noise_before_playback']))
-            if artifacts['noise_after_playback']:
-                error_msgs.append('Found noise after playback: %s' %
-                                  (artifacts['noise_after_playback']))
-            if artifacts['delay_during_playback']:
-                error_msgs.append('Found delay during playback: %s' %
-                                  (artifacts['delay_during_playback']))
-            if artifacts['burst_during_playback']:
-                error_msgs.append('Found burst during playback: %s' %
-                                  (artifacts['burst_during_playback']))
-        if error_msgs:
-            raise QualityFailure('Found bad quality: %s',
-                                 '\n'.join(error_msgs))
-
-    def dump(self, output_file):
-        """Dumps the result into a file in json format.
-
-        Args:
-            output_file: A file path to dump spectral and quality
-                            measurement result of each channel.
-
-        """
-        dump_dict = {
-            'spectrals': self._spectrals,
-            'quality_result': self._quality_result
-        }
-        with open(output_file, 'w') as f:
-            json.dump(dump_dict, f)
-
-    def has_data(self):
-        """Checks if data has been set.
-
-        Raises:
-            QualityCheckerError: if data or rate is not set yet.
-
-        """
-        if not self._raw_data or not self._rate:
-            raise QualityCheckerError('Data and rate is not set yet')
-
-    def check_freqs(self, expected_freqs, freq_threshold):
-        """Checks the dominant frequencies in the channels.
-
-        Args:
-            expected_freq: A list of frequencies. If frequency is 0, it
-                              means this channel should be ignored.
-            freq_threshold: The difference threshold to compare two
-                               frequencies.
-
-        """
-        logging.debug('expected_freqs: %s', expected_freqs)
-        for idx, expected_freq in enumerate(expected_freqs):
-            if expected_freq == 0:
-                continue
-            if not self._spectrals[idx]:
-                raise CompareFailure(
-                    'Failed at channel %d: no dominant frequency' % idx)
-            dominant_freq = self._spectrals[idx][0][0]
-            if abs(dominant_freq - expected_freq) > freq_threshold:
-                raise CompareFailure(
-                    'Failed at channel %d: %f is too far away from %f' %
-                    (idx, dominant_freq, expected_freq))
-
-    def check_quality(self):
-        """Checks the quality measurement results on each channel.
-
-        Raises:
-            QualityFailure when there is artifact.
-
-        """
-        error_msgs = []
-
-        for idx, quality_res in enumerate(self._quality_result):
-            artifacts = quality_res['artifacts']
-            if artifacts['noise_before_playback']:
-                error_msgs.append('Found noise before playback: %s' %
-                                  (artifacts['noise_before_playback']))
-            if artifacts['noise_after_playback']:
-                error_msgs.append('Found noise after playback: %s' %
-                                  (artifacts['noise_after_playback']))
-            if artifacts['delay_during_playback']:
-                error_msgs.append('Found delay during playback: %s' %
-                                  (artifacts['delay_during_playback']))
-            if artifacts['burst_during_playback']:
-                error_msgs.append('Found burst during playback: %s' %
-                                  (artifacts['burst_during_playback']))
-        if error_msgs:
-            raise QualityFailure('Found bad quality: %s',
-                                 '\n'.join(error_msgs))
-
-    def dump(self, output_file):
-        """Dumps the result into a file in json format.
-
-        Args:
-            output_file: A file path to dump spectral and quality
-                            measurement result of each channel.
-
-        """
-        dump_dict = {
-            'spectrals': self._spectrals,
-            'quality_result': self._quality_result
-        }
-        with open(output_file, 'w') as f:
-            json.dump(dump_dict, f)
-
-
-class CheckQualityError(Exception):
-    """Error in check_quality main function."""
-
-
-def read_audio_file(filename, channel, bit_width, rate):
-    """Reads audio file.
-
-    Args:
-        filename: The wav or raw file to check.
-        channel: For raw file. Number of channels.
-        bit_width: For raw file. Bit width of a sample.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-
-
-    Returns:
-        A tuple (raw_data, rate) where raw_data is audio_data.AudioRawData, rate
-            is sampling rate.
-
-    """
-    if filename.endswith('.wav'):
-        wavefile = WaveFile(filename)
-        raw_data = wavefile.raw_data
-        rate = wavefile.rate
-    elif filename.endswith('.raw'):
-        binary = None
-        with open(filename, 'rb') as f:
-            binary = f.read()
-        raw_data = audio_data.AudioRawData(binary=binary,
-                                           channel=channel,
-                                           sample_format='S%d_LE' % bit_width)
-    else:
-        raise CheckQualityError('File format for %s is not supported' %
-                                filename)
-
-    return raw_data, rate
-
-
-def get_quality_params(quality_block_size_secs,
-                       quality_frequency_error_threshold,
-                       quality_delay_amplitude_threshold,
-                       quality_noise_amplitude_threshold,
-                       quality_burst_amplitude_threshold):
-    """Gets quality parameters in arguments.
-
-    Args:
-        quality_block_size_secs: Input block size in seconds.
-        quality_frequency_error_threshold: Input the frequency error
-        threshold.
-        quality_delay_amplitude_threshold: Input the delay aplitutde
-        threshold.
-        quality_noise_amplitude_threshold: Input the noise aplitutde
-        threshold.
-        quality_burst_amplitude_threshold: Input the burst aplitutde
-        threshold.
-
-    Returns:
-        A QualityParams object.
-
-    """
-    quality_params = QualityParams(
-        block_size_secs=quality_block_size_secs,
-        frequency_error_threshold=quality_frequency_error_threshold,
-        delay_amplitude_threshold=quality_delay_amplitude_threshold,
-        noise_amplitude_threshold=quality_noise_amplitude_threshold,
-        burst_amplitude_threshold=quality_burst_amplitude_threshold)
-
-    return quality_params
-
-
-def quality_analysis(
-        filename,
-        output_file,
-        bit_width,
-        rate,
-        channel,
-        freqs=None,
-        freq_threshold=5,
-        ignore_high_freq=5000,
-        spectral_only=False,
-        quality_block_size_secs=DEFAULT_QUALITY_BLOCK_SIZE_SECS,
-        quality_burst_amplitude_threshold=DEFAULT_BURST_AMPLITUDE_THRESHOLD,
-        quality_delay_amplitude_threshold=DEFAULT_DELAY_AMPLITUDE_THRESHOLD,
-        quality_frequency_error_threshold=DEFAULT_FREQUENCY_ERROR_THRESHOLD,
-        quality_noise_amplitude_threshold=DEFAULT_NOISE_AMPLITUDE_THRESHOLD,
-):
-    """ Runs various functions to measure audio quality base on user input.
-
-    Args:
-        filename: The wav or raw file to check.
-        output_file: Output file to dump analysis result in JSON format.
-        bit_width: For raw file. Bit width of a sample.
-        rate: Sampling rate in samples per second. Example inputs: 44100,
-        48000
-        channel: For raw file. Number of channels.
-        freqs: Expected frequencies in the channels.
-        freq_threshold: Frequency difference threshold in Hz.
-        ignore_high_freq: Frequency threshold in Hz to be ignored for high
-        frequency. Default is 5KHz
-        spectral_only: Only do spectral analysis on each channel.
-        quality_block_size_secs: Input block size in seconds.
-        quality_frequency_error_threshold: Input the frequency error
-        threshold.
-        quality_delay_amplitude_threshold: Input the delay aplitutde
-        threshold.
-        quality_noise_amplitude_threshold: Input the noise aplitutde
-        threshold.
-        quality_burst_amplitude_threshold: Input the burst aplitutde
-        threshold.
-    """
-
-    raw_data, rate = read_audio_file(filename, channel, bit_width, rate)
-
-    checker = QualityChecker(raw_data, rate)
-
-    quality_params = get_quality_params(quality_block_size_secs,
-                                        quality_frequency_error_threshold,
-                                        quality_delay_amplitude_threshold,
-                                        quality_noise_amplitude_threshold,
-                                        quality_burst_amplitude_threshold)
-
-    checker.do_spectral_analysis(ignore_high_freq=ignore_high_freq,
-                                 check_quality=(not spectral_only),
-                                 quality_params=quality_params)
-
-    checker.dump(output_file)
-
-    if freqs:
-        checker.check_freqs(freqs, freq_threshold)
-
-    if not spectral_only:
-        checker.check_quality()
-    logging.debug("Audio analysis completed.")
diff --git a/src/antlion/test_utils/bt/A2dpBaseTest.py b/src/antlion/test_utils/bt/A2dpBaseTest.py
deleted file mode 100644
index 1b8d4e0..0000000
--- a/src/antlion/test_utils/bt/A2dpBaseTest.py
+++ /dev/null
@@ -1,445 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Stream music through connected device from phone test implementation."""
-import antlion
-import os
-import pandas as pd
-import shutil
-import time
-
-import antlion.test_utils.coex.audio_test_utils as atu
-import antlion.test_utils.bt.bt_test_utils as btutils
-from antlion import asserts
-from antlion.test_utils.bt import bt_constants
-from antlion.test_utils.bt import BtEnum
-from antlion.test_utils.abstract_devices.bluetooth_handsfree_abstract_device import BluetoothHandsfreeAbstractDeviceFactory as bt_factory
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-from antlion.test_utils.bt.ble_performance_test_utils import plot_graph
-from antlion.test_utils.power.PowerBTBaseTest import ramp_attenuation
-from antlion.test_utils.bt.loggers import bluetooth_metric_logger as log
-from antlion.signals import TestPass, TestError
-
-PHONE_MUSIC_FILE_DIRECTORY = '/sdcard/Music'
-INIT_ATTEN = 0
-WAIT_TIME = 1
-
-
-class A2dpBaseTest(BluetoothBaseTest):
-    """Stream audio file over desired Bluetooth codec configurations.
-
-    Audio file should be a sine wave. Other audio files will not work for the
-    test analysis metrics.
-
-    Device under test is Android phone, connected to headset with a controller
-    that can generate a BluetoothHandsfreeAbstractDevice from test_utils.
-    abstract_devices.bluetooth_handsfree_abstract_device.
-    BuetoothHandsfreeAbstractDeviceFactory.
-    """
-    def setup_class(self):
-
-        super().setup_class()
-        self.bt_logger = log.BluetoothMetricLogger.for_test_case()
-        self.dut = self.android_devices[0]
-        req_params = ['audio_params', 'music_files', 'system_path_loss']
-        opt_params = ['bugreport']
-        #'audio_params' is a dict, contains the audio device type, audio streaming
-        #settings such as volumn, duration, audio recording parameters such as
-        #channel, sampling rate/width, and thdn parameters for audio processing
-        self.unpack_userparams(req_params)
-        self.unpack_userparams(opt_params, bugreport=None)
-        # Find music file and push it to the dut
-        music_src = self.music_files[0]
-        music_dest = PHONE_MUSIC_FILE_DIRECTORY
-        success = self.dut.push_system_file(music_src, music_dest)
-        if success:
-            self.music_file = os.path.join(PHONE_MUSIC_FILE_DIRECTORY,
-                                           os.path.basename(music_src))
-        # Initialize media_control class
-        self.media = btutils.MediaControlOverSl4a(self.dut, self.music_file)
-        # Set attenuator to minimum attenuation
-        if hasattr(self, 'attenuators'):
-            self.attenuator = self.attenuators[0]
-            self.attenuator.set_atten(INIT_ATTEN)
-        # Create the BTOE(Bluetooth-Other-End) device object
-        bt_devices = self.user_params.get('bt_devices', [])
-        if bt_devices:
-            attr, idx = bt_devices.split(':')
-            self.bt_device_controller = getattr(self, attr)[int(idx)]
-            self.bt_device = bt_factory().generate(self.bt_device_controller)
-        else:
-            self.log.error('No BT devices config is provided!')
-
-    def teardown_class(self):
-
-        super().teardown_class()
-        if hasattr(self, 'media'):
-            self.media.stop()
-        if hasattr(self, 'attenuator'):
-            self.attenuator.set_atten(INIT_ATTEN)
-        self.dut.droid.bluetoothFactoryReset()
-        self.bt_device.reset()
-        self.bt_device.power_off()
-        btutils.disable_bluetooth(self.dut.droid)
-
-    def setup_test(self):
-
-        super().setup_test()
-        # Initialize audio capture devices
-        self.audio_device = atu.get_audio_capture_device(
-            self.bt_device_controller, self.audio_params)
-        # Reset BT to factory defaults
-        self.dut.droid.bluetoothFactoryReset()
-        self.bt_device.reset()
-        self.bt_device.power_on()
-        btutils.enable_bluetooth(self.dut.droid, self.dut.ed)
-        btutils.connect_phone_to_headset(self.dut, self.bt_device, 60)
-        vol = self.dut.droid.getMaxMediaVolume() * self.audio_params['volume']
-        self.dut.droid.setMediaVolume(0)
-        time.sleep(1)
-        self.dut.droid.setMediaVolume(int(vol))
-
-    def teardown_test(self):
-
-        super().teardown_test()
-        self.dut.droid.bluetoothFactoryReset()
-        self.media.stop()
-        # Set Attenuator to the initial attenuation
-        if hasattr(self, 'attenuator'):
-            self.attenuator.set_atten(INIT_ATTEN)
-        self.bt_device.reset()
-        self.bt_device.power_off()
-        btutils.disable_bluetooth(self.dut.droid)
-
-    def on_pass(self, test_name, begin_time):
-
-        if hasattr(self, 'bugreport') and self.bugreport == 1:
-            self._take_bug_report(test_name, begin_time)
-
-    def play_and_record_audio(self, duration):
-        """Play and record audio for a set duration.
-
-        Args:
-            duration: duration in seconds for music playing
-        Returns:
-            audio_captured: captured audio file path
-        """
-
-        self.log.info('Play and record audio for {} second'.format(duration))
-        self.media.play()
-        proc = self.audio_device.start()
-        time.sleep(duration + WAIT_TIME)
-        proc.kill()
-        time.sleep(WAIT_TIME)
-        proc.kill()
-        audio_captured = self.audio_device.stop()
-        self.media.stop()
-        self.log.info('Audio play and record stopped')
-        asserts.assert_true(audio_captured, 'Audio not recorded')
-        return audio_captured
-
-    def _get_bt_link_metrics(self, tag=''):
-        """Get bt link metrics such as rssi and tx pwls.
-
-        Returns:
-            master_metrics_list: list of metrics of central device
-            slave_metrics_list: list of metric of peripheral device
-        """
-
-        self.raw_bt_metrics_path = os.path.join(self.log_path,
-                                                'BT_Raw_Metrics')
-        self.media.play()
-        # Get master rssi and power level
-        process_data_dict = btutils.get_bt_metric(
-            self.dut, tag=tag, log_path=self.raw_bt_metrics_path)
-        rssi_master = process_data_dict.get('rssi')
-        pwl_master = process_data_dict.get('pwlv')
-        rssi_c0_master = process_data_dict.get('rssi_c0')
-        rssi_c1_master = process_data_dict.get('rssi_c1')
-        txpw_c0_master = process_data_dict.get('txpw_c0')
-        txpw_c1_master = process_data_dict.get('txpw_c1')
-        bftx_master = process_data_dict.get('bftx')
-        divtx_master = process_data_dict.get('divtx')
-
-        if isinstance(self.bt_device_controller,
-                      antlion.controllers.android_device.AndroidDevice):
-            rssi_slave = btutils.get_bt_rssi(self.bt_device_controller,
-                                             tag=tag,
-                                             log_path=self.raw_bt_metrics_path)
-        else:
-            rssi_slave = None
-        self.media.stop()
-
-        master_metrics_list = [
-            rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
-            txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
-        ]
-        slave_metrics_list = [rssi_slave]
-
-        return master_metrics_list, slave_metrics_list
-
-    def run_thdn_analysis(self, audio_captured, tag):
-        """Calculate Total Harmonic Distortion plus Noise for latest recording.
-
-        Store result in self.metrics.
-
-        Args:
-            audio_captured: the captured audio file
-        Returns:
-            thdn: thdn value in a list
-        """
-        # Calculate Total Harmonic Distortion + Noise
-        audio_result = atu.AudioCaptureResult(audio_captured,
-                                              self.audio_params)
-        thdn = audio_result.THDN(**self.audio_params['thdn_params'])
-        file_name = tag + os.path.basename(audio_result.path)
-        file_new = os.path.join(os.path.dirname(audio_result.path), file_name)
-        shutil.copyfile(audio_result.path, file_new)
-        for ch_no, t in enumerate(thdn):
-            self.log.info('THD+N for channel %s: %.4f%%' % (ch_no, t * 100))
-        return thdn
-
-    def run_anomaly_detection(self, audio_captured):
-        """Detect anomalies in latest recording.
-
-        Store result in self.metrics.
-
-        Args:
-            audio_captured: the captured audio file
-        Returns:
-            anom: anom detected in the captured file
-        """
-        # Detect Anomalies
-        audio_result = atu.AudioCaptureResult(audio_captured)
-        anom = audio_result.detect_anomalies(
-            **self.audio_params['anomaly_params'])
-        num_anom = 0
-        for ch_no, anomalies in enumerate(anom):
-            if anomalies:
-                for anomaly in anomalies:
-                    num_anom += 1
-                    start, end = anomaly
-                    self.log.warning(
-                        'Anomaly on channel {} at {}:{}. Duration '
-                        '{} sec'.format(ch_no, start // 60, start % 60,
-                                        end - start))
-        else:
-            self.log.info('%i anomalies detected.' % num_anom)
-        return anom
-
-    def generate_proto(self, data_points, codec_type, sample_rate,
-                       bits_per_sample, channel_mode):
-        """Generate a results protobuf.
-
-        Args:
-            data_points: list of dicts representing info to go into
-              AudioTestDataPoint protobuffer message.
-            codec_type: The codec type config to store in the proto.
-            sample_rate: The sample rate config to store in the proto.
-            bits_per_sample: The bits per sample config to store in the proto.
-            channel_mode: The channel mode config to store in the proto.
-        Returns:
-             dict: Dictionary with key 'proto' mapping to serialized protobuf,
-               'proto_ascii' mapping to human readable protobuf info, and 'test'
-               mapping to the test class name that generated the results.
-        """
-
-        # Populate protobuf
-        test_case_proto = self.bt_logger.proto_module.BluetoothAudioTestResult(
-        )
-
-        for data_point in data_points:
-            audio_data_proto = test_case_proto.data_points.add()
-            log.recursive_assign(audio_data_proto, data_point)
-
-        codec_proto = test_case_proto.a2dp_codec_config
-        codec_proto.codec_type = bt_constants.codec_types[codec_type]
-        codec_proto.sample_rate = int(sample_rate)
-        codec_proto.bits_per_sample = int(bits_per_sample)
-        codec_proto.channel_mode = bt_constants.channel_modes[channel_mode]
-
-        self.bt_logger.add_config_data_to_proto(test_case_proto, self.dut,
-                                                self.bt_device)
-
-        self.bt_logger.add_proto_to_results(test_case_proto,
-                                            self.__class__.__name__)
-
-        proto_dict = self.bt_logger.get_proto_dict(self.__class__.__name__,
-                                                   test_case_proto)
-        del proto_dict["proto_ascii"]
-        return proto_dict
-
-    def set_test_atten(self, atten):
-        """Set the attenuation(s) for current test condition.
-
-        """
-        if hasattr(self, 'dual_chain') and self.dual_chain == 1:
-            ramp_attenuation(self.atten_c0,
-                             atten,
-                             attenuation_step_max=2,
-                             time_wait_in_between=1)
-            self.log.info('Set Chain 0 attenuation to %d dB', atten)
-            ramp_attenuation(self.atten_c1,
-                             atten + self.gain_mismatch,
-                             attenuation_step_max=2,
-                             time_wait_in_between=1)
-            self.log.info('Set Chain 1 attenuation to %d dB',
-                          atten + self.gain_mismatch)
-        else:
-            ramp_attenuation(self.attenuator, atten)
-            self.log.info('Set attenuation to %d dB', atten)
-
-    def run_a2dp_to_max_range(self, codec_config):
-        attenuation_range = range(self.attenuation_vector['start'],
-                                  self.attenuation_vector['stop'] + 1,
-                                  self.attenuation_vector['step'])
-
-        data_points = []
-        self.file_output = os.path.join(
-            self.log_path, '{}.csv'.format(self.current_test_name))
-
-        # Set Codec if needed
-        current_codec = self.dut.droid.bluetoothA2dpGetCurrentCodecConfig()
-        current_codec_type = BtEnum.BluetoothA2dpCodecType(
-            current_codec['codecType']).name
-        if current_codec_type != codec_config['codec_type']:
-            codec_set = btutils.set_bluetooth_codec(self.dut, **codec_config)
-            asserts.assert_true(codec_set, 'Codec configuration failed.')
-        else:
-            self.log.info('Current codec is {}, no need to change'.format(
-                current_codec_type))
-
-        #loop RSSI with the same codec setting
-        for atten in attenuation_range:
-            self.media.play()
-            self.set_test_atten(atten)
-
-            tag = 'codec_{}_attenuation_{}dB_'.format(
-                codec_config['codec_type'], atten)
-            recorded_file = self.play_and_record_audio(
-                self.audio_params['duration'])
-            thdns = self.run_thdn_analysis(recorded_file, tag)
-
-            # Collect Metrics for dashboard
-            [
-                rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
-                txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
-            ], [rssi_slave] = self._get_bt_link_metrics(tag)
-
-            data_point = {
-                'attenuation_db':
-                int(self.attenuator.get_atten()),
-                'pathloss':
-                atten + self.system_path_loss,
-                'rssi_primary':
-                rssi_master.get(self.dut.serial, -127),
-                'tx_power_level_master':
-                pwl_master.get(self.dut.serial, -127),
-                'rssi_secondary':
-                rssi_slave.get(self.bt_device_controller.serial, -127),
-                'rssi_c0_dut':
-                rssi_c0_master.get(self.dut.serial, -127),
-                'rssi_c1_dut':
-                rssi_c1_master.get(self.dut.serial, -127),
-                'txpw_c0_dut':
-                txpw_c0_master.get(self.dut.serial, -127),
-                'txpw_c1_dut':
-                txpw_c1_master.get(self.dut.serial, -127),
-                'bftx_state':
-                bftx_master.get(self.dut.serial, -127),
-                'divtx_state':
-                divtx_master.get(self.dut.serial, -127),
-                'total_harmonic_distortion_plus_noise_percent':
-                thdns[0] * 100
-            }
-            self.log.info(data_point)
-            # bokeh data for generating BokehFigure
-            bokeh_data = {
-                'x_label': 'Pathloss (dBm)',
-                'primary_y_label': 'RSSI (dBm)',
-                'log_path': self.log_path,
-                'current_test_name': self.current_test_name
-            }
-            #plot_data for adding line to existing BokehFigure
-            plot_data = {
-                'line_one': {
-                    'x_label': 'Pathloss (dBm)',
-                    'primary_y_label': 'RSSI (dBm)',
-                    'x_column': 'pathloss',
-                    'y_column': 'rssi_primary',
-                    'legend': 'DUT RSSI (dBm)',
-                    'marker': 'circle_x',
-                    'y_axis': 'default'
-                },
-                'line_two': {
-                    'x_column': 'pathloss',
-                    'y_column': 'rssi_secondary',
-                    'legend': 'Remote device RSSI (dBm)',
-                    'marker': 'hex',
-                    'y_axis': 'default'
-                },
-                'line_three': {
-                    'x_column': 'pathloss',
-                    'y_column': 'tx_power_level_master',
-                    'legend': 'DUT TX Power (dBm)',
-                    'marker': 'hex',
-                    'y_axis': 'secondary'
-                }
-            }
-
-            # Check thdn for glitches, stop if max range reached
-            if thdns[0] == 0:
-                proto_dict = self.generate_proto(data_points, **codec_config)
-                A2dpRange_df = pd.DataFrame(data_points)
-                A2dpRange_df.to_csv(self.file_output, index=False)
-                plot_graph(A2dpRange_df,
-                           plot_data,
-                           bokeh_data,
-                           secondary_y_label='DUT TX Power')
-                raise TestError(
-                    'Music play/recording is not working properly or Connection has lost'
-                )
-
-            data_points.append(data_point)
-            A2dpRange_df = pd.DataFrame(data_points)
-
-            for thdn in thdns:
-                if thdn >= self.audio_params['thdn_threshold']:
-                    self.log.info(
-                        'Max range at attenuation {} dB'.format(atten))
-                    self.log.info('DUT rssi {} dBm, DUT tx power level {}, '
-                                  'Remote rssi {} dBm'.format(
-                                      rssi_master, pwl_master, rssi_slave))
-                    proto_dict = self.generate_proto(data_points,
-                                                     **codec_config)
-                    A2dpRange_df.to_csv(self.file_output, index=False)
-                    plot_graph(A2dpRange_df,
-                               plot_data,
-                               bokeh_data,
-                               secondary_y_label='DUT TX Power')
-                    return True
-                    raise TestPass('Max range reached and move to next codec',
-                                   extras=proto_dict)
-        # Save Data points to csv
-        A2dpRange_df.to_csv(self.file_output, index=False)
-        # Plot graph
-        plot_graph(A2dpRange_df,
-                   plot_data,
-                   bokeh_data,
-                   secondary_y_label='DUT TX Power')
-        proto_dict = self.generate_proto(data_points, **codec_config)
-        return True
-        raise TestPass('Could not reach max range, need extra attenuation.',
-                       extras=proto_dict)
diff --git a/src/antlion/test_utils/bt/AvrcpBaseTest.py b/src/antlion/test_utils/bt/AvrcpBaseTest.py
deleted file mode 100644
index d6d2007..0000000
--- a/src/antlion/test_utils/bt/AvrcpBaseTest.py
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Perform base Avrcp command from headset to dut"""
-import time
-import os
-import queue
-
-from antlion import asserts
-from antlion.test_utils.abstract_devices.bluetooth_handsfree_abstract_device import BluetoothHandsfreeAbstractDeviceFactory as Factory
-from antlion.test_utils.bt.simulated_carkit_device import SimulatedCarkitDevice
-from antlion.test_utils.bt.bt_test_utils import connect_phone_to_headset
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-from antlion.test_utils.car.car_media_utils import EVENT_PLAY_RECEIVED
-from antlion.test_utils.car.car_media_utils import EVENT_PAUSE_RECEIVED
-from antlion.test_utils.car.car_media_utils import EVENT_SKIP_NEXT_RECEIVED
-from antlion.test_utils.car.car_media_utils import EVENT_SKIP_PREV_RECEIVED
-from antlion.test_utils.car.car_media_utils import CMD_MEDIA_PLAY
-from antlion.test_utils.car.car_media_utils import CMD_MEDIA_PAUSE
-
-ADB_FILE_EXISTS = 'test -e %s && echo True'
-DEFAULT_TIMEOUT = 5
-EVENT_TIMEOUT = 1
-
-
-class AvrcpBaseTest(BluetoothBaseTest):
-    def __init__(self, configs):
-        super(AvrcpBaseTest, self).__init__(configs)
-        self.dut = self.android_devices[0]
-        serial = self.user_params['simulated_carkit_device']
-        controller = SimulatedCarkitDevice(serial)
-        self.controller = Factory().generate(controller)
-
-        self.phone_music_files = []
-        self.host_music_files = []
-        for music_file in self.user_params['music_file_names']:
-            self.phone_music_files.append(os.path.join(
-                self.user_params['phone_music_file_dir'], music_file))
-            self.host_music_files.append(os.path.join(
-                self.user_params['host_music_file_dir'], music_file))
-
-        self.ensure_phone_has_music_file()
-
-    def setup_class(self):
-        super().setup_class()
-        self.controller.power_on()
-        time.sleep(DEFAULT_TIMEOUT)
-
-    def teardown_class(self):
-        super().teardown_class()
-        self.dut.droid.mediaPlayStop()
-        self.controller.destroy()
-
-    def setup_test(self):
-        self.dut.droid.bluetoothMediaPhoneSL4AMBSStart()
-        time.sleep(DEFAULT_TIMEOUT)
-
-        self.dut.droid.bluetoothStartPairingHelper(True)
-        if not connect_phone_to_headset(self.dut, self.controller, 600):
-            asserts.fail('Not able to connect to hands-free device')
-
-        #make sure SL4AMBS is active MediaSession
-        self.dut.droid.bluetoothMediaHandleMediaCommandOnPhone(CMD_MEDIA_PLAY)
-        time.sleep(0.5)
-        self.dut.droid.bluetoothMediaHandleMediaCommandOnPhone(CMD_MEDIA_PAUSE)
-
-    def teardown_test(self):
-        self.dut.droid.bluetoothMediaPhoneSL4AMBSStop()
-
-    def ensure_phone_has_music_file(self):
-        """Make sure music file (based on config values) is on the phone."""
-        for host_file, phone_file in zip(self.host_music_files,
-                                         self.phone_music_files):
-            if self.dut.adb.shell(ADB_FILE_EXISTS % phone_file):
-                self.log.info(
-                    'Music file {} already on phone. Skipping file transfer.'
-                    .format(host_file))
-            else:
-                self.dut.adb.push(host_file, phone_file)
-                has_file = self.dut.adb.shell(
-                        ADB_FILE_EXISTS % phone_file)
-                if not has_file:
-                    self.log.error(
-                        'Audio file {} not pushed to phone.'.format(host_file))
-                self.log.info('Music file successfully pushed to phone.')
-
-    def play_from_controller(self):
-        self.dut.ed.clear_all_events()
-        self.controller.play()
-        try:
-            self.dut.ed.pop_event(EVENT_PLAY_RECEIVED, EVENT_TIMEOUT)
-        except queue.Empty as e:
-            asserts.fail('{} Event Not received'.format(EVENT_PLAY_RECEIVED))
-        self.log.info('Event Received : {}'.format(EVENT_PLAY_RECEIVED))
-
-    def pause_from_controller(self):
-        self.dut.ed.clear_all_events()
-        self.controller.pause()
-        try:
-            self.dut.ed.pop_event(EVENT_PAUSE_RECEIVED, EVENT_TIMEOUT)
-        except queue.Empty as e:
-            asserts.fail('{} Event Not received'.format(EVENT_PAUSE_RECEIVED))
-        self.log.info('Event Received : {}'.format(EVENT_PAUSE_RECEIVED))
-
-    def skip_next_from_controller(self):
-        self.dut.ed.clear_all_events()
-        self.controller.next_track()
-        try:
-            self.dut.ed.pop_event(EVENT_SKIP_NEXT_RECEIVED, EVENT_TIMEOUT)
-        except queue.Empty as e:
-            asserts.fail('{} Event Not '
-                         'received'.format(EVENT_SKIP_NEXT_RECEIVED))
-        self.log.info('Event Received : {}'.format(EVENT_SKIP_NEXT_RECEIVED))
-
-    def skip_prev_from_controller(self):
-        self.dut.ed.clear_all_events()
-        self.controller.previous_track()
-        try:
-            self.dut.ed.pop_event(EVENT_SKIP_PREV_RECEIVED, EVENT_TIMEOUT)
-        except queue.Empty as e:
-            asserts.fail('{} Event Not '
-                         'received'.format(EVENT_SKIP_PREV_RECEIVED))
-        self.log.info('Event Received : {}'.format(EVENT_SKIP_PREV_RECEIVED))
diff --git a/src/antlion/test_utils/bt/BleEnum.py b/src/antlion/test_utils/bt/BleEnum.py
deleted file mode 100644
index 4aed867..0000000
--- a/src/antlion/test_utils/bt/BleEnum.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-
-
-class ScanSettingsCallbackType(Enum):
-    CALLBACK_TYPE_ALL_MATCHES = 1
-    CALLBACK_TYPE_FIRST_MATCH = 2
-    CALLBACK_TYPE_MATCH_LOST = 4
-    CALLBACK_TYPE_FOUND_AND_LOST = 6
-
-
-class ScanSettingsMatchMode(Enum):
-    AGGRESIVE = 1
-    STICKY = 2
-
-
-class ScanSettingsMatchNum(Enum):
-    MATCH_NUM_ONE_ADVERTISEMENT = 1
-    MATCH_NUM_FEW_ADVERTISEMENT = 2
-    MATCH_NUM_MAX_ADVERTISEMENT = 3
-
-
-class ScanSettingsScanResultType(Enum):
-    SCAN_RESULT_TYPE_FULL = 0
-    SCAN_RESULT_TYPE_ABBREVIATED = 1
-
-
-class ScanSettingsScanMode(Enum):
-    SCAN_MODE_OPPORTUNISTIC = -1
-    SCAN_MODE_LOW_POWER = 0
-    SCAN_MODE_BALANCED = 1
-    SCAN_MODE_LOW_LATENCY = 2
-
-
-class ScanSettingsReportDelaySeconds(Enum):
-    MIN = 0
-    MAX = 9223372036854775807
-
-
-class ScanSettingsPhy(Enum):
-    PHY_LE_1M = 1
-    PHY_LE_CODED = 3
-    PHY_LE_ALL_SUPPORTED = 255
-
-
-class AdvertiseSettingsAdvertiseType(Enum):
-    ADVERTISE_TYPE_NON_CONNECTABLE = 0
-    ADVERTISE_TYPE_CONNECTABLE = 1
-
-
-class AdvertiseSettingsAdvertiseMode(Enum):
-    ADVERTISE_MODE_LOW_POWER = 0
-    ADVERTISE_MODE_BALANCED = 1
-    ADVERTISE_MODE_LOW_LATENCY = 2
-
-
-class AdvertiseSettingsAdvertiseTxPower(Enum):
-    ADVERTISE_TX_POWER_ULTRA_LOW = 0
-    ADVERTISE_TX_POWER_LOW = 1
-    ADVERTISE_TX_POWER_MEDIUM = 2
-    ADVERTISE_TX_POWER_HIGH = 3
-
-
-class BLEConnectionPriority(Enum):
-    # Connection Interval: BALANCED = 36ms, HIGH = 12ms, LOW = 96ms
-    CONNECTION_PRIORITY_BALANCED = 0
-    CONNECTION_PRIORITY_HIGH = 1
-    CONNECTION_PRIORITY_LOW = 2
-
-
-class JavaInteger(Enum):
-    MIN = -2147483648
-    MAX = 2147483647
-
-
-class Uuids(Enum):
-    P_Service = "0000feef-0000-1000-8000-00805f9b34fb"
-    HR_SERVICE = "0000180d-0000-1000-8000-00805f9b34fb"
-
-
-class AdvertiseErrorCode(Enum):
-    DATA_TOO_LARGE = 1
-    TOO_MANY_ADVERTISERS = 2
-    ADVERTISE_ALREADY_STARTED = 3
-    BLUETOOTH_INTERNAL_FAILURE = 4
-    FEATURE_NOT_SUPPORTED = 5
-
-
-class BluetoothAdapterState(Enum):
-    STATE_OFF = 10
-    STATE_TURNING_ON = 11
-    STATE_ON = 12
-    STATE_TURNING_OFF = 13
-    STATE_BLE_TURNING_ON = 14
-    STATE_BLE_ON = 15
-    STATE_BLE_TURNING_OFF = 16
diff --git a/src/antlion/test_utils/bt/BluetoothBaseTest.py b/src/antlion/test_utils/bt/BluetoothBaseTest.py
deleted file mode 100644
index 4730bc9..0000000
--- a/src/antlion/test_utils/bt/BluetoothBaseTest.py
+++ /dev/null
@@ -1,191 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-    Base Class for Defining Common Bluetooth Test Functionality
-"""
-
-import threading
-import time
-import traceback
-import os
-from antlion.base_test import BaseTestClass
-from antlion.signals import TestSignal
-from antlion.utils import dump_string_to_file
-
-from antlion.test_utils.bt.bt_test_utils import get_device_selector_dictionary
-from antlion.test_utils.bt.bt_test_utils import reset_bluetooth
-from antlion.test_utils.bt.bt_test_utils import setup_multiple_devices_for_bt_test
-from antlion.test_utils.bt.bt_test_utils import take_btsnoop_logs
-from antlion.test_utils.bt.ble_lib import BleLib
-from antlion.test_utils.bt.bta_lib import BtaLib
-from antlion.test_utils.bt.config_lib import ConfigLib
-from antlion.test_utils.bt.gattc_lib import GattClientLib
-from antlion.test_utils.bt.gatts_lib import GattServerLib
-from antlion.test_utils.bt.rfcomm_lib import RfcommLib
-from antlion.test_utils.bt.shell_commands_lib import ShellCommands
-
-
-class BluetoothBaseTest(BaseTestClass):
-    DEFAULT_TIMEOUT = 10
-    start_time = 0
-    timer_list = []
-
-    # Use for logging in the test cases to facilitate
-    # faster log lookup and reduce ambiguity in logging.
-    @staticmethod
-    def bt_test_wrap(fn):
-        def _safe_wrap_test_case(self, *args, **kwargs):
-            test_id = "{}:{}:{}".format(self.__class__.__name__, fn.__name__,
-                                        time.time())
-            log_string = "[Test ID] {}".format(test_id)
-            self.log.info(log_string)
-            try:
-                for ad in self.android_devices:
-                    ad.droid.logI("Started " + log_string)
-                result = fn(self, *args, **kwargs)
-                for ad in self.android_devices:
-                    ad.droid.logI("Finished " + log_string)
-                if result is not True and "bt_auto_rerun" in self.user_params:
-                    self.teardown_test()
-                    log_string = "[Rerun Test ID] {}. 1st run failed.".format(
-                        test_id)
-                    self.log.info(log_string)
-                    self.setup_test()
-                    for ad in self.android_devices:
-                        ad.droid.logI("Rerun Started " + log_string)
-                    result = fn(self, *args, **kwargs)
-                    if result is True:
-                        self.log.info("Rerun passed.")
-                    elif result is False:
-                        self.log.info("Rerun failed.")
-                    else:
-                        # In the event that we have a non-bool or null
-                        # retval, we want to clearly distinguish this in the
-                        # logs from an explicit failure, though the test will
-                        # still be considered a failure for reporting purposes.
-                        self.log.info("Rerun indeterminate.")
-                        result = False
-                return result
-            except TestSignal:
-                raise
-            except Exception as e:
-                self.log.error(traceback.format_exc())
-                self.log.error(str(e))
-                raise
-            return fn(self, *args, **kwargs)
-
-        return _safe_wrap_test_case
-
-    def setup_class(self):
-        super().setup_class()
-        for ad in self.android_devices:
-            self._setup_bt_libs(ad)
-        if 'preferred_device_order' in self.user_params:
-            prefered_device_order = self.user_params['preferred_device_order']
-            for i, ad in enumerate(self.android_devices):
-                if ad.serial in prefered_device_order:
-                    index = prefered_device_order.index(ad.serial)
-                    self.android_devices[i], self.android_devices[index] = \
-                        self.android_devices[index], self.android_devices[i]
-
-        if "reboot_between_test_class" in self.user_params:
-            threads = []
-            for a in self.android_devices:
-                thread = threading.Thread(
-                    target=self._reboot_device, args=([a]))
-                threads.append(thread)
-                thread.start()
-            for t in threads:
-                t.join()
-        if not setup_multiple_devices_for_bt_test(self.android_devices):
-            return False
-        self.device_selector = get_device_selector_dictionary(
-            self.android_devices)
-        if "bluetooth_proto_path" in self.user_params:
-            for ad in self.android_devices:
-                ad.metrics_path = os.path.join(ad.log_path, "BluetoothMetrics")
-                os.makedirs(ad.metrics_path, exist_ok=True)
-        return True
-
-    def setup_test(self):
-        self.timer_list = []
-        for a in self.android_devices:
-            a.ed.clear_all_events()
-            a.droid.setScreenTimeout(500)
-            a.droid.wakeUpNow()
-        return True
-
-    def on_fail(self, test_name, begin_time):
-        self.log.debug(
-            "Test {} failed. Gathering bugreport and btsnoop logs".format(
-                test_name))
-        take_btsnoop_logs(self.android_devices, self, test_name)
-        self._take_bug_report(test_name, begin_time)
-        for _ in range(5):
-            if reset_bluetooth(self.android_devices):
-                break
-            else:
-                self.log.error("Failed to reset Bluetooth... retrying.")
-        return
-
-    def _get_time_in_milliseconds(self):
-        return int(round(time.time() * 1000))
-
-    def start_timer(self):
-        self.start_time = self._get_time_in_milliseconds()
-
-    def end_timer(self):
-        total_time = self._get_time_in_milliseconds() - self.start_time
-        self.timer_list.append(total_time)
-        self.start_time = 0
-        return total_time
-
-    def log_stats(self):
-        if self.timer_list:
-            self.log.info("Overall list {}".format(self.timer_list))
-            self.log.info("Average of list {}".format(
-                sum(self.timer_list) / float(len(self.timer_list))))
-            self.log.info("Maximum of list {}".format(max(self.timer_list)))
-            self.log.info("Minimum of list {}".format(min(self.timer_list)))
-            self.log.info("Total items in list {}".format(
-                len(self.timer_list)))
-        self.timer_list = []
-
-    def _setup_bt_libs(self, android_device):
-        # Bluetooth Low Energy library.
-        setattr(android_device, "ble", BleLib(
-            log=self.log, dut=android_device))
-        # Bluetooth Adapter library.
-        setattr(android_device, "bta", BtaLib(
-            log=self.log, dut=android_device))
-        # Bluetooth stack config library.
-        setattr(android_device, "config",
-                ConfigLib(log=self.log, dut=android_device))
-        # GATT Client library.
-        setattr(android_device, "gattc",
-                GattClientLib(log=self.log, dut=android_device))
-        # GATT Server library.
-        setattr(android_device, "gatts",
-                GattServerLib(log=self.log, dut=android_device))
-        # RFCOMM library.
-        setattr(android_device, "rfcomm",
-                RfcommLib(log=self.log, dut=android_device))
-        # Shell command library
-        setattr(android_device, "shell",
-                ShellCommands(log=self.log, dut=android_device))
-        # Setup Android Device feature list
-        setattr(android_device, "features",
-                android_device.adb.shell("pm list features").split("\n"))
diff --git a/src/antlion/test_utils/bt/BluetoothCarHfpBaseTest.py b/src/antlion/test_utils/bt/BluetoothCarHfpBaseTest.py
deleted file mode 100644
index 11e4c1b..0000000
--- a/src/antlion/test_utils/bt/BluetoothCarHfpBaseTest.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This is base class for tests that exercises different GATT procedures between two connected devices.
-Setup/Teardown methods take care of establishing connection, and doing GATT DB initialization/discovery.
-"""
-
-import os
-import time
-
-from antlion.keys import Config
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-from antlion.test_utils.bt.bt_test_utils import pair_pri_to_sec
-
-
-class BluetoothCarHfpBaseTest(BluetoothBaseTest):
-    DEFAULT_TIMEOUT = 15
-    ag_phone_number = ""
-    re_phone_number = ""
-
-    def __init__(self, controllers):
-        BluetoothBaseTest.__init__(self, controllers)
-        # HF : HandsFree (CarKit role)
-        self.hf = self.android_devices[0]
-        self.hf.log.info("Role set to HF (HandsFree Carkit role).")
-        # AG : Audio Gateway (Phone role)
-        self.ag = self.android_devices[1]
-        self.ag.log.info("Role set to AG (Audio Gateway Phone role).")
-        # RE : Remote Device (Phone being talked to role)
-        if len(self.android_devices) > 2:
-            self.re = self.android_devices[2]
-            self.re.log.info("Role set to RE (Remote device).")
-        else:
-            self.re = None
-        if len(self.android_devices) > 3:
-            self.re2 = self.android_devices[3]
-            self.re2.log.info("Role set to RE2 (Remote device 2).")
-        else:
-            self.re2 = None
-
-    def setup_class(self):
-        super(BluetoothCarHfpBaseTest, self).setup_class()
-        if not "sim_conf_file" in self.user_params.keys():
-            self.log.error("Missing mandatory user config \"sim_conf_file\"!")
-            return False
-        sim_conf_file = self.user_params["sim_conf_file"][0]
-        if not os.path.isfile(sim_conf_file):
-            sim_conf_file = os.path.join(
-                self.user_params[Config.key_config_path.value], sim_conf_file)
-            if not os.path.isfile(sim_conf_file):
-                self.log.error("Unable to load user config " + sim_conf_file +
-                               " from test config file.")
-                return False
-        # Pair and connect the devices.
-        # Grace time inbetween stack state changes
-        time.sleep(5)
-        if not pair_pri_to_sec(
-                self.hf, self.ag, attempts=4, auto_confirm=False):
-            self.log.error("Failed to pair")
-            return False
-        return True
diff --git a/src/antlion/test_utils/bt/BtEnum.py b/src/antlion/test_utils/bt/BtEnum.py
deleted file mode 100644
index 380095b..0000000
--- a/src/antlion/test_utils/bt/BtEnum.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-from enum import IntEnum
-
-
-class BluetoothScanModeType(IntEnum):
-    STATE_OFF = -1
-    SCAN_MODE_NONE = 0
-    SCAN_MODE_CONNECTABLE = 1
-    SCAN_MODE_CONNECTABLE_DISCOVERABLE = 3
-
-
-class BluetoothAdapterState(IntEnum):
-    STATE_OFF = 10
-    STATE_TURNING_ON = 11
-    STATE_ON = 12
-    STATE_TURNING_OFF = 13
-    STATE_BLE_TURNING_ON = 14
-    STATE_BLE_ON = 15
-    STATE_BLE_TURNING_OFF = 16
-
-
-class BluetoothProfile(IntEnum):
-    # Should be kept in sync with BluetoothProfile.java
-    HEADSET = 1
-    A2DP = 2
-    HEALTH = 3
-    INPUT_DEVICE = 4
-    PAN = 5
-    PBAP_SERVER = 6
-    GATT = 7
-    GATT_SERVER = 8
-    MAP = 9
-    SAP = 10
-    A2DP_SINK = 11
-    AVRCP_CONTROLLER = 12
-    HEADSET_CLIENT = 16
-    PBAP_CLIENT = 17
-    MAP_MCE = 18
-
-
-class RfcommUuid(Enum):
-    DEFAULT_UUID = "457807c0-4897-11df-9879-0800200c9a66"
-    BASE_UUID = "00000000-0000-1000-8000-00805F9B34FB"
-    SDP = "00000001-0000-1000-8000-00805F9B34FB"
-    UDP = "00000002-0000-1000-8000-00805F9B34FB"
-    RFCOMM = "00000003-0000-1000-8000-00805F9B34FB"
-    TCP = "00000004-0000-1000-8000-00805F9B34FB"
-    TCS_BIN = "00000005-0000-1000-8000-00805F9B34FB"
-    TCS_AT = "00000006-0000-1000-8000-00805F9B34FB"
-    ATT = "00000007-0000-1000-8000-00805F9B34FB"
-    OBEX = "00000008-0000-1000-8000-00805F9B34FB"
-    IP = "00000009-0000-1000-8000-00805F9B34FB"
-    FTP = "0000000A-0000-1000-8000-00805F9B34FB"
-    HTTP = "0000000C-0000-1000-8000-00805F9B34FB"
-    WSP = "0000000E-0000-1000-8000-00805F9B34FB"
-    BNEP = "0000000F-0000-1000-8000-00805F9B34FB"
-    UPNP = "00000010-0000-1000-8000-00805F9B34FB"
-    HIDP = "00000011-0000-1000-8000-00805F9B34FB"
-    HARDCOPY_CONTROL_CHANNEL = "00000012-0000-1000-8000-00805F9B34FB"
-    HARDCOPY_DATA_CHANNEL = "00000014-0000-1000-8000-00805F9B34FB"
-    HARDCOPY_NOTIFICATION = "00000016-0000-1000-8000-00805F9B34FB"
-    AVCTP = "00000017-0000-1000-8000-00805F9B34FB"
-    AVDTP = "00000019-0000-1000-8000-00805F9B34FB"
-    CMTP = "0000001B-0000-1000-8000-00805F9B34FB"
-    MCAP_CONTROL_CHANNEL = "0000001E-0000-1000-8000-00805F9B34FB"
-    MCAP_DATA_CHANNEL = "0000001F-0000-1000-8000-00805F9B34FB"
-    L2CAP = "00000100-0000-1000-8000-00805F9B34FB"
-
-
-class BluetoothProfileState(Enum):
-    # Should be kept in sync with BluetoothProfile#STATE_* constants.
-    STATE_DISCONNECTED = 0
-    STATE_CONNECTING = 1
-    STATE_CONNECTED = 2
-    STATE_DISCONNECTING = 3
-
-
-class BluetoothAccessLevel(Enum):
-    # Access Levels from BluetoothDevice.
-    ACCESS_ALLOWED = 1
-    ACCESS_DENIED = 2
-
-
-class BluetoothPriorityLevel(Enum):
-    # Priority levels as defined in BluetoothProfile.java.
-    PRIORITY_AUTO_CONNECT = 1000
-    PRIORITY_ON = 100
-    PRIORITY_OFF = 0
-    PRIORITY_UNDEFINED = -1
-
-class BluetoothA2dpCodecType(Enum):
-    SBC = 0
-    AAC = 1
-    APTX = 2
-    APTX_HD = 3
-    LDAC = 4
-    MAX = 5
diff --git a/src/antlion/test_utils/bt/BtFunhausBaseTest.py b/src/antlion/test_utils/bt/BtFunhausBaseTest.py
deleted file mode 100644
index 6975685..0000000
--- a/src/antlion/test_utils/bt/BtFunhausBaseTest.py
+++ /dev/null
@@ -1,210 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Test script to automate the Bluetooth Audio Funhaus.
-"""
-from antlion.keys import Config
-from antlion.test_utils.bt.BtMetricsBaseTest import BtMetricsBaseTest
-from antlion.test_utils.bt.bt_test_utils import bluetooth_enabled_check
-from antlion.utils import bypass_setup_wizard
-from antlion.utils import exe_cmd
-from antlion.utils import sync_device_time
-import time
-import os
-
-BT_CONF_PATH = "/data/misc/bluedroid/bt_config.conf"
-
-
-class BtFunhausBaseTest(BtMetricsBaseTest):
-    """
-    Base class for Bluetooth A2DP audio tests, this class is in charge of
-    pushing link key to Android device so that it could be paired with remote
-    A2DP device, pushing music to Android device, playing audio, monitoring
-    audio play, and stop playing audio
-    """
-    music_file_to_play = ""
-    device_fails_to_connect_list = []
-
-    def __init__(self, controllers):
-        BtMetricsBaseTest.__init__(self, controllers)
-        self.ad = self.android_devices[0]
-        self.dongle = self.relay_devices[0]
-
-    def _pair_devices(self):
-        self.ad.droid.bluetoothStartPairingHelper(False)
-        self.dongle.enter_pairing_mode()
-
-        self.ad.droid.bluetoothBond(self.dongle.mac_address)
-
-        end_time = time.time() + 20
-        self.ad.log.info("Verifying devices are bonded")
-        while time.time() < end_time:
-            bonded_devices = self.ad.droid.bluetoothGetBondedDevices()
-
-            for d in bonded_devices:
-                if d['address'] == self.dongle.mac_address:
-                    self.ad.log.info("Successfully bonded to device.")
-                    self.log.info("Bonded devices:\n{}".format(bonded_devices))
-                return True
-        self.ad.log.info("Failed to bond devices.")
-        return False
-
-    def setup_test(self):
-        super(BtFunhausBaseTest, self).setup_test()
-        self.dongle.setup()
-        tries = 5
-        # Since we are not concerned with pairing in this test, try 5 times.
-        while tries > 0:
-            if self._pair_devices():
-                return True
-            else:
-                tries -= 1
-        return False
-
-    def teardown_test(self):
-        super(BtFunhausBaseTest, self).teardown_test()
-        self.dongle.clean_up()
-        return True
-
-    def on_fail(self, test_name, begin_time):
-        self.dongle.clean_up()
-        self._collect_bluetooth_manager_dumpsys_logs(self.android_devices)
-        super(BtFunhausBaseTest, self).on_fail(test_name, begin_time)
-
-    def setup_class(self):
-        if not super(BtFunhausBaseTest, self).setup_class():
-            return False
-        for ad in self.android_devices:
-            sync_device_time(ad)
-            # Disable Bluetooth HCI Snoop Logs for audio tests
-            ad.adb.shell("setprop persist.bluetooth.btsnoopenable false")
-            if not bypass_setup_wizard(ad):
-                self.log.debug(
-                    "Failed to bypass setup wizard, continuing test.")
-                # Add music to the Android device
-        return self._add_music_to_android_device(ad)
-
-    def _add_music_to_android_device(self, ad):
-        """
-        Add music to Android device as specified by the test config
-        :param ad: Android device
-        :return: True on success, False on failure
-        """
-        self.log.info("Pushing music to the Android device.")
-        music_path_str = "bt_music"
-        android_music_path = "/sdcard/Music/"
-        if music_path_str not in self.user_params:
-            self.log.error("Need music for audio testcases...")
-            return False
-        music_path = self.user_params[music_path_str]
-        if type(music_path) is list:
-            self.log.info("Media ready to push as is.")
-        elif not os.path.isdir(music_path):
-            music_path = os.path.join(
-                self.user_params[Config.key_config_path.value], music_path)
-            if not os.path.isdir(music_path):
-                self.log.error(
-                    "Unable to find music directory {}.".format(music_path))
-                return False
-        if type(music_path) is list:
-            for item in music_path:
-                self.music_file_to_play = item
-                ad.adb.push("{} {}".format(item, android_music_path))
-        else:
-            for dirname, dirnames, filenames in os.walk(music_path):
-                for filename in filenames:
-                    self.music_file_to_play = filename
-                    file = os.path.join(dirname, filename)
-                    # TODO: Handle file paths with spaces
-                    ad.adb.push("{} {}".format(file, android_music_path))
-        ad.reboot()
-        return True
-
-    def _collect_bluetooth_manager_dumpsys_logs(self, ads):
-        """
-        Collect "adb shell dumpsys bluetooth_manager" logs
-        :param ads: list of active Android devices
-        :return: None
-        """
-        for ad in ads:
-            serial = ad.serial
-            out_name = "{}_{}".format(serial, "bluetooth_dumpsys.txt")
-            dumpsys_path = ''.join((ad.log_path, "/BluetoothDumpsys"))
-            os.makedirs(dumpsys_path, exist_ok=True)
-            cmd = ''.join(
-                ("adb -s ", serial, " shell dumpsys bluetooth_manager > ",
-                 dumpsys_path, "/", out_name))
-            exe_cmd(cmd)
-
-    def start_playing_music_on_all_devices(self):
-        """
-        Start playing music
-        :return: None
-        """
-        self.ad.droid.mediaPlayOpen("file:///sdcard/Music/{}".format(
-            self.music_file_to_play.split("/")[-1]))
-        self.ad.droid.mediaPlaySetLooping(True)
-        self.ad.log.info("Music is now playing.")
-
-    def monitor_music_play_util_deadline(self, end_time, sleep_interval=1):
-        """
-        Monitor music play on all devices, if a device's Bluetooth adapter is
-        OFF or if a device is not connected to any remote Bluetooth devices,
-        we add them to failure lists bluetooth_off_list and
-        device_not_connected_list respectively
-        :param end_time: The deadline in epoch floating point seconds that we
-            must stop playing
-        :param sleep_interval: How often to monitor, too small we may drain
-            too much resources on Android, too big the deadline might be passed
-            by a maximum of this amount
-        :return:
-            status: False iff all devices are off or disconnected otherwise True
-            bluetooth_off_list: List of ADs that have Bluetooth at OFF state
-            device_not_connected_list: List of ADs with no remote device
-                                        connected
-        """
-        device_not_connected_list = []
-        while time.time() < end_time:
-            if not self.ad.droid.bluetoothCheckState():
-                self.ad.log.error("Device {}'s Bluetooth state is off.".format(
-                    self.ad.serial))
-                return False
-            if self.ad.droid.bluetoothGetConnectedDevices() == 0:
-                self.ad.log.error(
-                    "Bluetooth device not connected. Failing test.")
-            time.sleep(sleep_interval)
-        return True
-
-    def play_music_for_duration(self, duration, sleep_interval=1):
-        """
-        A convenience method for above methods. It starts run music on all
-        devices, monitors the health of music play and stops playing them when
-        time passes the duration
-        :param duration: Duration in floating point seconds
-        :param sleep_interval: How often to check the health of music play
-        :return:
-            status: False iff all devices are off or disconnected otherwise True
-            bluetooth_off_list: List of ADs that have Bluetooth at OFF state
-            device_not_connected_list: List of ADs with no remote device
-                                        connected
-        """
-        start_time = time.time()
-        end_time = start_time + duration
-        self.start_playing_music_on_all_devices()
-        status = self.monitor_music_play_util_deadline(end_time,
-                                                       sleep_interval)
-        self.ad.droid.mediaPlayStopAll()
-        return status
diff --git a/src/antlion/test_utils/bt/BtInterferenceBaseTest.py b/src/antlion/test_utils/bt/BtInterferenceBaseTest.py
deleted file mode 100644
index 181a0da..0000000
--- a/src/antlion/test_utils/bt/BtInterferenceBaseTest.py
+++ /dev/null
@@ -1,283 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Stream music through connected device from phone across different
-attenuations."""
-
-import json
-import math
-import time
-import logging
-import antlion.controllers.iperf_client as ipc
-import antlion.controllers.iperf_server as ipf
-import antlion.test_utils.bt.bt_test_utils as btutils
-from antlion import asserts
-from antlion.test_utils.bt.A2dpBaseTest import A2dpBaseTest
-from antlion.test_utils.bt.loggers import bluetooth_metric_logger as log
-from antlion.test_utils.wifi import wifi_performance_test_utils as wpeutils
-from antlion.test_utils.wifi import wifi_power_test_utils as wputils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.power.PowerBaseTest import ObjNew
-
-MAX_ATTENUATION = 95
-TEMP_FILE = '/sdcard/Download/tmp.log'
-IPERF_CLIENT_ERROR = 'the client has unexpectedly closed the connection'
-
-
-def setup_ap_connection(dut, network, ap, bandwidth=20):
-    """Setup AP and connect DUT to it.
-
-    Args:
-        dut: the android device to connect and run traffic
-        network: the network config for the AP to be setup
-        ap: access point object
-        bandwidth: bandwidth of the WiFi network to be setup
-    Returns:
-        brconfigs: dict for bridge interface configs
-    """
-    wutils.wifi_toggle_state(dut, True)
-    brconfigs = wputils.ap_setup(ap, network, bandwidth=bandwidth)
-    wutils.wifi_connect(dut, network, num_of_tries=3)
-    return brconfigs
-
-
-def start_iperf_client(traffic_pair_obj, duration):
-    """Setup iperf traffic for TCP downlink.
-    Args:
-        traffic_pair_obj: obj to contain info on traffic pair
-        duration: duration of iperf traffic to run
-    """
-    # Construct the iperf command based on the test params
-    iperf_cmd = 'iperf3 -c {} -i 1 -t {} -p {} -J -R > {}'.format(
-        traffic_pair_obj.server_address, duration,
-        traffic_pair_obj.iperf_server.port, TEMP_FILE)
-    # Start IPERF client
-    traffic_pair_obj.dut.adb.shell_nb(iperf_cmd)
-
-
-def unpack_custom_file(file):
-    """Unpack the json file to .
-
-    Args:
-        file: custom json file.
-    """
-    with open(file, 'r') as f:
-        params = json.load(f)
-    return params
-
-
-def get_iperf_results(iperf_server_obj):
-    """Get the iperf results and process.
-
-    Args:
-        iperf_server_obj: the IperfServer object
-    Returns:
-         throughput: the average throughput during tests.
-    """
-    # Get IPERF results and add this to the plot title
-    iperf_file = iperf_server_obj.log_files[-1]
-    try:
-        iperf_result = ipf.IPerfResult(iperf_file)
-        # Compute the throughput in Mbit/s
-        if iperf_result.error == IPERF_CLIENT_ERROR:
-            rates = []
-            for item in iperf_result.result['intervals']:
-                rates.append(item['sum']['bits_per_second'] / 8 / 1024 / 1024)
-            throughput = ((math.fsum(rates) / len(rates))) * 8 * (1.024**2)
-        else:
-            throughput = (math.fsum(iperf_result.instantaneous_rates) / len(
-                iperf_result.instantaneous_rates)) * 8 * (1.024**2)
-    except (ValueError, TypeError):
-        throughput = 0
-    return throughput
-
-
-def locate_interference_pair_by_channel(wifi_int_pairs, interference_channels):
-    """Function to find which attenautor to set based on channel info
-    Args:
-        interference_channels: list of interference channels
-    Return:
-        interference_pair_indices: list of indices for interference pair
-            in wifi_int_pairs
-    """
-    interference_pair_indices = []
-    for chan in interference_channels:
-        for i in range(len(wifi_int_pairs)):
-            if wifi_int_pairs[i].channel == chan:
-                interference_pair_indices.append(i)
-    return interference_pair_indices
-
-
-def inject_static_wifi_interference(wifi_int_pairs, interference_level,
-                                    channels):
-    """Function to inject wifi interference to bt link and read rssi.
-
-    Interference of IPERF traffic is always running, by setting attenuation,
-    the gate is opened to release the interference to the setup.
-    Args:
-        interference_level: the signal strength of wifi interference, use
-            attenuation level to represent this
-        channels: wifi channels where interference will
-            be injected, list
-    """
-    all_pair = range(len(wifi_int_pairs))
-    interference_pair_indices = locate_interference_pair_by_channel(
-        wifi_int_pairs, channels)
-    inactive_interference_pairs_indices = [
-        item for item in all_pair if item not in interference_pair_indices
-    ]
-    logging.info('WiFi interference at {} and inactive channels at {}'.format(
-        interference_pair_indices, inactive_interference_pairs_indices))
-    for i in interference_pair_indices:
-        wifi_int_pairs[i].attenuator.set_atten(interference_level)
-        logging.info('Set attenuation {} dB on attenuator {}'.format(
-            wifi_int_pairs[i].attenuator.get_atten(), i + 1))
-    for i in inactive_interference_pairs_indices:
-        wifi_int_pairs[i].attenuator.set_atten(MAX_ATTENUATION)
-        logging.info('Set attenuation {} dB on attenuator {}'.format(
-            wifi_int_pairs[i].attenuator.get_atten(), i + 1))
-
-
-class BtInterferenceBaseTest(A2dpBaseTest):
-    def __init__(self, configs):
-        super().__init__(configs)
-        self.bt_logger = log.BluetoothMetricLogger.for_test_case()
-        self.start_time = time.time()
-        req_params = [
-            'attenuation_vector', 'wifi_networks', 'codecs', 'custom_files',
-            'audio_params'
-        ]
-        self.unpack_userparams(req_params)
-        for file in self.custom_files:
-            if 'static_interference' in file:
-                self.static_wifi_interference = unpack_custom_file(file)
-            elif 'dynamic_interference' in file:
-                self.dynamic_wifi_interference = unpack_custom_file(file)
-
-    def setup_class(self):
-        super().setup_class()
-        # Build object to store all necessary information for each pair of wifi
-        # interference setup: phone, ap, network, channel, iperf server port/ip
-        # object and bridge interface configs
-        if len(self.android_devices) < 5 or len(self.attenuators) < 4:
-            self.log.error('Need a 4 channel attenuator and 5 android phones'
-                           'please update the config file')
-        self.wifi_int_pairs = []
-        for i in range(len(self.attenuators) - 1):
-            tmp_dict = {
-                'dut': self.android_devices[i + 1],
-                'ap': self.access_points[i],
-                'network': self.wifi_networks[i],
-                'channel': self.wifi_networks[i]['channel'],
-                'iperf_server': self.iperf_servers[i],
-                'attenuator': self.attenuators[i + 1],
-                'ether_int': self.packet_senders[i],
-                'iperf_client':
-                ipc.IPerfClientOverAdb(self.android_devices[i + 1])
-            }
-            tmp_obj = ObjNew(**tmp_dict)
-            self.wifi_int_pairs.append(tmp_obj)
-        ##Setup connection between WiFi APs and Phones and get DHCP address
-        # for the interface
-        for obj in self.wifi_int_pairs:
-            brconfigs = setup_ap_connection(obj.dut, obj.network, obj.ap)
-            iperf_server_address = wputils.wait_for_dhcp(
-                obj.ether_int.interface)
-            setattr(obj, 'server_address', iperf_server_address)
-            setattr(obj, 'brconfigs', brconfigs)
-            obj.attenuator.set_atten(MAX_ATTENUATION)
-        # Enable BQR on master and slave Android device
-        btutils.enable_bqr(self.dut)
-        btutils.enable_bqr(self.bt_device_controller)
-
-    def teardown_class(self):
-        super().teardown_class()
-        for obj in self.wifi_int_pairs:
-            obj.ap.bridge.teardown(obj.brconfigs)
-            self.log.info('Stop IPERF server at port {}'.format(
-                obj.iperf_server.port))
-            obj.iperf_server.stop()
-            self.log.info('Stop IPERF process on {}'.format(obj.dut.serial))
-            #only for glinux machine
-            #            wputils.bring_down_interface(obj.ether_int.interface)
-            obj.attenuator.set_atten(MAX_ATTENUATION)
-            obj.ap.close()
-
-    def teardown_test(self):
-
-        super().teardown_test()
-        for obj in self.wifi_int_pairs:
-            obj.attenuator.set_atten(MAX_ATTENUATION)
-
-    def play_and_record_audio(self, duration, queue):
-        """Play and record audio for a set duration.
-
-        Args:
-            duration: duration in seconds for music playing
-            que: multiprocess que to store the return value of this function
-        Returns:
-            audio_captured: captured audio file path
-        """
-
-        self.log.info('Play and record audio for {} second'.format(duration))
-        self.media.play()
-        self.audio_device.start()
-        time.sleep(duration)
-        audio_captured = self.audio_device.stop()
-        self.media.stop()
-        self.log.info('Audio play and record stopped')
-        asserts.assert_true(audio_captured, 'Audio not recorded')
-        queue.put(audio_captured)
-
-    def locate_interference_pair_by_channel(self, interference_channels):
-        """Function to find which attenautor to set based on channel info
-        Args:
-            interference_channels: list of interference channels
-        Return:
-            interference_pair_indices: list of indices for interference pair
-                in self.wifi_int_pairs
-        """
-        interference_pair_indices = []
-        for chan in interference_channels:
-            for i in range(len(self.wifi_int_pairs)):
-                if self.wifi_int_pairs[i].channel == chan:
-                    interference_pair_indices.append(i)
-        return interference_pair_indices
-
-    def get_interference_rssi(self):
-        """Function to read wifi interference RSSI level."""
-
-        bssids = []
-        self.interference_rssi = []
-        wutils.wifi_toggle_state(self.android_devices[0], True)
-        for item in self.wifi_int_pairs:
-            ssid = item.network['SSID']
-            bssid = item.ap.get_bssid_from_ssid(ssid, '2g')
-            bssids.append(bssid)
-            interference_rssi_dict = {
-                "ssid": ssid,
-                "bssid": bssid,
-                "chan": item.channel,
-                "rssi": 0
-            }
-            self.interference_rssi.append(interference_rssi_dict)
-        scaned_rssi = wpeutils.get_scan_rssi(self.android_devices[0],
-                                             bssids,
-                                             num_measurements=2)
-        for item in self.interference_rssi:
-            item['rssi'] = scaned_rssi[item['bssid']]['mean']
-            self.log.info('Interference RSSI at channel {} is {} dBm'.format(
-                item['chan'], item['rssi']))
-        wutils.wifi_toggle_state(self.android_devices[0], False)
diff --git a/src/antlion/test_utils/bt/BtMetricsBaseTest.py b/src/antlion/test_utils/bt/BtMetricsBaseTest.py
deleted file mode 100644
index 8abd13d..0000000
--- a/src/antlion/test_utils/bt/BtMetricsBaseTest.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-
-
-class BtMetricsBaseTest(BluetoothBaseTest):
-    """
-    Base class for tests that requires dumping and parsing Bluetooth Metrics
-    """
-
-    def __init__(self, controllers):
-        BluetoothBaseTest.__init__(self, controllers)
-        self.ad = self.android_devices[0]
diff --git a/src/antlion/test_utils/bt/BtSarBaseTest.py b/src/antlion/test_utils/bt/BtSarBaseTest.py
deleted file mode 100644
index eb06837..0000000
--- a/src/antlion/test_utils/bt/BtSarBaseTest.py
+++ /dev/null
@@ -1,732 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import re
-import time
-import logging
-import pandas as pd
-
-from antlion import asserts
-from antlion.libs.proc import job
-from antlion.base_test import BaseTestClass
-
-from antlion.test_utils.bt.bt_power_test_utils import MediaControl
-from antlion.test_utils.bt.ble_performance_test_utils import run_ble_throughput_and_read_rssi
-from antlion.test_utils.abstract_devices.bluetooth_handsfree_abstract_device import BluetoothHandsfreeAbstractDeviceFactory as bt_factory
-
-import antlion.test_utils.bt.bt_test_utils as bt_utils
-import antlion.test_utils.wifi.wifi_performance_test_utils as wifi_utils
-
-PHONE_MUSIC_FILE_DIRECTORY = '/sdcard/Music'
-
-FORCE_SAR_ADB_COMMAND = ('am broadcast -n'
-                         'com.google.android.apps.scone/.coex.TestReceiver -a '
-                         'com.google.android.apps.scone.coex.SIMULATE_STATE ')
-
-SLEEP_DURATION = 2
-
-DEFAULT_DURATION = 5
-DEFAULT_MAX_ERROR_THRESHOLD = 2
-DEFAULT_AGG_MAX_ERROR_THRESHOLD = 2
-FIXED_ATTENUATION = 36
-
-
-class BtSarBaseTest(BaseTestClass):
-    """ Base class for all BT SAR Test classes.
-
-        This class implements functions common to BT SAR test Classes.
-    """
-    BACKUP_BT_SAR_TABLE_NAME = 'backup_bt_sar_table.csv'
-
-    def __init__(self, controllers):
-        BaseTestClass.__init__(self, controllers)
-        self.power_file_paths = [
-            '/vendor/etc/bluetooth_power_limits.csv',
-            '/data/vendor/radio/bluetooth_power_limits.csv'
-        ]
-        self.sar_file_name = os.path.basename(self.power_file_paths[0])
-        self.power_column = 'BluetoothPower'
-        self.REG_DOMAIN_DICT = {
-            ('us', 'ca', 'in'): 'US',
-            ('uk', 'fr', 'es', 'de', 'it', 'ie', 'sg', 'au', 'tw'): 'EU',
-            ('jp', ): 'JP'
-        }
-
-    def setup_class(self):
-        """Initializes common test hardware and parameters.
-
-        This function initializes hardware and compiles parameters that are
-        common to all tests in this class and derived classes.
-        """
-        super().setup_class()
-
-        self.test_params = self.user_params.get('bt_sar_test_params', {})
-        if not self.test_params:
-            self.log.warning(
-                'bt_sar_test_params was not found in the config file.')
-
-        self.user_params.update(self.test_params)
-        req_params = ['bt_devices', 'calibration_params', 'custom_files']
-
-        self.unpack_userparams(
-            req_params,
-            country_code='us',
-            duration=DEFAULT_DURATION,
-            sort_order=None,
-            max_error_threshold=DEFAULT_MAX_ERROR_THRESHOLD,
-            agg_error_threshold=DEFAULT_AGG_MAX_ERROR_THRESHOLD,
-            tpc_threshold=[2, 8],
-            sar_margin={
-                'BDR': 0,
-                'EDR': 0,
-                'BLE': 0
-            })
-
-        self.attenuator = self.attenuators[0]
-        self.dut = self.android_devices[0]
-
-        for key in self.REG_DOMAIN_DICT.keys():
-            if self.country_code.lower() in key:
-                self.reg_domain = self.REG_DOMAIN_DICT[key]
-
-        self.sar_version_2 = False
-
-        if 'Error' not in self.dut.adb.shell('bluetooth_sar_test -r'):
-            #Flag for SAR version 2
-            self.sar_version_2 = True
-            self.power_column = 'BluetoothEDRPower'
-            self.power_file_paths[0] = os.path.join(
-                os.path.dirname(self.power_file_paths[0]),
-                'bluetooth_power_limits_{}.csv'.format(self.reg_domain))
-            self.sar_file_name = os.path.basename(self.power_file_paths[0])
-
-        if self.sar_version_2:
-            custom_file_suffix = 'version2'
-        else:
-            custom_file_suffix = 'version1'
-
-        for file in self.custom_files:
-            if 'custom_sar_table_{}.csv'.format(custom_file_suffix) in file:
-                self.custom_sar_path = file
-                break
-        else:
-            raise RuntimeError('Custom Sar File is missing')
-
-        self.sar_file_path = self.power_file_paths[0]
-        self.atten_min = 0
-        self.atten_max = int(self.attenuator.get_max_atten())
-
-        # Get music file and push it to the phone and initialize Media controller
-        music_files = self.user_params.get('music_files', [])
-        if music_files:
-            music_src = music_files[0]
-            music_dest = PHONE_MUSIC_FILE_DIRECTORY
-            success = self.dut.push_system_file(music_src, music_dest)
-            if success:
-                self.music_file = os.path.join(PHONE_MUSIC_FILE_DIRECTORY,
-                                               os.path.basename(music_src))
-            # Initialize media_control class
-            self.media = MediaControl(self.dut, self.music_file)
-
-        #Initializing BT device controller
-        if self.bt_devices:
-            attr, idx = self.bt_devices.split(':')
-            self.bt_device_controller = getattr(self, attr)[int(idx)]
-            self.bt_device = bt_factory().generate(self.bt_device_controller)
-        else:
-            self.log.error('No BT devices config is provided!')
-
-        bt_utils.enable_bqr(self.android_devices)
-
-        self.log_path = os.path.join(logging.log_path, 'results')
-        os.makedirs(self.log_path, exist_ok=True)
-
-        # Reading BT SAR table from the phone
-        self.bt_sar_df = self.read_sar_table(self.dut)
-
-    def setup_test(self):
-        super().setup_test()
-
-        # Starting BT on the master
-        self.dut.droid.bluetoothFactoryReset()
-        bt_utils.enable_bluetooth(self.dut.droid, self.dut.ed)
-
-        # Starting BT on the slave
-        self.bt_device.reset()
-        self.bt_device.power_on()
-
-        # Connect master and slave
-        bt_utils.connect_phone_to_headset(self.dut, self.bt_device, 60)
-
-        # Playing music
-        self.media.play()
-
-        # Find and set PL10 level for the DUT
-        self.pl10_atten = self.set_PL10_atten_level(self.dut)
-        self.attenuator.set_atten(self.pl10_atten)
-
-    def teardown_test(self):
-        #Stopping Music
-        if hasattr(self, 'media'):
-            self.media.stop()
-
-        # Stopping BT on slave
-        self.bt_device.reset()
-        self.bt_device.power_off()
-
-        #Stopping BT on master
-        bt_utils.disable_bluetooth(self.dut.droid)
-
-        #Resetting the atten to initial levels
-        self.attenuator.set_atten(self.atten_min)
-        self.log.info('Attenuation set to {} dB'.format(self.atten_min))
-
-    def teardown_class(self):
-
-        super().teardown_class()
-        self.dut.droid.bluetoothFactoryReset()
-
-        # Stopping BT on slave
-        self.bt_device.reset()
-        self.bt_device.power_off()
-
-        #Stopping BT on master
-        bt_utils.disable_bluetooth(self.dut.droid)
-
-    def save_sar_plot(self, df):
-        """ Saves SAR plot to the path given.
-
-        Args:
-            df: Processed SAR table sweep results
-        """
-        self.plot.add_line(
-            df.index,
-            df['expected_tx_power'],
-            legend='expected',
-            marker='circle')
-        self.plot.add_line(
-            df.index,
-            df['measured_tx_power'],
-            legend='measured',
-            marker='circle')
-        self.plot.add_line(
-            df.index, df['delta'], legend='delta', marker='circle')
-
-        results_file_path = os.path.join(self.log_path, '{}.html'.format(
-            self.current_test_name))
-        self.plot.generate_figure()
-        wifi_utils.BokehFigure.save_figures([self.plot], results_file_path)
-
-    def sweep_power_cap(self):
-        sar_df = self.bt_sar_df
-        sar_df['BDR_power_cap'] = -128
-        sar_df['EDR_power_cap'] = -128
-        sar_df['BLE_power_cap'] = -128
-
-        if self.sar_version_2:
-            power_column_dict = {
-                'BDR': 'BluetoothBDRPower',
-                'EDR': 'BluetoothEDRPower',
-                'BLE': 'BluetoothLEPower'
-            }
-        else:
-            power_column_dict = {'EDR': self.power_column}
-
-        power_cap_error = False
-
-        for type, column_name in power_column_dict.items():
-
-            self.log.info("Performing sanity test on {}".format(type))
-            # Iterating through the BT SAR scenarios
-            for scenario in range(0, self.bt_sar_df.shape[0]):
-                # Reading BT SAR table row into dict
-                read_scenario = sar_df.loc[scenario].to_dict()
-                start_time = self.dut.adb.shell('date +%s.%m')
-                time.sleep(SLEEP_DURATION)
-
-                # Setting SAR state to the read BT SAR row
-                self.set_sar_state(self.dut, read_scenario, self.country_code)
-
-                # Reading device power cap from logcat after forcing SAR State
-                scenario_power_cap = self.get_current_power_cap(
-                    self.dut, start_time, type=type)
-                sar_df.loc[scenario, '{}_power_cap'.format(
-                    type)] = scenario_power_cap
-                self.log.info(
-                    'scenario: {}, '
-                    'sar_power: {}, power_cap:{}'.format(
-                        scenario, sar_df.loc[scenario, column_name],
-                        sar_df.loc[scenario, '{}_power_cap'.format(type)]))
-
-        if not sar_df['{}_power_cap'.format(type)].equals(sar_df[column_name]):
-            power_cap_error = True
-
-        results_file_path = os.path.join(self.log_path, '{}.csv'.format(
-            self.current_test_name))
-        sar_df.to_csv(results_file_path)
-
-        return power_cap_error
-
-    def sweep_table(self,
-                    client_ad=None,
-                    server_ad=None,
-                    client_conn_id=None,
-                    gatt_server=None,
-                    gatt_callback=None,
-                    isBLE=False):
-        """Iterates over the BT SAR table and forces signal states.
-
-        Iterates over BT SAR table and forces signal states,
-        measuring RSSI and power level for each state.
-
-        Args:
-            client_ad: the Android device performing the connection.
-            server_ad: the Android device accepting the connection.
-            client_conn_id: the client connection ID.
-            gatt_server: the gatt server
-            gatt_callback: Gatt callback objec
-            isBLE : boolean variable for BLE connection
-        Returns:
-            sar_df : SAR table sweep results in pandas dataframe
-        """
-
-        sar_df = self.bt_sar_df.copy()
-        sar_df['power_cap'] = -128
-        sar_df['slave_rssi'] = -128
-        sar_df['master_rssi'] = -128
-        sar_df['ble_rssi'] = -128
-        sar_df['pwlv'] = -1
-
-        # Sorts the table
-        if self.sort_order:
-            if self.sort_order.lower() == 'ascending':
-                sar_df = sar_df.sort_values(
-                    by=[self.power_column], ascending=True)
-            else:
-                sar_df = sar_df.sort_values(
-                    by=[self.power_column], ascending=False)
-            sar_df = sar_df.reset_index(drop=True)
-
-        # Sweeping BT SAR table
-        for scenario in range(sar_df.shape[0]):
-            # Reading BT SAR Scenario from the table
-            read_scenario = sar_df.loc[scenario].to_dict()
-
-            start_time = self.dut.adb.shell('date +%s.%m')
-            time.sleep(SLEEP_DURATION)
-
-            #Setting SAR State
-            self.set_sar_state(self.dut, read_scenario, self.country_code)
-
-            if isBLE:
-                sar_df.loc[scenario, 'power_cap'] = self.get_current_power_cap(
-                    self.dut, start_time, type='BLE')
-
-                sar_df.loc[
-                    scenario, 'ble_rssi'] = run_ble_throughput_and_read_rssi(
-                        client_ad, server_ad, client_conn_id, gatt_server,
-                        gatt_callback)
-
-                self.log.info('scenario:{}, power_cap:{},  ble_rssi:{}'.format(
-                    scenario, sar_df.loc[scenario, 'power_cap'],
-                    sar_df.loc[scenario, 'ble_rssi']))
-            else:
-                sar_df.loc[scenario, 'power_cap'] = self.get_current_power_cap(
-                    self.dut, start_time)
-
-                processed_bqr_results = bt_utils.get_bt_metric(
-                    self.android_devices, self.duration)
-                sar_df.loc[scenario, 'slave_rssi'] = processed_bqr_results[
-                    'rssi'][self.bt_device_controller.serial]
-                sar_df.loc[scenario, 'master_rssi'] = processed_bqr_results[
-                    'rssi'][self.dut.serial]
-                sar_df.loc[scenario, 'pwlv'] = processed_bqr_results['pwlv'][
-                    self.dut.serial]
-                self.log.info(
-                    'scenario:{}, power_cap:{},  s_rssi:{}, m_rssi:{}, m_pwlv:{}'
-                    .format(scenario, sar_df.loc[scenario, 'power_cap'],
-                            sar_df.loc[scenario, 'slave_rssi'],
-                            sar_df.loc[scenario, 'master_rssi'],
-                            sar_df.loc[scenario, 'pwlv']))
-
-        self.log.info('BT SAR Table swept')
-
-        return sar_df
-
-    def process_table(self, sar_df):
-        """Processes the results of sweep_table and computes BT TX power.
-
-        Processes the results of sweep_table and computes BT TX power
-        after factoring in the path loss and FTM offsets.
-
-        Args:
-             sar_df: BT SAR table after the sweep
-
-        Returns:
-            sar_df: processed BT SAR table
-        """
-
-        sar_df['pathloss'] = self.calibration_params['pathloss']
-
-        if hasattr(self, 'pl10_atten'):
-            sar_df['atten'] = self.pl10_atten
-        else:
-            sar_df['atten'] = FIXED_ATTENUATION
-
-        # BT SAR Backoff for each scenario
-        if self.sar_version_2:
-            #Reads OTP values from the phone
-            self.otp = bt_utils.read_otp(self.dut)
-
-            #OTP backoff
-            edr_otp = min(0, float(self.otp['EDR']['10']))
-            bdr_otp = min(0, float(self.otp['BR']['10']))
-            ble_otp = min(0, float(self.otp['BLE']['10']))
-
-            # EDR TX Power for PL10
-            edr_tx_power_pl10 = self.calibration_params['target_power']['EDR']['10'] - edr_otp
-
-            # BDR TX Power for PL10
-            bdr_tx_power_pl10 = self.calibration_params['target_power']['BDR']['10'] - bdr_otp
-
-            # RSSI being measured is BDR
-            offset = bdr_tx_power_pl10 - edr_tx_power_pl10
-
-            # BDR-EDR offset
-            sar_df['offset'] = offset
-
-            # Max TX power permissible
-            sar_df['max_power'] = self.calibration_params['max_power']
-
-            # Adding a target power column
-            if 'ble_rssi' in sar_df.columns:
-                sar_df[
-                    'target_power'] = self.calibration_params['target_power']['BLE']['10'] - ble_otp
-            else:
-                sar_df['target_power'] = sar_df['pwlv'].astype(str).map(
-                    self.calibration_params['target_power']['EDR']) - edr_otp
-
-            #Translates power_cap values to expected TX power level
-            sar_df['cap_tx_power'] = sar_df['power_cap'] / 4.0
-
-            sar_df['expected_tx_power'] = sar_df[[
-                'cap_tx_power', 'target_power', 'max_power'
-            ]].min(axis=1)
-
-            if hasattr(self, 'pl10_atten'):
-                sar_df[
-                    'measured_tx_power'] = sar_df['slave_rssi'] + sar_df['pathloss'] + self.pl10_atten - offset
-            else:
-                sar_df[
-                    'measured_tx_power'] = sar_df['ble_rssi'] + sar_df['pathloss'] + FIXED_ATTENUATION
-
-        else:
-
-            # Adding a target power column
-            sar_df['target_power'] = sar_df['pwlv'].astype(str).map(
-                self.calibration_params['target_power']['EDR']['10'])
-
-            # Adding a ftm  power column
-            sar_df['ftm_power'] = sar_df['pwlv'].astype(str).map(
-                self.calibration_params['ftm_power']['EDR'])
-            sar_df[
-                'backoff'] = sar_df['target_power'] - sar_df['power_cap'] / 4.0
-
-            sar_df[
-                'expected_tx_power'] = sar_df['ftm_power'] - sar_df['backoff']
-            sar_df[
-                'measured_tx_power'] = sar_df['slave_rssi'] + sar_df['pathloss'] + self.pl10_atten
-
-        sar_df[
-            'delta'] = sar_df['expected_tx_power'] - sar_df['measured_tx_power']
-
-        self.log.info('Sweep results processed')
-
-        results_file_path = os.path.join(self.log_path, self.current_test_name)
-        sar_df.to_csv('{}.csv'.format(results_file_path))
-        self.save_sar_plot(sar_df)
-
-        return sar_df
-
-    def process_results(self, sar_df, type='EDR'):
-        """Determines the test results of the sweep.
-
-         Parses the processed table with computed BT TX power values
-         to return pass or fail.
-
-        Args:
-             sar_df: processed BT SAR table
-        """
-        if self.sar_version_2:
-            breach_error_result = (
-                sar_df['expected_tx_power'] + self.sar_margin[type] >
-                sar_df['measured_tx_power']).all()
-            if not breach_error_result:
-                asserts.fail('Measured TX power exceeds expected')
-
-        else:
-            # checks for errors at particular points in the sweep
-            max_error_result = abs(
-                sar_df['delta']) > self.max_error_threshold[type]
-            if max_error_result:
-                asserts.fail('Maximum Error Threshold Exceeded')
-
-            # checks for error accumulation across the sweep
-            if sar_df['delta'].sum() > self.agg_error_threshold[type]:
-                asserts.fail(
-                    'Aggregate Error Threshold Exceeded. Error: {} Threshold: {}'.
-                    format(sar_df['delta'].sum(), self.agg_error_threshold))
-
-        asserts.explicit_pass('Measured and Expected Power Values in line')
-
-    def set_sar_state(self, ad, signal_dict, country_code='us'):
-        """Sets the SAR state corresponding to the BT SAR signal.
-
-        The SAR state is forced using an adb command that takes
-        device signals as input.
-
-        Args:
-            ad: android_device object.
-            signal_dict: dict of BT SAR signals read from the SAR file.
-        Returns:
-            enforced_state: dict of device signals.
-        """
-        signal_dict = {k: max(int(v), 0) for (k, v) in signal_dict.items()}
-        signal_dict["Wifi"] = signal_dict['WIFI5Ghz']
-        signal_dict['WIFI2Ghz'] = 0 if signal_dict['WIFI5Ghz'] else 1
-
-        device_state_dict = {
-            ('Earpiece', 'earpiece'): signal_dict['Head'],
-            ('Wifi', 'wifi'): signal_dict['WIFI5Ghz'],
-            ('Wifi 2.4G', 'wifi_24g'): signal_dict['WIFI2Ghz'],
-            ('Voice', 'voice'): 0,
-            ('Wifi AP', 'wifi_ap'): signal_dict['HotspotVoice'],
-            ('Bluetooth', 'bluetooth'): 1,
-            ('Bluetooth media', 'bt_media'): signal_dict['BTMedia'],
-            ('Radio', 'radio_power'): signal_dict['Cell'],
-            ('Motion', 'motion'): signal_dict['IMU'],
-            ('Bluetooth connected', 'bt_connected'): 1
-        }
-
-        if 'BTHotspot' in signal_dict.keys():
-            device_state_dict[('Bluetooth tethering',
-                               'bt_tethering')] = signal_dict['BTHotspot']
-
-        enforced_state = {}
-        sar_state_command = FORCE_SAR_ADB_COMMAND
-        for key in device_state_dict:
-            enforced_state[key[0]] = device_state_dict[key]
-            sar_state_command = '{} --ei {} {}'.format(
-                sar_state_command, key[1], device_state_dict[key])
-        if self.sar_version_2:
-            sar_state_command = '{} --es country_iso "{}"'.format(
-                sar_state_command, country_code.lower())
-
-        #Forcing the SAR state
-        adb_output = ad.adb.shell(sar_state_command)
-
-        # Checking if command was successfully enforced
-        if 'result=0' in adb_output:
-            self.log.info('Requested BT SAR state successfully enforced.')
-            return enforced_state
-        else:
-            self.log.error("Couldn't force BT SAR state.")
-
-    def parse_bt_logs(self, ad, begin_time, regex=''):
-        """Returns bt software stats by parsing logcat since begin_time.
-
-        The quantity to be fetched is dictated by the regex provided.
-
-        Args:
-             ad: android_device object.
-             begin_time: time stamp to start the logcat parsing.
-             regex: regex for fetching the required BT software stats.
-
-        Returns:
-             stat: the desired BT stat.
-        """
-        # Waiting for logcat to update
-        time.sleep(SLEEP_DURATION)
-        bt_adb_log = ad.adb.logcat('-b all -t %s' % begin_time)
-        for line in bt_adb_log.splitlines():
-            if re.findall(regex, line):
-                stat = re.findall(regex, line)[0]
-                return stat
-
-    def set_country_code(self, ad, cc):
-        """Sets the SAR regulatory domain as per given country code
-
-        The SAR regulatory domain is forced using an adb command that takes
-        country code as input.
-
-        Args:
-            ad: android_device object.
-            cc: country code
-        """
-
-        ad.adb.shell("{} --es country_iso {}".format(FORCE_SAR_ADB_COMMAND,
-                                                     cc))
-        self.log.info("Country Code set to {}".format(cc))
-
-    def get_country_code(self, ad, begin_time):
-        """Returns the enforced regulatory domain since begin_time
-
-        Returns enforced regulatory domain since begin_time by parsing logcat.
-        Function should follow a function call to set a country code
-
-        Args:
-            ad : android_device obj
-            begin_time: time stamp to start
-
-        Returns:
-            read enforced regulatory domain
-        """
-
-        reg_domain_regex = "updateRegulatoryDomain:\s+(\S+)"
-        reg_domain = self.parse_bt_logs(ad, begin_time, reg_domain_regex)
-        return reg_domain
-
-    def get_current_power_cap(self, ad, begin_time, type='EDR'):
-        """ Returns the enforced software EDR power cap since begin_time.
-
-        Returns the enforced EDR power cap since begin_time by parsing logcat.
-        Function should follow a function call that forces a SAR state
-
-        Args:
-            ad: android_device obj.
-            begin_time: time stamp to start.
-
-        Returns:
-            read enforced power cap
-        """
-        power_cap_regex_dict = {
-            'BDR': [
-                'Bluetooth powers: BR:\s+(\d+), EDR:\s+\d+',
-                'Bluetooth Tx Power Cap\s+(\d+)'
-            ],
-            'EDR': [
-                'Bluetooth powers: BR:\s+\d+, EDR:\s+(\d+)',
-                'Bluetooth Tx Power Cap\s+(\d+)'
-            ],
-            'BLE': [
-                'Bluetooth powers: BR:\s+\d+, EDR:\s+\d+, BLE:\s+(\d+)',
-                'Bluetooth Tx Power Cap\s+(\d+)'
-            ]
-        }
-
-        power_cap_regex_list = power_cap_regex_dict[type]
-
-        for power_cap_regex in power_cap_regex_list:
-            power_cap = self.parse_bt_logs(ad, begin_time, power_cap_regex)
-            if power_cap:
-                return int(power_cap)
-
-        raise ValueError('Failed to get TX power cap')
-
-    def get_current_device_state(self, ad, begin_time):
-        """ Returns the device state of the android dut since begin_time.
-
-        Returns the device state of the android dut by parsing logcat since
-        begin_time. Function should follow a function call that forces
-        a SAR state.
-
-        Args:
-            ad: android_device obj.
-            begin_time: time stamp to start.
-
-        Returns:
-            device_state: device state of the android device.
-        """
-
-        device_state_regex = 'updateDeviceState: DeviceState: ([\s*\S+\s]+)'
-        time.sleep(SLEEP_DURATION)
-        device_state = self.parse_bt_logs(ad, begin_time, device_state_regex)
-        if device_state:
-            return device_state
-
-        raise ValueError("Couldn't fetch device state")
-
-    def read_sar_table(self, ad, output_path=''):
-        """Extracts the BT SAR table from the phone.
-
-        Extracts the BT SAR table from the phone into the android device
-        log path directory.
-
-        Args:
-            ad: android_device object.
-            output_path: path to custom sar table
-        Returns:
-            df : BT SAR table (as pandas DataFrame).
-        """
-        if not output_path:
-            output_path = os.path.join(ad.device_log_path, self.sar_file_name)
-            ad.adb.pull('{} {}'.format(self.sar_file_path, output_path))
-
-        df = pd.read_csv(output_path)
-        self.log.info('BT SAR table read from the phone')
-        return df
-
-    def push_table(self, ad, src_path, dest_path=''):
-        """Pushes a BT SAR table to the phone.
-
-        Pushes a BT SAR table to the android device and reboots the device.
-        Also creates a backup file if backup flag is True.
-
-        Args:
-            ad: android_device object.
-            src_path: path to the  BT SAR table.
-        """
-        #Copying the to-be-pushed file for logging
-        if os.path.dirname(src_path) != ad.device_log_path:
-            job.run('cp {} {}'.format(src_path, ad.device_log_path))
-
-        #Pushing the file provided in the config
-        if dest_path:
-            ad.push_system_file(src_path, dest_path)
-        else:
-            ad.push_system_file(src_path, self.sar_file_path)
-        self.log.info('BT SAR table pushed')
-        ad.reboot()
-
-        self.bt_sar_df = self.read_sar_table(self.dut, src_path)
-
-    def set_PL10_atten_level(self, ad):
-        """Finds the attenuation level at which the phone is at PL10
-
-        Finds PL10 attenuation level by sweeping the attenuation range.
-        If the power level is not achieved during sweep,
-        returns the max atten level
-
-        Args:
-            ad: android object class
-        Returns:
-            atten : attenuation level when the phone is at PL10
-        """
-        BT_SAR_ATTEN_STEP = 3
-
-        for atten in range(self.atten_min, self.atten_max, BT_SAR_ATTEN_STEP):
-            self.attenuator.set_atten(atten)
-            # Sleep required for BQR to reflect the change in parameters
-            time.sleep(SLEEP_DURATION)
-            metrics = bt_utils.get_bt_metric(ad)
-            if metrics['pwlv'][ad.serial] == 10:
-                self.log.info(
-                    'PL10 located at {}'.format(atten + BT_SAR_ATTEN_STEP))
-                return atten + BT_SAR_ATTEN_STEP
-
-        self.log.warn(
-            "PL10 couldn't be located in the given attenuation range")
diff --git a/src/antlion/test_utils/bt/GattConnectedBaseTest.py b/src/antlion/test_utils/bt/GattConnectedBaseTest.py
deleted file mode 100644
index c131530..0000000
--- a/src/antlion/test_utils/bt/GattConnectedBaseTest.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This is base class for tests that exercises different GATT procedures between two connected devices.
-Setup/Teardown methods take care of establishing connection, and doing GATT DB initialization/discovery.
-"""
-
-from queue import Empty
-
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-from antlion.test_utils.bt.bt_constants import gatt_characteristic
-from antlion.test_utils.bt.bt_constants import gatt_descriptor
-from antlion.test_utils.bt.bt_constants import gatt_service_types
-from antlion.test_utils.bt.bt_constants import gatt_event
-from antlion.test_utils.bt.bt_constants import gatt_cb_err
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import gatt_mtu_size
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import orchestrate_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_characteristics
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_descriptors
-from antlion.test_utils.bt.bt_constants import gatt_char_desc_uuids
-from antlion.test_utils.bt.bt_constants import bt_default_timeout
-
-
-class GattConnectedBaseTest(BluetoothBaseTest):
-
-    TEST_SERVICE_UUID = "3846D7A0-69C8-11E4-BA00-0002A5D5C51B"
-    READABLE_CHAR_UUID = "21c0a0bf-ad51-4a2d-8124-b74003e4e8c8"
-    READABLE_DESC_UUID = "aa7edd5a-4d1d-4f0e-883a-d145616a1630"
-    WRITABLE_CHAR_UUID = "aa7edd5a-4d1d-4f0e-883a-d145616a1630"
-    WRITABLE_DESC_UUID = "76d5ed92-ca81-4edb-bb6b-9f019665fb32"
-    NOTIFIABLE_CHAR_UUID = "b2c83efa-34ca-11e6-ac61-9e71128cae77"
-
-    def setup_class(self):
-        super().setup_class()
-        self.cen_ad = self.android_devices[0]
-        self.per_ad = self.android_devices[1]
-
-    def setup_test(self):
-        super(GattConnectedBaseTest, self).setup_test()
-
-        self.gatt_server_callback, self.gatt_server = self._setup_multiple_services(
-        )
-        if not self.gatt_server_callback or not self.gatt_server:
-            raise AssertionError('Service setup failed')
-
-        self.bluetooth_gatt, self.gatt_callback, self.adv_callback = (
-            orchestrate_gatt_connection(self.cen_ad, self.per_ad))
-        self.per_ad.droid.bleStopBleAdvertising(self.adv_callback)
-
-        self.mtu = gatt_mtu_size['min']
-
-        if self.cen_ad.droid.gattClientDiscoverServices(self.bluetooth_gatt):
-            event = self._client_wait(gatt_event['gatt_serv_disc'])
-            self.discovered_services_index = event['data']['ServicesIndex']
-        services_count = self.cen_ad.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        self.test_service_index = None
-        for i in range(services_count):
-            disc_service_uuid = (
-                self.cen_ad.droid.gattClientGetDiscoveredServiceUuid(
-                    self.discovered_services_index, i).upper())
-            if disc_service_uuid == self.TEST_SERVICE_UUID:
-                self.test_service_index = i
-                break
-
-        if not self.test_service_index:
-            print("Service not found")
-            return False
-
-        connected_device_list = self.per_ad.droid.gattServerGetConnectedDevices(
-            self.gatt_server)
-        if len(connected_device_list) == 0:
-            self.log.info("No devices connected from peripheral.")
-            return False
-
-        return True
-
-    def teardown_test(self):
-        self.per_ad.droid.gattServerClearServices(self.gatt_server)
-        self.per_ad.droid.gattServerClose(self.gatt_server)
-
-        del self.gatt_server_callback
-        del self.gatt_server
-
-        self._orchestrate_gatt_disconnection(self.bluetooth_gatt,
-                                             self.gatt_callback)
-
-        return super(GattConnectedBaseTest, self).teardown_test()
-
-    def _server_wait(self, gatt_event):
-        return self._timed_pop(gatt_event, self.per_ad,
-                               self.gatt_server_callback)
-
-    def _client_wait(self, gatt_event):
-        return self._timed_pop(gatt_event, self.cen_ad, self.gatt_callback)
-
-    def _timed_pop(self, gatt_event, droid, gatt_callback):
-        expected_event = gatt_event["evt"].format(gatt_callback)
-        try:
-            return droid.ed.pop_event(expected_event, bt_default_timeout)
-        except Empty as emp:
-            raise AssertionError(gatt_event["err"].format(expected_event))
-
-    def _setup_characteristics_and_descriptors(self, droid):
-        characteristic_input = [
-            {
-                'uuid': self.WRITABLE_CHAR_UUID,
-                'property': gatt_characteristic['property_write'] |
-                gatt_characteristic['property_write_no_response'],
-                'permission': gatt_characteristic['permission_write']
-            },
-            {
-                'uuid': self.READABLE_CHAR_UUID,
-                'property': gatt_characteristic['property_read'],
-                'permission': gatt_characteristic['permission_read']
-            },
-            {
-                'uuid': self.NOTIFIABLE_CHAR_UUID,
-                'property': gatt_characteristic['property_notify'] |
-                gatt_characteristic['property_indicate'],
-                'permission': gatt_characteristic['permission_read']
-            },
-        ]
-        descriptor_input = [{
-            'uuid': self.WRITABLE_DESC_UUID,
-            'property': gatt_descriptor['permission_read'] |
-            gatt_characteristic['permission_write'],
-        }, {
-            'uuid': self.READABLE_DESC_UUID,
-            'property': gatt_descriptor['permission_read'] |
-            gatt_descriptor['permission_write'],
-        }, {
-            'uuid': gatt_char_desc_uuids['client_char_cfg'],
-            'property': gatt_descriptor['permission_read'] |
-            gatt_descriptor['permission_write'],
-        }]
-        characteristic_list = setup_gatt_characteristics(droid,
-                                                         characteristic_input)
-        self.notifiable_char_index = characteristic_list[2]
-        descriptor_list = setup_gatt_descriptors(droid, descriptor_input)
-        return characteristic_list, descriptor_list
-
-    def _orchestrate_gatt_disconnection(self, bluetooth_gatt, gatt_callback):
-        self.log.info("Disconnecting from peripheral device.")
-        try:
-            disconnect_gatt_connection(self.cen_ad, bluetooth_gatt,
-                                       gatt_callback)
-        except GattTestUtilsError as err:
-            log.error(err)
-            return False
-        self.cen_ad.droid.gattClientClose(bluetooth_gatt)
-        return True
-
-    def _find_service_added_event(self, gatt_server_callback, uuid):
-        expected_event = gatt_cb_strings['serv_added'].format(
-            gatt_server_callback)
-        try:
-            event = self.per_ad.ed.pop_event(expected_event,
-                                             bt_default_timeout)
-        except Empty:
-            self.log.error(gatt_cb_err['serv_added_err'].format(
-                expected_event))
-            return False
-        if event['data']['serviceUuid'].lower() != uuid.lower():
-            self.log.error("Uuid mismatch. Found: {}, Expected {}.".format(
-                event['data']['serviceUuid'], uuid))
-            return False
-        return True
-
-    def _setup_multiple_services(self):
-        gatt_server_callback = (
-            self.per_ad.droid.gattServerCreateGattServerCallback())
-        gatt_server = self.per_ad.droid.gattServerOpenGattServer(
-            gatt_server_callback)
-        characteristic_list, descriptor_list = (
-            self._setup_characteristics_and_descriptors(self.per_ad.droid))
-        self.per_ad.droid.gattServerCharacteristicAddDescriptor(
-            characteristic_list[0], descriptor_list[0])
-        self.per_ad.droid.gattServerCharacteristicAddDescriptor(
-            characteristic_list[1], descriptor_list[1])
-        self.per_ad.droid.gattServerCharacteristicAddDescriptor(
-            characteristic_list[2], descriptor_list[2])
-        gatt_service3 = self.per_ad.droid.gattServerCreateService(
-            self.TEST_SERVICE_UUID, gatt_service_types['primary'])
-        for characteristic in characteristic_list:
-            self.per_ad.droid.gattServerAddCharacteristicToService(
-                gatt_service3, characteristic)
-        self.per_ad.droid.gattServerAddService(gatt_server, gatt_service3)
-        result = self._find_service_added_event(gatt_server_callback,
-                                                self.TEST_SERVICE_UUID)
-        if not result:
-            return False, False
-        return gatt_server_callback, gatt_server
-
-    def assertEqual(self, first, second, msg=None):
-        if not first == second:
-            if not msg:
-                raise AssertionError('%r != %r' % (first, second))
-            else:
-                raise AssertionError(msg + ' %r != %r' % (first, second))
diff --git a/src/antlion/test_utils/bt/GattEnum.py b/src/antlion/test_utils/bt/GattEnum.py
deleted file mode 100644
index 4b4c171..0000000
--- a/src/antlion/test_utils/bt/GattEnum.py
+++ /dev/null
@@ -1,304 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-from enum import IntEnum
-
-
-class GattCbErr(Enum):
-    CHAR_WRITE_REQ_ERR = "Characteristic Write Request event not found. Expected {}"
-    CHAR_WRITE_ERR = "Characteristic Write event not found. Expected {}"
-    DESC_WRITE_REQ_ERR = "Descriptor Write Request event not found. Expected {}"
-    DESC_WRITE_ERR = "Descriptor Write event not found. Expected {}"
-    CHAR_READ_ERR = "Characteristic Read event not found. Expected {}"
-    CHAR_READ_REQ_ERR = "Characteristic Read Request not found. Expected {}"
-    DESC_READ_ERR = "Descriptor Read event not found. Expected {}"
-    DESC_READ_REQ_ERR = "Descriptor Read Request event not found. Expected {}"
-    RD_REMOTE_RSSI_ERR = "Read Remote RSSI event not found. Expected {}"
-    GATT_SERV_DISC_ERR = "GATT Services Discovered event not found. Expected {}"
-    SERV_ADDED_ERR = "Service Added event not found. Expected {}"
-    MTU_CHANGED_ERR = "MTU Changed event not found. Expected {}"
-    MTU_SERV_CHANGED_ERR = "MTU Server Changed event not found. Expected {}"
-    GATT_CONN_CHANGE_ERR = "GATT Connection Changed event not found. Expected {}"
-    CHAR_CHANGE_ERR = "GATT Characteristic Changed event not fond. Expected {}"
-    PHY_READ_ERR = "Phy Read event not fond. Expected {}"
-    PHY_UPDATE_ERR = "Phy Update event not fond. Expected {}"
-    EXEC_WRITE_ERR = "GATT Execute Write event not found. Expected {}"
-
-
-class GattCbStrings(Enum):
-    CHAR_WRITE_REQ = "GattServer{}onCharacteristicWriteRequest"
-    EXEC_WRITE = "GattServer{}onExecuteWrite"
-    CHAR_WRITE = "GattConnect{}onCharacteristicWrite"
-    DESC_WRITE_REQ = "GattServer{}onDescriptorWriteRequest"
-    DESC_WRITE = "GattConnect{}onDescriptorWrite"
-    CHAR_READ = "GattConnect{}onCharacteristicRead"
-    CHAR_READ_REQ = "GattServer{}onCharacteristicReadRequest"
-    DESC_READ = "GattConnect{}onDescriptorRead"
-    DESC_READ_REQ = "GattServer{}onDescriptorReadRequest"
-    RD_REMOTE_RSSI = "GattConnect{}onReadRemoteRssi"
-    GATT_SERV_DISC = "GattConnect{}onServicesDiscovered"
-    SERV_ADDED = "GattServer{}onServiceAdded"
-    MTU_CHANGED = "GattConnect{}onMtuChanged"
-    MTU_SERV_CHANGED = "GattServer{}onMtuChanged"
-    GATT_CONN_CHANGE = "GattConnect{}onConnectionStateChange"
-    CHAR_CHANGE = "GattConnect{}onCharacteristicChanged"
-    PHY_READ = "GattConnect{}onPhyRead"
-    PHY_UPDATE = "GattConnect{}onPhyUpdate"
-    SERV_PHY_READ = "GattServer{}onPhyRead"
-    SERV_PHY_UPDATE = "GattServer{}onPhyUpdate"
-
-
-class GattEvent(Enum):
-    CHAR_WRITE_REQ = {
-        "evt": GattCbStrings.CHAR_WRITE_REQ.value,
-        "err": GattCbErr.CHAR_WRITE_REQ_ERR.value
-    }
-    EXEC_WRITE = {
-        "evt": GattCbStrings.EXEC_WRITE.value,
-        "err": GattCbErr.EXEC_WRITE_ERR.value
-    }
-    CHAR_WRITE = {
-        "evt": GattCbStrings.CHAR_WRITE.value,
-        "err": GattCbErr.CHAR_WRITE_ERR.value
-    }
-    DESC_WRITE_REQ = {
-        "evt": GattCbStrings.DESC_WRITE_REQ.value,
-        "err": GattCbErr.DESC_WRITE_REQ_ERR.value
-    }
-    DESC_WRITE = {
-        "evt": GattCbStrings.DESC_WRITE.value,
-        "err": GattCbErr.DESC_WRITE_ERR.value
-    }
-    CHAR_READ = {
-        "evt": GattCbStrings.CHAR_READ.value,
-        "err": GattCbErr.CHAR_READ_ERR.value
-    }
-    CHAR_READ_REQ = {
-        "evt": GattCbStrings.CHAR_READ_REQ.value,
-        "err": GattCbErr.CHAR_READ_REQ_ERR.value
-    }
-    DESC_READ = {
-        "evt": GattCbStrings.DESC_READ.value,
-        "err": GattCbErr.DESC_READ_ERR.value
-    }
-    DESC_READ_REQ = {
-        "evt": GattCbStrings.DESC_READ_REQ.value,
-        "err": GattCbErr.DESC_READ_REQ_ERR.value
-    }
-    RD_REMOTE_RSSI = {
-        "evt": GattCbStrings.RD_REMOTE_RSSI.value,
-        "err": GattCbErr.RD_REMOTE_RSSI_ERR.value
-    }
-    GATT_SERV_DISC = {
-        "evt": GattCbStrings.GATT_SERV_DISC.value,
-        "err": GattCbErr.GATT_SERV_DISC_ERR.value
-    }
-    SERV_ADDED = {
-        "evt": GattCbStrings.SERV_ADDED.value,
-        "err": GattCbErr.SERV_ADDED_ERR.value
-    }
-    MTU_CHANGED = {
-        "evt": GattCbStrings.MTU_CHANGED.value,
-        "err": GattCbErr.MTU_CHANGED_ERR.value
-    }
-    GATT_CONN_CHANGE = {
-        "evt": GattCbStrings.GATT_CONN_CHANGE.value,
-        "err": GattCbErr.GATT_CONN_CHANGE_ERR.value
-    }
-    CHAR_CHANGE = {
-        "evt": GattCbStrings.CHAR_CHANGE.value,
-        "err": GattCbErr.CHAR_CHANGE_ERR.value
-    }
-    PHY_READ = {
-        "evt": GattCbStrings.PHY_READ.value,
-        "err": GattCbErr.PHY_READ_ERR.value
-    }
-    PHY_UPDATE = {
-        "evt": GattCbStrings.PHY_UPDATE.value,
-        "err": GattCbErr.PHY_UPDATE_ERR.value
-    }
-    SERV_PHY_READ = {
-        "evt": GattCbStrings.SERV_PHY_READ.value,
-        "err": GattCbErr.PHY_READ_ERR.value
-    }
-    SERV_PHY_UPDATE = {
-        "evt": GattCbStrings.SERV_PHY_UPDATE.value,
-        "err": GattCbErr.PHY_UPDATE_ERR.value
-    }
-
-
-class GattConnectionState(IntEnum):
-    STATE_DISCONNECTED = 0
-    STATE_CONNECTING = 1
-    STATE_CONNECTED = 2
-    STATE_DISCONNECTING = 3
-
-
-class GattCharacteristic(Enum):
-    PROPERTY_BROADCAST = 0x01
-    PROPERTY_READ = 0x02
-    PROPERTY_WRITE_NO_RESPONSE = 0x04
-    PROPERTY_WRITE = 0x08
-    PROPERTY_NOTIFY = 0x10
-    PROPERTY_INDICATE = 0x20
-    PROPERTY_SIGNED_WRITE = 0x40
-    PROPERTY_EXTENDED_PROPS = 0x80
-    PERMISSION_READ = 0x01
-    PERMISSION_READ_ENCRYPTED = 0x02
-    PERMISSION_READ_ENCRYPTED_MITM = 0x04
-    PERMISSION_WRITE = 0x10
-    PERMISSION_WRITE_ENCRYPTED = 0x20
-    PERMISSION_WRITE_ENCRYPTED_MITM = 0x40
-    PERMISSION_WRITE_SIGNED = 0x80
-    PERMISSION_WRITE_SIGNED_MITM = 0x100
-    WRITE_TYPE_DEFAULT = 0x02
-    WRITE_TYPE_NO_RESPONSE = 0x01
-    WRITE_TYPE_SIGNED = 0x04
-    FORMAT_UINT8 = 0x11
-    FORMAT_UINT16 = 0x12
-    FORMAT_UINT32 = 0x14
-    FORMAT_SINT8 = 0x21
-    FORMAT_SINT16 = 0x22
-    FORMAT_SINT32 = 0x24
-    FORMAT_SFLOAT = 0x32
-    FORMAT_FLOAT = 0x34
-
-
-class GattDescriptor(Enum):
-    ENABLE_NOTIFICATION_VALUE = [0x01, 0x00]
-    ENABLE_INDICATION_VALUE = [0x02, 0x00]
-    DISABLE_NOTIFICATION_VALUE = [0x00, 0x00]
-    PERMISSION_READ = 0x01
-    PERMISSION_READ_ENCRYPTED = 0x02
-    PERMISSION_READ_ENCRYPTED_MITM = 0x04
-    PERMISSION_WRITE = 0x10
-    PERMISSION_WRITE_ENCRYPTED = 0x20
-    PERMISSION_WRITE_ENCRYPTED_MITM = 0x40
-    PERMISSION_WRITE_SIGNED = 0x80
-    PERMISSION_WRITE_SIGNED_MITM = 0x100
-
-
-class GattCharDesc(Enum):
-    GATT_CHARAC_EXT_PROPER_UUID = '00002900-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_USER_DESC_UUID = '00002901-0000-1000-8000-00805f9b34fb'
-    GATT_CLIENT_CHARAC_CFG_UUID = '00002902-0000-1000-8000-00805f9b34fb'
-    GATT_SERVER_CHARAC_CFG_UUID = '00002903-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_FMT_UUID = '00002904-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_AGREG_FMT_UUID = '00002905-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_VALID_RANGE_UUID = '00002906-0000-1000-8000-00805f9b34fb'
-    GATT_EXTERNAL_REPORT_REFERENCE = '00002907-0000-1000-8000-00805f9b34fb'
-    GATT_REPORT_REFERENCE = '00002908-0000-1000-8000-00805f9b34fb'
-
-
-class GattCharTypes(Enum):
-    GATT_CHARAC_DEVICE_NAME = '00002a00-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_APPEARANCE = '00002a01-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_PERIPHERAL_PRIV_FLAG = '00002a02-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_RECONNECTION_ADDRESS = '00002a03-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_PERIPHERAL_PREF_CONN = '00002a04-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_SERVICE_CHANGED = '00002a05-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_SYSTEM_ID = '00002a23-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_MODEL_NUMBER_STRING = '00002a24-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_SERIAL_NUMBER_STRING = '00002a25-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_FIRMWARE_REVISION_STRING = '00002a26-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_HARDWARE_REVISION_STRING = '00002a27-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_SOFTWARE_REVISION_STRING = '00002a28-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_MANUFACTURER_NAME_STRING = '00002a29-0000-1000-8000-00805f9b34fb'
-    GATT_CHARAC_PNP_ID = '00002a50-0000-1000-8000-00805f9b34fb'
-
-
-class GattCharacteristicAttrLength(Enum):
-    MTU_ATTR_1 = 1
-    MTU_ATTR_2 = 3
-    MTU_ATTR_3 = 15
-
-
-class CharacteristicValueFormat(Enum):
-    STRING = 0x1
-    BYTE = 0x2
-    FORMAT_SINT8 = 0x21
-    FORMAT_UINT8 = 0x11
-    FORMAT_SINT16 = 0x22
-    FORMAT_UINT16 = 0x12
-    FORMAT_SINT32 = 0x24
-    FORMAT_UINT32 = 0x14
-
-
-class GattService(IntEnum):
-    SERVICE_TYPE_PRIMARY = 0
-    SERVICE_TYPE_SECONDARY = 1
-
-
-class GattConnectionPriority(IntEnum):
-    CONNECTION_PRIORITY_BALANCED = 0
-    CONNECTION_PRIORITY_HIGH = 1
-    CONNECTION_PRIORITY_LOW_POWER = 2
-
-
-class MtuSize(IntEnum):
-    MIN = 23
-    MAX = 217
-
-
-class GattCharacteristicAttrLength(IntEnum):
-    MTU_ATTR_1 = 1
-    MTU_ATTR_2 = 3
-    MTU_ATTR_3 = 15
-
-
-class BluetoothGatt(Enum):
-    GATT_SUCCESS = 0
-    GATT_FAILURE = 0x101
-
-
-class GattTransport(IntEnum):
-    TRANSPORT_AUTO = 0x00
-    TRANSPORT_BREDR = 0x01
-    TRANSPORT_LE = 0x02
-
-
-class GattPhy(IntEnum):
-    PHY_LE_1M = 1
-    PHY_LE_2M = 2
-    PHY_LE_CODED = 3
-
-
-class GattPhyMask(IntEnum):
-    PHY_LE_1M_MASK = 1
-    PHY_LE_2M_MASK = 2
-    PHY_LE_CODED_MASK = 4
-
-
-# TODO Decide whether to continue with Enums or move to dictionaries
-GattServerResponses = {
-    "GATT_SUCCESS": 0x0,
-    "GATT_FAILURE": 0x1,
-    "GATT_READ_NOT_PERMITTED": 0x2,
-    "GATT_WRITE_NOT_PERMITTED": 0x3,
-    "GATT_INVALID_PDU": 0x4,
-    "GATT_INSUFFICIENT_AUTHENTICATION": 0x5,
-    "GATT_REQUEST_NOT_SUPPORTED": 0x6,
-    "GATT_INVALID_OFFSET": 0x7,
-    "GATT_INSUFFICIENT_AUTHORIZATION": 0x8,
-    "GATT_INVALID_ATTRIBUTE_LENGTH": 0xD,
-    "GATT_INSUFFICIENT_ENCRYPTION": 0xF,
-    "GATT_CONNECTION_CONGESTED": 0x8F,
-    "GATT_13_ERR": 0x13,
-    "GATT_12_ERR": 0x12,
-    "GATT_0C_ERR": 0x0C,
-    "GATT_16": 0x16
-}
diff --git a/src/antlion/test_utils/bt/__init__.py b/src/antlion/test_utils/bt/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/bt/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/bt/ble_lib.py b/src/antlion/test_utils/bt/ble_lib.py
deleted file mode 100644
index ee2cb1c..0000000
--- a/src/antlion/test_utils/bt/ble_lib.py
+++ /dev/null
@@ -1,211 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Ble libraries
-"""
-
-from antlion.test_utils.bt.bt_constants import ble_advertise_settings_modes
-from antlion.test_utils.bt.bt_constants import ble_advertise_settings_tx_powers
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_modes
-from antlion.test_utils.bt.bt_constants import small_timeout
-from antlion.test_utils.bt.bt_constants import adv_fail
-from antlion.test_utils.bt.bt_constants import adv_succ
-from antlion.test_utils.bt.bt_constants import advertising_set_on_own_address_read
-from antlion.test_utils.bt.bt_constants import advertising_set_started
-from antlion.test_utils.bt.bt_test_utils import generate_ble_advertise_objects
-
-import time
-
-
-class BleLib():
-    def __init__(self, log, dut):
-        self.advertisement_list = []
-        self.dut = dut
-        self.log = log
-        self.default_timeout = 5
-        self.set_advertisement_list = []
-        self.generic_uuid = "0000{}-0000-1000-8000-00805f9b34fb"
-
-    def _verify_ble_adv_started(self, advertise_callback):
-        """Helper for verifying if an advertisment started or not"""
-        regex = "({}|{})".format(adv_succ.format(advertise_callback),
-                                 adv_fail.format(advertise_callback))
-        try:
-            event = self.dut.ed.pop_events(regex, 5, small_timeout)
-        except Empty:
-            self.dut.log.error("Failed to get success or failed event.")
-            return
-        if event[0]["name"] == adv_succ.format(advertise_callback):
-            self.dut.log.info("Advertisement started successfully.")
-            return True
-        else:
-            self.dut.log.info("Advertisement failed to start.")
-            return False
-
-    def start_generic_connectable_advertisement(self, line):
-        """Start a connectable LE advertisement"""
-        scan_response = None
-        if line:
-            scan_response = bool(line)
-        self.dut.droid.bleSetAdvertiseSettingsAdvertiseMode(
-            ble_advertise_settings_modes['low_latency'])
-        self.dut.droid.bleSetAdvertiseSettingsIsConnectable(True)
-        advertise_callback, advertise_data, advertise_settings = (
-            generate_ble_advertise_objects(self.dut.droid))
-        if scan_response:
-            self.dut.droid.bleStartBleAdvertisingWithScanResponse(
-                advertise_callback, advertise_data, advertise_settings,
-                advertise_data)
-        else:
-            self.dut.droid.bleStartBleAdvertising(advertise_callback,
-                                                  advertise_data,
-                                                  advertise_settings)
-        if self._verify_ble_adv_started(advertise_callback):
-            self.log.info(
-                "Tracking Callback ID: {}".format(advertise_callback))
-            self.advertisement_list.append(advertise_callback)
-            self.log.info(self.advertisement_list)
-
-    def start_connectable_advertisement_set(self, line):
-        """Start Connectable Advertisement Set"""
-        adv_callback = self.dut.droid.bleAdvSetGenCallback()
-        adv_data = {
-            "includeDeviceName": True,
-        }
-        self.dut.droid.bleAdvSetStartAdvertisingSet(
-            {
-                "connectable": True,
-                "legacyMode": False,
-                "primaryPhy": "PHY_LE_1M",
-                "secondaryPhy": "PHY_LE_1M",
-                "interval": 320
-            }, adv_data, None, None, None, 0, 0, adv_callback)
-        evt = self.dut.ed.pop_event(
-            advertising_set_started.format(adv_callback), self.default_timeout)
-        set_id = evt['data']['setId']
-        self.log.error("did not receive the set started event!")
-        evt = self.dut.ed.pop_event(
-            advertising_set_on_own_address_read.format(set_id),
-            self.default_timeout)
-        address = evt['data']['address']
-        self.log.info("Advertiser address is: {}".format(str(address)))
-        self.set_advertisement_list.append(adv_callback)
-
-    def stop_all_advertisement_set(self, line):
-        """Stop all Advertisement Sets"""
-        for adv in self.set_advertisement_list:
-            try:
-                self.dut.droid.bleAdvSetStopAdvertisingSet(adv)
-            except Exception as err:
-                self.log.error("Failed to stop advertisement: {}".format(err))
-
-    def adv_add_service_uuid_list(self, line):
-        """Add service UUID to the LE advertisement inputs:
-         [uuid1 uuid2 ... uuidN]"""
-        uuids = line.split()
-        uuid_list = []
-        for uuid in uuids:
-            if len(uuid) == 4:
-                uuid = self.generic_uuid.format(line)
-            uuid_list.append(uuid)
-        self.dut.droid.bleSetAdvertiseDataSetServiceUuids(uuid_list)
-
-    def adv_data_include_local_name(self, is_included):
-        """Include local name in the advertisement. inputs: [true|false]"""
-        self.dut.droid.bleSetAdvertiseDataIncludeDeviceName(bool(is_included))
-
-    def adv_data_include_tx_power_level(self, is_included):
-        """Include tx power level in the advertisement. inputs: [true|false]"""
-        self.dut.droid.bleSetAdvertiseDataIncludeTxPowerLevel(
-            bool(is_included))
-
-    def adv_data_add_manufacturer_data(self, line):
-        """Include manufacturer id and data to the advertisment:
-        [id data1 data2 ... dataN]"""
-        info = line.split()
-        manu_id = int(info[0])
-        manu_data = []
-        for data in info[1:]:
-            manu_data.append(int(data))
-        self.dut.droid.bleAddAdvertiseDataManufacturerId(manu_id, manu_data)
-
-    def start_generic_nonconnectable_advertisement(self, line):
-        """Start a nonconnectable LE advertisement"""
-        self.dut.droid.bleSetAdvertiseSettingsAdvertiseMode(
-            ble_advertise_settings_modes['low_latency'])
-        self.dut.droid.bleSetAdvertiseSettingsIsConnectable(False)
-        advertise_callback, advertise_data, advertise_settings = (
-            generate_ble_advertise_objects(self.dut.droid))
-        self.dut.droid.bleStartBleAdvertising(advertise_callback,
-                                              advertise_data,
-                                              advertise_settings)
-        if self._verify_ble_adv_started(advertise_callback):
-            self.log.info(
-                "Tracking Callback ID: {}".format(advertise_callback))
-            self.advertisement_list.append(advertise_callback)
-            self.log.info(self.advertisement_list)
-
-    def stop_all_advertisements(self, line):
-        """Stop all LE advertisements"""
-        for callback_id in self.advertisement_list:
-            self.log.info("Stopping Advertisement {}".format(callback_id))
-            self.dut.droid.bleStopBleAdvertising(callback_id)
-            time.sleep(1)
-        self.advertisement_list = []
-
-    def ble_stop_advertisement(self, callback_id):
-        """Stop an LE advertisement"""
-        if not callback_id:
-            self.log.info("Need a callback ID")
-            return
-        callback_id = int(callback_id)
-        if callback_id not in self.advertisement_list:
-            self.log.info("Callback not in list of advertisements.")
-            return
-        self.dut.droid.bleStopBleAdvertising(callback_id)
-        self.advertisement_list.remove(callback_id)
-
-    def start_max_advertisements(self, line):
-        scan_response = None
-        if line:
-            scan_response = bool(line)
-        while (True):
-            try:
-                self.dut.droid.bleSetAdvertiseSettingsAdvertiseMode(
-                    ble_advertise_settings_modes['low_latency'])
-                self.dut.droid.bleSetAdvertiseSettingsIsConnectable(True)
-                advertise_callback, advertise_data, advertise_settings = (
-                    generate_ble_advertise_objects(self.dut.droid))
-                if scan_response:
-                    self.dut.droid.bleStartBleAdvertisingWithScanResponse(
-                        advertise_callback, advertise_data, advertise_settings,
-                        advertise_data)
-                else:
-                    self.dut.droid.bleStartBleAdvertising(
-                        advertise_callback, advertise_data, advertise_settings)
-                if self._verify_ble_adv_started(advertise_callback):
-                    self.log.info(
-                        "Tracking Callback ID: {}".format(advertise_callback))
-                    self.advertisement_list.append(advertise_callback)
-                    self.log.info(self.advertisement_list)
-                else:
-                    self.log.info("Advertisements active: {}".format(
-                        len(self.advertisement_list)))
-                    return False
-            except Exception as err:
-                self.log.info("Advertisements active: {}".format(
-                    len(self.advertisement_list)))
-                return True
diff --git a/src/antlion/test_utils/bt/ble_performance_test_utils.py b/src/antlion/test_utils/bt/ble_performance_test_utils.py
deleted file mode 100644
index 29055f6..0000000
--- a/src/antlion/test_utils/bt/ble_performance_test_utils.py
+++ /dev/null
@@ -1,328 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-import datetime
-import statistics
-import os
-from antlion.test_utils.bt.bt_constants import advertising_set_started
-import antlion.test_utils.wifi.wifi_performance_test_utils.bokeh_figure as bokeh_figure
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_phys
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_modes
-from antlion.test_utils.bt.bt_gatt_utils import close_gatt_client
-from antlion.test_utils.bt.bt_coc_test_utils import do_multi_connection_throughput
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-from queue import Empty
-from antlion.test_utils.bt.bt_constants import gatt_cb_err
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import l2cap_coc_header_size
-from antlion.test_utils.bt.bt_gatt_utils import GattTestUtilsError
-from antlion.test_utils.bt.bt_test_utils import generate_ble_scan_objects
-from antlion.test_utils.bt.bt_coc_test_utils import orchestrate_coc_connection
-from antlion.test_utils.bt.bt_gatt_utils import orchestrate_gatt_connection
-from concurrent.futures import ThreadPoolExecutor
-
-default_event_timeout = 10
-rssi_read_duration = 25
-
-
-def establish_ble_connection(client_ad, server_ad):
-    """Function to establish BLE connection between two BLE devices.
-
-    Args:
-        client_ad: the Android device performing the connection.
-        server_ad: the Android device accepting the connection.
-    Returns:
-        bluetooth_gatt: GATT object
-        gatt_callback: Gatt callback object
-        adv_callback: advertisement callback object
-        gatt_server: the gatt server
-    """
-    gatt_server_cb = server_ad.droid.gattServerCreateGattServerCallback()
-    gatt_server = server_ad.droid.gattServerOpenGattServer(gatt_server_cb)
-    try:
-        bluetooth_gatt, gatt_callback, adv_callback = (
-            orchestrate_gatt_connection(client_ad, server_ad))
-    except GattTestUtilsError as err:
-        logging.error(err)
-        return False
-    return bluetooth_gatt, gatt_callback, adv_callback, gatt_server
-
-
-def read_ble_rssi(client_ad, gatt_server, gatt_callback):
-    """Function to Read BLE RSSI of the remote BLE device.
-    Args:
-        client_ad: the Android device performing the connection.
-        gatt_server: the gatt server
-        gatt_callback:the gatt connection call back object
-    Returns:
-      ble_rssi: RSSI value of the remote BLE device
-    """
-    AVG_RSSI = []
-    end_time = time.time() + rssi_read_duration
-    logging.info("Reading BLE RSSI for {} sec".format(rssi_read_duration))
-    while time.time() < end_time:
-        expected_event = gatt_cb_strings['rd_remote_rssi'].format(
-            gatt_callback)
-        read_rssi = client_ad.droid.gattClientReadRSSI(gatt_server)
-        if read_rssi:
-            try:
-                event = client_ad.ed.pop_event(expected_event,
-                                               default_event_timeout)
-            except Empty:
-                logging.error(
-                    gatt_cb_err['rd_remote_rssi_err'].format(expected_event))
-                return False
-        rssi_value = event['data']['Rssi']
-        AVG_RSSI.append(rssi_value)
-    logging.debug("First & Last reading of RSSI :{:03d} & {:03d}".format(
-        AVG_RSSI[0], AVG_RSSI[-1]))
-    ble_rssi = statistics.mean(AVG_RSSI)
-    ble_rssi = round(ble_rssi, 2)
-
-    return ble_rssi
-
-
-def read_ble_scan_rssi(client_ad, scan_callback, rssi_read_duration=30):
-    """Function to Read BLE RSSI of the remote BLE device.
-    Args:
-        client_ad: the Android device performing the connection.
-        scan_callback: the scan callback of the server
-    Returns:
-      ble_rssi: RSSI value of the remote BLE device
-      raw_rssi: RSSI list of remote BLE device
-    """
-    raw_rssi = []
-    timestamp = []
-    end_time = time.time() + rssi_read_duration
-    logging.info("Reading BLE Scan RSSI for {} sec".format(rssi_read_duration))
-    while time.time() < end_time:
-        expected_event = gatt_cb_strings['rd_remote_ble_rssi'].format(
-            scan_callback)
-        try:
-            event = client_ad.ed.pop_event(expected_event,
-                                           default_event_timeout)
-        except Empty:
-            logging.error(
-                gatt_cb_err['rd_remote_rssi_err'].format(expected_event))
-            return False
-        rssi_value = event['data']['Result']['rssi']
-        epoch_time = event['time']
-        d = datetime.datetime.fromtimestamp(epoch_time / 1000)
-        tstamp = d.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
-        timestamp.append(tstamp)
-        raw_rssi.append(rssi_value)
-    logging.debug("First & Last reading of RSSI :{:03d} & {:03d}".format(
-        raw_rssi[0], raw_rssi[-1]))
-    ble_rssi = statistics.mean(raw_rssi)
-    ble_rssi = round(ble_rssi, 2)
-
-    return ble_rssi, raw_rssi, timestamp
-
-
-def ble_coc_connection(client_ad, server_ad):
-    """Sets up the CoC connection between two Android devices.
-
-    Args:
-        client_ad: the Android device performing the connection.
-        server_ad: the Android device accepting the connection.
-
-    Returns:
-        True if connection was successful or false if unsuccessful,
-        gatt_callback: GATT callback object
-        client connection ID: Client connection ID
-        and server connection ID : server connection ID
-    """
-    # secured_conn: True if using secured connection
-    # le_connection_interval: LE Connection interval. 0 means use default.
-    # buffer_size : is the number of bytes per L2CAP data buffer
-    # le_tx_data_length: LE Data Length used by BT Controller to transmit.
-    is_secured = False
-    le_connection_interval = 30
-    buffer_size = 240
-    le_tx_data_length = buffer_size + l2cap_coc_header_size
-    gatt_server_cb = server_ad.droid.gattServerCreateGattServerCallback()
-    gatt_server = server_ad.droid.gattServerOpenGattServer(gatt_server_cb)
-
-    logging.info(
-        "orchestrate_ble_coc_connection. is_secured={}, Connection Interval={}msec, "
-        "buffer_size={}bytes".format(is_secured, le_connection_interval,
-                                     buffer_size))
-    try:
-        status, client_conn_id, server_conn_id, bluetooth_gatt, gatt_callback = orchestrate_coc_connection(
-            client_ad,
-            server_ad,
-            True,
-            is_secured,
-            le_connection_interval,
-            le_tx_data_length,
-            gatt_disconnection=False)
-    except Exception as err:
-        logging.info("Failed to esatablish COC connection".format(err))
-        return 0
-    return True, gatt_callback, gatt_server, bluetooth_gatt, client_conn_id
-
-
-def run_ble_throughput(server_ad,
-                       client_conn_id,
-                       client_ad,
-                       num_iterations=30):
-    """Function to measure Throughput from one client to one-or-many servers
-
-    Args:
-        server_ad: the Android device accepting the connection.
-        client_conn_id: the client connection ID.
-        client_ad: the Android device performing the connection.
-        num_iterations: The num_iterations is that number of repetitions of each
-        set of buffers r/w.
-    Returns:
-      data_rate: Throughput in terms of bytes per second, 0 if test failed.
-    """
-    # number_buffers is the total number of data buffers to transmit per
-    # set of buffers r/w.
-    # buffer_size is the number of bytes per L2CAP data buffer.
-    number_buffers = 100
-    buffer_size = 240
-    list_server_ad = [server_ad]
-    list_client_conn_id = [client_conn_id]
-    data_rate = do_multi_connection_throughput(client_ad, list_server_ad,
-                                               list_client_conn_id,
-                                               num_iterations, number_buffers,
-                                               buffer_size)
-    if data_rate <= 0:
-        return False
-    data_rate = data_rate * 8
-    logging.info(
-        "run_ble_coc_connection_throughput: throughput=%d bites per sec",
-        data_rate)
-    return data_rate
-
-
-def run_ble_throughput_and_read_rssi(client_ad, server_ad, client_conn_id,
-                                     gatt_server, gatt_callback):
-    """Function to measure ble rssi while sendinng data from client to server
-
-    Args:
-        client_ad: the Android device performing the connection.
-        server_ad: the Android device accepting the connection.
-        client_conn_id: the client connection ID.
-        gatt_server: the gatt server
-        gatt_callback: Gatt callback object
-    Returns:
-      ble_rssi: RSSI value of the remote BLE device.
-    """
-    executor = ThreadPoolExecutor(2)
-    ble_throughput = executor.submit(run_ble_throughput, client_ad,
-                                     client_conn_id, server_ad)
-    ble_rssi = executor.submit(read_ble_rssi, server_ad, gatt_server,
-                               gatt_callback)
-    logging.info("BLE RSSI is:{} dBm with data rate={} bites per sec ".format(
-        ble_rssi.result(), ble_throughput.result()))
-    return ble_rssi.result()
-
-
-def ble_gatt_disconnection(client_ad, bluetooth_gatt, gatt_callback):
-    """Function to disconnect GATT connection between client and server.
-
-    Args:
-        client_ad: the Android device performing the connection.
-        bluetooth_gatt: GATT object
-        gatt_callback:the gatt connection call back object
-    Returns:
-      ble_rssi: RSSI value of the remote BLE device
-    """
-    logging.info("Disconnecting from peripheral device.")
-    try:
-        disconnect_gatt_connection(client_ad, bluetooth_gatt, gatt_callback)
-        close_gatt_client(client_ad, bluetooth_gatt)
-    except GattTestUtilsError as err:
-        logging.error(err)
-        return False
-    return True
-
-
-def plot_graph(df, plot_data, bokeh_data, secondary_y_label=None):
-    """ Plotting for generating bokeh figure
-
-    Args:
-        df: Summary of results contains attenuation, DUT RSSI, remote RSSI and Tx Power
-        plot_data: plot_data for adding line to existing BokehFigure
-        bokeh_data: bokeh data for generating BokehFigure
-        secondary_y_label : label for secondary y axis , None if not available
-    """
-    plot = bokeh_figure.BokehFigure(
-        title='{}'.format(bokeh_data['current_test_name']),
-        x_label=bokeh_data['x_label'],
-        primary_y_label=bokeh_data['primary_y_label'],
-        secondary_y_label=secondary_y_label,
-        axis_label_size='16pt',
-        legend_label_size='16pt',
-        axis_tick_label_size='16pt',
-        sizing_mode='stretch_both')
-
-    for data in plot_data:
-        plot.add_line(df[plot_data[data].get('x_column')],
-                      df[plot_data[data].get('y_column')],
-                      legend=plot_data[data].get('legend'),
-                      marker=plot_data[data].get('marker'),
-                      y_axis=plot_data[data].get('y_axis'))
-
-    results_file_path = os.path.join(
-        bokeh_data['log_path'],
-        '{}.html'.format(bokeh_data['current_test_name']))
-    plot.generate_figure()
-    bokeh_figure.BokehFigure.save_figures([plot], results_file_path)
-
-
-def start_advertising_and_scanning(client_ad, server_ad, Legacymode=True):
-    """Function to start bt5 advertisement.
-
-        Args:
-            client_ad: the Android device performing the scanning.
-            server_ad: the Android device performing the bt advertising
-            Legacymode: True for Legacy advertising mode, false for bt5 advertising mode
-        Returns:
-          adv_callback: the advertising callback
-          scan_callback: the scan_callback
-        """
-    adv_callback = server_ad.droid.bleAdvSetGenCallback()
-    adv_data = {
-        "includeDeviceName": True,
-    }
-    server_ad.droid.bleAdvSetStartAdvertisingSet(
-        {
-            "connectable": False,
-            "legacyMode": Legacymode,
-            "primaryPhy": "PHY_LE_1M",
-            "secondaryPhy": "PHY_LE_1M",
-            "interval": 320
-        }, adv_data, None, None, None, 0, 0, adv_callback)
-    server_ad.ed.pop_event(advertising_set_started.format(adv_callback),
-                           default_event_timeout)
-    logging.info("Bt5 Advertiser Started Successfully")
-    client_ad.droid.bleSetScanSettingsLegacy(False)
-    client_ad.droid.bleSetScanSettingsScanMode(
-        ble_scan_settings_modes['low_latency'])
-    client_ad.droid.bleSetScanSettingsPhy(ble_scan_settings_phys['1m'])
-
-    filter_list, scan_settings, scan_callback = generate_ble_scan_objects(
-        client_ad.droid)
-    adv_device_name = server_ad.droid.bluetoothGetLocalName()
-    client_ad.droid.bleSetScanFilterDeviceName(adv_device_name)
-    client_ad.droid.bleBuildScanFilter(filter_list)
-    client_ad.droid.bleStartBleScan(filter_list, scan_settings, scan_callback)
-    return adv_callback, scan_callback
diff --git a/src/antlion/test_utils/bt/bt_coc_test_utils.py b/src/antlion/test_utils/bt/bt_coc_test_utils.py
deleted file mode 100644
index 6da6350..0000000
--- a/src/antlion/test_utils/bt/bt_coc_test_utils.py
+++ /dev/null
@@ -1,299 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-from antlion import utils
-
-from antlion.test_utils.bt.bt_constants import bt_default_timeout
-from antlion.test_utils.bt.bt_constants import default_bluetooth_socket_timeout_ms
-from antlion.test_utils.bt.bt_constants import default_le_connection_interval_ms
-from antlion.test_utils.bt.bt_constants import default_le_data_length
-from antlion.test_utils.bt.bt_constants import gatt_phy
-from antlion.test_utils.bt.bt_constants import gatt_transport
-from antlion.test_utils.bt.bt_constants import l2cap_coc_header_size
-from antlion.test_utils.bt.bt_constants import le_connection_event_time_step_ms
-from antlion.test_utils.bt.bt_constants import le_connection_interval_time_step_ms
-from antlion.test_utils.bt.bt_constants import le_default_supervision_timeout
-from antlion.test_utils.bt.bt_test_utils import get_mac_address_of_generic_advertisement
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-
-log = logging
-
-
-class BtCoCTestUtilsError(Exception):
-    pass
-
-
-def do_multi_connection_throughput(client_ad, list_server_ad,
-                                   list_client_conn_id, num_iterations,
-                                   number_buffers, buffer_size):
-    """Throughput measurements from one client to one-or-many servers.
-
-    Args:
-        client_ad: the Android device to perform the write.
-        list_server_ad: the list of Android server devices connected to this client.
-        list_client_conn_id: list of client connection IDs
-        num_iterations: the number of test repetitions.
-        number_buffers: the total number of data buffers to transmit per test.
-        buffer_size: the number of bytes per L2CAP data buffer.
-
-    Returns:
-        Throughput in terms of bytes per second, 0 if test failed.
-    """
-
-    total_num_bytes = 0
-    start_write_time = time.perf_counter()
-    client_ad.log.info(
-        "do_multi_connection_throughput: Before write. Start Time={:f}, "
-        "num_iterations={}, number_buffers={}, buffer_size={}, "
-        "number_buffers*buffer_size={}, num_servers={}".format(
-            start_write_time, num_iterations, number_buffers, buffer_size,
-            number_buffers * buffer_size, len(list_server_ad)))
-
-    if (len(list_server_ad) != len(list_client_conn_id)):
-        client_ad.log.error("do_multi_connection_throughput: invalid "
-                            "parameters. Num of list_server_ad({}) != "
-                            "list_client_conn({})".format(
-                                len(list_server_ad), len(list_client_conn_id)))
-        return 0
-
-    try:
-        for _, client_conn_id in enumerate(list_client_conn_id):
-            client_ad.log.info("do_multi_connection_throughput: "
-                               "client_conn_id={}".format(client_conn_id))
-            # Plumb the tx data queue with the first set of data buffers.
-            client_ad.droid.bluetoothConnectionThroughputSend(
-                number_buffers, buffer_size, client_conn_id)
-    except Exception as err:
-        client_ad.log.error("Failed to write data: {}".format(err))
-        return 0
-
-    # Each Loop iteration will write and read one set of buffers.
-    for _ in range(0, (num_iterations - 1)):
-        try:
-            for _, client_conn_id in enumerate(list_client_conn_id):
-                client_ad.droid.bluetoothConnectionThroughputSend(
-                    number_buffers, buffer_size, client_conn_id)
-        except Exception as err:
-            client_ad.log.error("Failed to write data: {}".format(err))
-            return 0
-
-        for _, server_ad in enumerate(list_server_ad):
-            try:
-                server_ad.droid.bluetoothConnectionThroughputRead(
-                    number_buffers, buffer_size)
-                total_num_bytes += number_buffers * buffer_size
-            except Exception as err:
-                server_ad.log.error("Failed to read data: {}".format(err))
-                return 0
-
-    for _, server_ad in enumerate(list_server_ad):
-        try:
-            server_ad.droid.bluetoothConnectionThroughputRead(
-                number_buffers, buffer_size)
-            total_num_bytes += number_buffers * buffer_size
-        except Exception as err:
-            server_ad.log.error("Failed to read data: {}".format(err))
-            return 0
-
-    end_read_time = time.perf_counter()
-
-    test_time = (end_read_time - start_write_time)
-    if (test_time == 0):
-        client_ad.log.error("Buffer transmits cannot take zero time")
-        return 0
-    data_rate = (1.000 * total_num_bytes) / test_time
-    log.info(
-        "Calculated using total write and read times: total_num_bytes={}, "
-        "test_time={}, data rate={:08.0f} bytes/sec, {:08.0f} bits/sec".format(
-            total_num_bytes, test_time, data_rate, (data_rate * 8)))
-    return data_rate
-
-
-def orchestrate_coc_connection(
-        client_ad,
-        server_ad,
-        is_ble,
-        secured_conn=False,
-        le_connection_interval=0,
-        le_tx_data_length=default_le_data_length,
-        accept_timeout_ms=default_bluetooth_socket_timeout_ms,
-        le_min_ce_len=0,
-        le_max_ce_len=0,
-        gatt_disconnection=True):
-    """Sets up the CoC connection between two Android devices.
-
-    Args:
-        client_ad: the Android device performing the connection.
-        server_ad: the Android device accepting the connection.
-        is_ble: using LE transport.
-        secured_conn: using secured connection
-        le_connection_interval: LE Connection interval. 0 means use default.
-        le_tx_data_length: LE Data Length used by BT Controller to transmit.
-        accept_timeout_ms: timeout while waiting for incoming connection.
-        gatt_disconnection: LE GATT disconnection, default is True, False will return
-        bluetooth_gatt and gatt_callback
-    Returns:
-        True if connection was successful or false if unsuccessful,
-        client connection ID,
-        and server connection ID
-    """
-    server_ad.droid.bluetoothStartPairingHelper()
-    client_ad.droid.bluetoothStartPairingHelper()
-
-    adv_callback = None
-    mac_address = None
-    if is_ble:
-        try:
-            # This will start advertising and scanning. Will fail if it could
-            # not find the advertisements from server_ad
-            client_ad.log.info(
-                "Orchestrate_coc_connection: Start BLE advertisement and"
-                "scanning. Secured Connection={}".format(secured_conn))
-            mac_address, adv_callback, scan_callback = (
-                get_mac_address_of_generic_advertisement(client_ad, server_ad))
-        except BtTestUtilsError as err:
-            raise BtCoCTestUtilsError(
-                "Orchestrate_coc_connection: Error in getting mac address: {}".
-                format(err))
-    else:
-        mac_address = server_ad.droid.bluetoothGetLocalAddress()
-        adv_callback = None
-
-    # Adjust the Connection Interval (if necessary)
-    bluetooth_gatt_1 = -1
-    gatt_callback_1 = -1
-    gatt_connected = False
-    if is_ble and (le_connection_interval != 0 or le_min_ce_len != 0 or le_max_ce_len != 0):
-        client_ad.log.info(
-            "Adjusting connection interval={}, le_min_ce_len={}, le_max_ce_len={}"
-            .format(le_connection_interval, le_min_ce_len, le_max_ce_len))
-        try:
-            bluetooth_gatt_1, gatt_callback_1 = setup_gatt_connection(
-                client_ad,
-                mac_address,
-                False,
-                transport=gatt_transport['le'],
-                opportunistic=False)
-            client_ad.droid.bleStopBleScan(scan_callback)
-        except GattTestUtilsError as err:
-            client_ad.log.error(err)
-            if (adv_callback != None):
-                server_ad.droid.bleStopBleAdvertising(adv_callback)
-            return False, None, None
-        client_ad.log.info("setup_gatt_connection returns success")
-        if (le_connection_interval != 0):
-            minInterval = le_connection_interval / le_connection_interval_time_step_ms
-            maxInterval = le_connection_interval / le_connection_interval_time_step_ms
-        else:
-            minInterval = default_le_connection_interval_ms / le_connection_interval_time_step_ms
-            maxInterval = default_le_connection_interval_ms / le_connection_interval_time_step_ms
-        if (le_min_ce_len != 0):
-            le_min_ce_len = le_min_ce_len / le_connection_event_time_step_ms
-        if (le_max_ce_len != 0):
-            le_max_ce_len = le_max_ce_len / le_connection_event_time_step_ms
-
-        return_status = client_ad.droid.gattClientRequestLeConnectionParameters(
-            bluetooth_gatt_1, minInterval, maxInterval, 0,
-            le_default_supervision_timeout, le_min_ce_len, le_max_ce_len)
-        if not return_status:
-            client_ad.log.error(
-                "gattClientRequestLeConnectionParameters returns failure")
-            if (adv_callback != None):
-                server_ad.droid.bleStopBleAdvertising(adv_callback)
-            return False, None, None
-        client_ad.log.info(
-            "gattClientRequestLeConnectionParameters returns success. Interval={}"
-            .format(minInterval))
-        gatt_connected = True
-        # For now, we will only test with 1 Mbit Phy.
-        # TODO: Add explicit tests with 2 MBit Phy.
-        client_ad.droid.gattClientSetPreferredPhy(
-            bluetooth_gatt_1, gatt_phy['1m'], gatt_phy['1m'], 0)
-
-    server_ad.droid.bluetoothSocketConnBeginAcceptThreadPsm(
-        accept_timeout_ms, is_ble, secured_conn)
-
-    psm_value = server_ad.droid.bluetoothSocketConnGetPsm()
-    client_ad.log.info("Assigned PSM value={}".format(psm_value))
-
-    client_ad.droid.bluetoothSocketConnBeginConnectThreadPsm(
-        mac_address, is_ble, psm_value, secured_conn)
-
-    if (le_tx_data_length != default_le_data_length) and is_ble:
-        client_ad.log.info("orchestrate_coc_connection: call "
-                           "bluetoothSocketRequestMaximumTxDataLength")
-        client_ad.droid.bluetoothSocketRequestMaximumTxDataLength()
-
-    end_time = time.time() + bt_default_timeout
-    test_result = False
-    while time.time() < end_time:
-        if len(server_ad.droid.bluetoothSocketConnActiveConnections()) > 0:
-            server_ad.log.info("CoC Server Connection Active")
-            if len(client_ad.droid.bluetoothSocketConnActiveConnections()) > 0:
-                client_ad.log.info("CoC Client Connection Active")
-                test_result = True
-                break
-        time.sleep(1)
-
-    if (adv_callback != None):
-        server_ad.droid.bleStopBleAdvertising(adv_callback)
-
-    if not test_result:
-        client_ad.log.error("Failed to establish an CoC connection")
-        return False, None, None
-
-    if len(client_ad.droid.bluetoothSocketConnActiveConnections()) > 0:
-        server_ad.log.info(
-            "CoC client_ad Connection Active, num=%d",
-            len(client_ad.droid.bluetoothSocketConnActiveConnections()))
-    else:
-        server_ad.log.info("Error CoC client_ad Connection Inactive")
-        client_ad.log.info("Error CoC client_ad Connection Inactive")
-
-    # Wait for the client to be ready
-    client_conn_id = None
-    while (client_conn_id == None):
-        client_conn_id = client_ad.droid.bluetoothGetLastConnId()
-        if (client_conn_id != None):
-            break
-        time.sleep(1)
-
-    # Wait for the server to be ready
-    server_conn_id = None
-    while (server_conn_id == None):
-        server_conn_id = server_ad.droid.bluetoothGetLastConnId()
-        if (server_conn_id != None):
-            break
-        time.sleep(1)
-
-    client_ad.log.info(
-        "orchestrate_coc_connection: client conn id={}, server conn id={}".
-        format(client_conn_id, server_conn_id))
-
-    if gatt_disconnection:
-
-        if gatt_connected:
-            disconnect_gatt_connection(client_ad, bluetooth_gatt_1,
-                                       gatt_callback_1)
-            client_ad.droid.gattClientClose(bluetooth_gatt_1)
-
-        return True, client_conn_id, server_conn_id
-
-    else:
-        return True, client_conn_id, server_conn_id, bluetooth_gatt_1, gatt_callback_1
diff --git a/src/antlion/test_utils/bt/bt_constants.py b/src/antlion/test_utils/bt/bt_constants.py
deleted file mode 100644
index 5d7f883..0000000
--- a/src/antlion/test_utils/bt/bt_constants.py
+++ /dev/null
@@ -1,798 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-### Generic Constants Begin ###
-
-bt_default_timeout = 15
-default_rfcomm_timeout_ms = 10000
-default_bluetooth_socket_timeout_ms = 10000
-pan_connect_timeout = 5
-bt_discovery_timeout = 3
-small_timeout = 0.0001
-
-# Time delay (in seconds) at the end of each LE CoC Test to give sufficient time
-# for the ACL LE link to be disconnected. The ACL link stays connected after
-# L2CAP disconnects.  An example of the timeout is L2CAP_LINK_INACTIVITY_TOUT.
-# This delay must be greater than the maximum of these timeouts.
-# TODO: Investigate the use of broadcast intent
-# BluetoothDevice.ACTION_ACL_DISCONNECTED to replace this delay method.
-l2cap_max_inactivity_delay_after_disconnect = 5
-
-# LE specifications related constants
-le_connection_interval_time_step_ms = 1.25
-le_default_supervision_timeout = 2000
-default_le_data_length = 23
-default_le_connection_interval_ms = 30
-le_connection_event_time_step_ms = 0.625
-
-# Headers of LE L2CAP Connection-oriented Channels. See section 3.4, Vol
-# 3, Part A, Version 5.0.
-l2cap_header_size = 4
-l2cap_coc_sdu_length_field_size = 2
-l2cap_coc_header_size = l2cap_header_size + l2cap_coc_sdu_length_field_size
-
-java_integer = {"min": -2147483648, "max": 2147483647}
-
-btsnoop_log_path_on_device = "/data/misc/bluetooth/logs/btsnoop_hci.log"
-btsnoop_last_log_path_on_device = \
-    "/data/misc/bluetooth/logs/btsnoop_hci.log.last"
-pairing_variant_passkey_confirmation = 2
-
-# Callback strings
-scan_result = "BleScan{}onScanResults"
-scan_failed = "BleScan{}onScanFailed"
-batch_scan_result = "BleScan{}onBatchScanResult"
-adv_fail = "BleAdvertise{}onFailure"
-adv_succ = "BleAdvertise{}onSuccess"
-bluetooth_off = "BluetoothStateChangedOff"
-bluetooth_on = "BluetoothStateChangedOn"
-mtu_changed = "GattConnect{}onMtuChanged"
-advertising_set_started = "AdvertisingSet{}onAdvertisingSetStarted"
-advertising_set_stopped = "AdvertisingSet{}onAdvertisingSetStopped"
-advertising_set_on_own_address_read = "AdvertisingSet{}onOwnAddressRead"
-advertising_set_enabled = "AdvertisingSet{}onAdvertisingEnabled"
-advertising_set_data_set = "AdvertisingSet{}onAdvertisingDataSet"
-advertising_set_scan_response_set = "AdvertisingSet{}onScanResponseDataSet"
-advertising_set_parameters_update = \
-    "AdvertisingSet{}onAdvertisingParametersUpdated"
-advertising_set_periodic_parameters_updated = \
-    "AdvertisingSet{}onPeriodicAdvertisingParametersUpdated"
-advertising_set_periodic_data_set = \
-    "AdvertisingSet{}onPeriodicAdvertisingDataSet"
-advertising_set_periodic_enable = "AdvertisingSet{}onPeriodicAdvertisingEnable"
-bluetooth_profile_connection_state_changed = \
-    "BluetoothProfileConnectionStateChanged"
-bluetooth_le_on = "BleStateChangedOn"
-bluetooth_le_off = "BleStateChangedOff"
-bluetooth_a2dp_codec_config_changed = "BluetoothA2dpCodecConfigChanged"
-# End Callback Strings
-
-batch_scan_not_supported_list = [
-    "Nexus 4",
-    "Nexus 5",
-    "Nexus 7",
-]
-
-### Generic Constants End ###
-
-### Bluetooth Constants Begin ###
-
-# rfcomm test uuids
-rfcomm_secure_uuid = "fa87c0d0-afac-11de-8a39-0800200c9a66"
-rfcomm_insecure_uuid = "8ce255c0-200a-11e0-ac64-0800200c9a66"
-
-# bluetooth socket connection test uuid
-bluetooth_socket_conn_test_uuid = "12345678-1234-5678-9abc-123456789abc"
-
-# Bluetooth Adapter Scan Mode Types
-bt_scan_mode_types = {
-    "state_off": -1,
-    "none": 0,
-    "connectable": 1,
-    "connectable_discoverable": 3
-}
-
-# Bluetooth Adapter State Constants
-bt_adapter_states = {
-    "off": 10,
-    "turning_on": 11,
-    "on": 12,
-    "turning_off": 13,
-    "ble_turning_on": 14,
-    "ble_on": 15,
-    "ble_turning_off": 16
-}
-
-# Should be kept in sync with BluetoothProfile.java
-bt_profile_constants = {
-    "headset": 1,
-    "a2dp": 2,
-    "health": 3,
-    "input_device": 4,
-    "pan": 5,
-    "pbap_server": 6,
-    "gatt": 7,
-    "gatt_server": 8,
-    "map": 9,
-    "sap": 10,
-    "a2dp_sink": 11,
-    "avrcp_controller": 12,
-    "headset_client": 16,
-    "pbap_client": 17,
-    "map_mce": 18
-}
-
-# Bluetooth RFCOMM UUIDs as defined by the SIG
-bt_rfcomm_uuids = {
-    "default_uuid": "457807c0-4897-11df-9879-0800200c9a66",
-    "base_uuid": "00000000-0000-1000-8000-00805F9B34FB",
-    "sdp": "00000001-0000-1000-8000-00805F9B34FB",
-    "udp": "00000002-0000-1000-8000-00805F9B34FB",
-    "rfcomm": "00000003-0000-1000-8000-00805F9B34FB",
-    "tcp": "00000004-0000-1000-8000-00805F9B34FB",
-    "tcs_bin": "00000005-0000-1000-8000-00805F9B34FB",
-    "tcs_at": "00000006-0000-1000-8000-00805F9B34FB",
-    "att": "00000007-0000-1000-8000-00805F9B34FB",
-    "obex": "00000008-0000-1000-8000-00805F9B34FB",
-    "ip": "00000009-0000-1000-8000-00805F9B34FB",
-    "ftp": "0000000A-0000-1000-8000-00805F9B34FB",
-    "http": "0000000C-0000-1000-8000-00805F9B34FB",
-    "wsp": "0000000E-0000-1000-8000-00805F9B34FB",
-    "bnep": "0000000F-0000-1000-8000-00805F9B34FB",
-    "upnp": "00000010-0000-1000-8000-00805F9B34FB",
-    "hidp": "00000011-0000-1000-8000-00805F9B34FB",
-    "hardcopy_control_channel": "00000012-0000-1000-8000-00805F9B34FB",
-    "hardcopy_data_channel": "00000014-0000-1000-8000-00805F9B34FB",
-    "hardcopy_notification": "00000016-0000-1000-8000-00805F9B34FB",
-    "avctp": "00000017-0000-1000-8000-00805F9B34FB",
-    "avdtp": "00000019-0000-1000-8000-00805F9B34FB",
-    "cmtp": "0000001B-0000-1000-8000-00805F9B34FB",
-    "mcap_control_channel": "0000001E-0000-1000-8000-00805F9B34FB",
-    "mcap_data_channel": "0000001F-0000-1000-8000-00805F9B34FB",
-    "l2cap": "00000100-0000-1000-8000-00805F9B34FB"
-}
-
-# Should be kept in sync with BluetoothProfile#STATE_* constants.
-bt_profile_states = {
-    "disconnected": 0,
-    "connecting": 1,
-    "connected": 2,
-    "disconnecting": 3
-}
-
-# Access Levels from BluetoothDevice.
-bt_access_levels = {"access_allowed": 1, "access_denied": 2}
-
-# Priority levels as defined in BluetoothProfile.java.
-bt_priority_levels = {
-    "auto_connect": 1000,
-    "on": 100,
-    "off": 0,
-    "undefined": -1
-}
-
-# A2DP codec configuration constants as defined in
-# frameworks/base/core/java/android/bluetooth/BluetoothCodecConfig.java
-codec_types = {
-    'SBC': 0,
-    'AAC': 1,
-    'APTX': 2,
-    'APTX-HD': 3,
-    'LDAC': 4,
-    'MAX': 5,
-    'INVALID': 1000000
-}
-
-codec_priorities = {'DISABLED': -1, 'DEFAULT': 0, 'HIGHEST': 1000000}
-
-sample_rates = {
-    'NONE': 0,
-    '44100': 0x1 << 0,
-    '48000': 0x1 << 1,
-    '88200': 0x1 << 2,
-    '96000': 0x1 << 3,
-    '176400': 0x1 << 4,
-    '192000': 0x1 << 5
-}
-
-bits_per_samples = {'NONE': 0, '16': 0x1 << 0, '24': 0x1 << 1, '32': 0x1 << 2}
-
-channel_modes = {'NONE': 0, 'MONO': 0x1 << 0, 'STEREO': 0x1 << 1}
-
-# Bluetooth HID constants.
-hid_connection_timeout = 5
-
-# Bluetooth HID EventFacade constants.
-hid_on_set_report_event = "onSetReport"
-hid_on_get_report_event = "onGetReport"
-hid_on_set_protocol_event = "onSetProtocol"
-hid_on_intr_data_event = "onInterruptData"
-hid_on_virtual_cable_unplug_event = "onVirtualCableUnplug"
-hid_id_keyboard = 1
-hid_id_mouse = 2
-hid_default_event_timeout = 15
-hid_default_set_report_payload = "Haha"
-
-### Bluetooth Constants End ###
-
-### Bluetooth Low Energy Constants Begin ###
-
-# Bluetooth Low Energy scan callback types
-ble_scan_settings_callback_types = {
-    "all_matches": 1,
-    "first_match": 2,
-    "match_lost": 4,
-    "found_and_lost": 6
-}
-
-# Bluetooth Low Energy scan settings match mode
-ble_scan_settings_match_modes = {"aggresive": 1, "sticky": 2}
-
-# Bluetooth Low Energy scan settings match nums
-ble_scan_settings_match_nums = {"one": 1, "few": 2, "max": 3}
-
-# Bluetooth Low Energy scan settings result types
-ble_scan_settings_result_types = {"full": 0, "abbreviated": 1}
-
-# Bluetooth Low Energy scan settings mode
-ble_scan_settings_modes = {
-    "opportunistic": -1,
-    "low_power": 0,
-    "balanced": 1,
-    "low_latency": 2
-}
-
-# Bluetooth Low Energy scan settings report delay millis
-ble_scan_settings_report_delay_milli_seconds = {
-    "min": 0,
-    "max": 9223372036854775807
-}
-
-# Bluetooth Low Energy scan settings phy
-ble_scan_settings_phys = {"1m": 1, "coded": 3, "all_supported": 255}
-
-# Bluetooth Low Energy advertise settings types
-ble_advertise_settings_types = {"non_connectable": 0, "connectable": 1}
-
-# Bluetooth Low Energy advertise settings modes
-ble_advertise_settings_modes = {
-    "low_power": 0,
-    "balanced": 1,
-    "low_latency": 2
-}
-
-# Bluetooth Low Energy advertise settings tx power
-ble_advertise_settings_tx_powers = {
-    "ultra_low": 0,
-    "low": 1,
-    "medium": 2,
-    "high": 3
-}
-
-# Bluetooth Low Energy advertise settings own address type
-ble_advertise_settings_own_address_types = {
-    "public": 0,
-    "random": 1
-}
-
-# Bluetooth Low Energy service uuids for specific devices
-ble_uuids = {
-    "p_service": "0000feef-0000-1000-8000-00805f9b34fb",
-    "hr_service": "0000180d-0000-1000-8000-00805f9b34fb"
-}
-
-# Bluetooth Low Energy advertising error codes
-ble_advertise_error_code = {
-    "data_too_large": 1,
-    "too_many_advertisers": 2,
-    "advertisement_already_started": 3,
-    "bluetooth_internal_failure": 4,
-    "feature_not_supported": 5
-}
-
-### Bluetooth Low Energy Constants End ###
-
-### Bluetooth GATT Constants Begin ###
-
-# Gatt Callback error messages
-gatt_cb_err = {
-    "char_write_req_err":
-    "Characteristic Write Request event not found. Expected {}",
-    "char_write_err": "Characteristic Write event not found. Expected {}",
-    "desc_write_req_err":
-    "Descriptor Write Request event not found. Expected {}",
-    "desc_write_err": "Descriptor Write event not found. Expected {}",
-    "char_read_err": "Characteristic Read event not found. Expected {}",
-    "char_read_req_err": "Characteristic Read Request not found. Expected {}",
-    "desc_read_err": "Descriptor Read event not found. Expected {}",
-    "desc_read_req_err":
-    "Descriptor Read Request event not found. Expected {}",
-    "rd_remote_rssi_err": "Read Remote RSSI event not found. Expected {}",
-    "gatt_serv_disc_err":
-    "GATT Services Discovered event not found. Expected {}",
-    "serv_added_err": "Service Added event not found. Expected {}",
-    "mtu_changed_err": "MTU Changed event not found. Expected {}",
-    "mtu_serv_changed_err": "MTU Server Changed event not found. Expected {}",
-    "gatt_conn_changed_err":
-    "GATT Connection Changed event not found. Expected {}",
-    "char_change_err":
-    "GATT Characteristic Changed event not fond. Expected {}",
-    "phy_read_err": "Phy Read event not fond. Expected {}",
-    "phy_update_err": "Phy Update event not fond. Expected {}",
-    "exec_write_err": "GATT Execute Write event not found. Expected {}"
-}
-
-# GATT callback strings as defined in GattClientFacade.java and
-# GattServerFacade.java implemented callbacks.
-gatt_cb_strings = {
-    "char_write_req": "GattServer{}onCharacteristicWriteRequest",
-    "exec_write": "GattServer{}onExecuteWrite",
-    "char_write": "GattConnect{}onCharacteristicWrite",
-    "desc_write_req": "GattServer{}onDescriptorWriteRequest",
-    "desc_write": "GattConnect{}onDescriptorWrite",
-    "char_read": "GattConnect{}onCharacteristicRead",
-    "char_read_req": "GattServer{}onCharacteristicReadRequest",
-    "desc_read": "GattConnect{}onDescriptorRead",
-    "desc_read_req": "GattServer{}onDescriptorReadRequest",
-    "rd_remote_rssi": "GattConnect{}onReadRemoteRssi",
-    "rd_remote_ble_rssi": "BleScan{}onScanResults",
-    "gatt_serv_disc": "GattConnect{}onServicesDiscovered",
-    "serv_added": "GattServer{}onServiceAdded",
-    "mtu_changed": "GattConnect{}onMtuChanged",
-    "mtu_serv_changed": "GattServer{}onMtuChanged",
-    "gatt_conn_change": "GattConnect{}onConnectionStateChange",
-    "char_change": "GattConnect{}onCharacteristicChanged",
-    "phy_read": "GattConnect{}onPhyRead",
-    "phy_update": "GattConnect{}onPhyUpdate",
-    "serv_phy_read": "GattServer{}onPhyRead",
-    "serv_phy_update": "GattServer{}onPhyUpdate",
-}
-
-# GATT event dictionary of expected callbacks and errors.
-gatt_event = {
-    "char_write_req": {
-        "evt": gatt_cb_strings["char_write_req"],
-        "err": gatt_cb_err["char_write_req_err"]
-    },
-    "exec_write": {
-        "evt": gatt_cb_strings["exec_write"],
-        "err": gatt_cb_err["exec_write_err"]
-    },
-    "char_write": {
-        "evt": gatt_cb_strings["char_write"],
-        "err": gatt_cb_err["char_write_err"]
-    },
-    "desc_write_req": {
-        "evt": gatt_cb_strings["desc_write_req"],
-        "err": gatt_cb_err["desc_write_req_err"]
-    },
-    "desc_write": {
-        "evt": gatt_cb_strings["desc_write"],
-        "err": gatt_cb_err["desc_write_err"]
-    },
-    "char_read": {
-        "evt": gatt_cb_strings["char_read"],
-        "err": gatt_cb_err["char_read_err"]
-    },
-    "char_read_req": {
-        "evt": gatt_cb_strings["char_read_req"],
-        "err": gatt_cb_err["char_read_req_err"]
-    },
-    "desc_read": {
-        "evt": gatt_cb_strings["desc_read"],
-        "err": gatt_cb_err["desc_read_err"]
-    },
-    "desc_read_req": {
-        "evt": gatt_cb_strings["desc_read_req"],
-        "err": gatt_cb_err["desc_read_req_err"]
-    },
-    "rd_remote_rssi": {
-        "evt": gatt_cb_strings["rd_remote_rssi"],
-        "err": gatt_cb_err["rd_remote_rssi_err"]
-    },
-    "gatt_serv_disc": {
-        "evt": gatt_cb_strings["gatt_serv_disc"],
-        "err": gatt_cb_err["gatt_serv_disc_err"]
-    },
-    "serv_added": {
-        "evt": gatt_cb_strings["serv_added"],
-        "err": gatt_cb_err["serv_added_err"]
-    },
-    "mtu_changed": {
-        "evt": gatt_cb_strings["mtu_changed"],
-        "err": gatt_cb_err["mtu_changed_err"]
-    },
-    "gatt_conn_change": {
-        "evt": gatt_cb_strings["gatt_conn_change"],
-        "err": gatt_cb_err["gatt_conn_changed_err"]
-    },
-    "char_change": {
-        "evt": gatt_cb_strings["char_change"],
-        "err": gatt_cb_err["char_change_err"]
-    },
-    "phy_read": {
-        "evt": gatt_cb_strings["phy_read"],
-        "err": gatt_cb_err["phy_read_err"]
-    },
-    "phy_update": {
-        "evt": gatt_cb_strings["phy_update"],
-        "err": gatt_cb_err["phy_update_err"]
-    },
-    "serv_phy_read": {
-        "evt": gatt_cb_strings["serv_phy_read"],
-        "err": gatt_cb_err["phy_read_err"]
-    },
-    "serv_phy_update": {
-        "evt": gatt_cb_strings["serv_phy_update"],
-        "err": gatt_cb_err["phy_update_err"]
-    }
-}
-
-# Matches constants of connection states defined in BluetoothGatt.java
-gatt_connection_state = {
-    "disconnected": 0,
-    "connecting": 1,
-    "connected": 2,
-    "disconnecting": 3,
-    "closed": 4
-}
-
-# Matches constants of Bluetooth GATT Characteristic values as defined
-# in BluetoothGattCharacteristic.java
-gatt_characteristic = {
-    "property_broadcast": 0x01,
-    "property_read": 0x02,
-    "property_write_no_response": 0x04,
-    "property_write": 0x08,
-    "property_notify": 0x10,
-    "property_indicate": 0x20,
-    "property_signed_write": 0x40,
-    "property_extended_props": 0x80,
-    "permission_read": 0x01,
-    "permission_read_encrypted": 0x02,
-    "permission_read_encrypted_mitm": 0x04,
-    "permission_write": 0x10,
-    "permission_write_encrypted": 0x20,
-    "permission_write_encrypted_mitm": 0x40,
-    "permission_write_signed": 0x80,
-    "permission_write_signed_mitm": 0x100,
-    "write_type_default": 0x02,
-    "write_type_no_response": 0x01,
-    "write_type_signed": 0x04,
-}
-
-# Matches constants of Bluetooth GATT Characteristic values as defined
-# in BluetoothGattDescriptor.java
-gatt_descriptor = {
-    "enable_notification_value": [0x01, 0x00],
-    "enable_indication_value": [0x02, 0x00],
-    "disable_notification_value": [0x00, 0x00],
-    "permission_read": 0x01,
-    "permission_read_encrypted": 0x02,
-    "permission_read_encrypted_mitm": 0x04,
-    "permission_write": 0x10,
-    "permission_write_encrypted": 0x20,
-    "permission_write_encrypted_mitm": 0x40,
-    "permission_write_signed": 0x80,
-    "permission_write_signed_mitm": 0x100
-}
-
-# https://www.bluetooth.com/specifications/gatt/descriptors
-gatt_char_desc_uuids = {
-    "char_ext_props": '00002900-0000-1000-8000-00805f9b34fb',
-    "char_user_desc": '00002901-0000-1000-8000-00805f9b34fb',
-    "client_char_cfg": '00002902-0000-1000-8000-00805f9b34fb',
-    "server_char_cfg": '00002903-0000-1000-8000-00805f9b34fb',
-    "char_fmt_uuid": '00002904-0000-1000-8000-00805f9b34fb',
-    "char_agreg_fmt": '00002905-0000-1000-8000-00805f9b34fb',
-    "char_valid_range": '00002906-0000-1000-8000-00805f9b34fb',
-    "external_report_reference": '00002907-0000-1000-8000-00805f9b34fb',
-    "report_reference": '00002908-0000-1000-8000-00805f9b34fb'
-}
-
-# https://www.bluetooth.com/specifications/gatt/characteristics
-gatt_char_types = {
-    "device_name": '00002a00-0000-1000-8000-00805f9b34fb',
-    "appearance": '00002a01-0000-1000-8000-00805f9b34fb',
-    "peripheral_priv_flag": '00002a02-0000-1000-8000-00805f9b34fb',
-    "reconnection_address": '00002a03-0000-1000-8000-00805f9b34fb',
-    "peripheral_pref_conn": '00002a04-0000-1000-8000-00805f9b34fb',
-    "service_changed": '00002a05-0000-1000-8000-00805f9b34fb',
-    "system_id": '00002a23-0000-1000-8000-00805f9b34fb',
-    "model_number_string": '00002a24-0000-1000-8000-00805f9b34fb',
-    "serial_number_string": '00002a25-0000-1000-8000-00805f9b34fb',
-    "firmware_revision_string": '00002a26-0000-1000-8000-00805f9b34fb',
-    "hardware_revision_string": '00002a27-0000-1000-8000-00805f9b34fb',
-    "software_revision_string": '00002a28-0000-1000-8000-00805f9b34fb',
-    "manufacturer_name_string": '00002a29-0000-1000-8000-00805f9b34fb',
-    "pnp_id": '00002a50-0000-1000-8000-00805f9b34fb',
-}
-
-# Matches constants of Bluetooth GATT Characteristic values as defined
-# in BluetoothGattCharacteristic.java
-gatt_characteristic_value_format = {
-    "string": 0x1,
-    "byte": 0x2,
-    "sint8": 0x21,
-    "uint8": 0x11,
-    "sint16": 0x22,
-    "unit16": 0x12,
-    "sint32": 0x24,
-    "uint32": 0x14
-}
-
-# Matches constants of Bluetooth Gatt Service types as defined in
-# BluetoothGattService.java
-gatt_service_types = {"primary": 0, "secondary": 1}
-
-# Matches constants of Bluetooth Gatt Connection Priority values as defined in
-# BluetoothGatt.java
-gatt_connection_priority = {"balanced": 0, "high": 1, "low_power": 2}
-
-# Min and max MTU values
-gatt_mtu_size = {"min": 23, "max": 217}
-
-# Gatt Characteristic attribute lengths
-gatt_characteristic_attr_length = {"attr_1": 1, "attr_2": 3, "attr_3": 15}
-
-# Matches constants of Bluetooth Gatt operations status as defined in
-# BluetoothGatt.java
-gatt_status = {"success": 0, "failure": 0x101}
-
-# Matches constants of Bluetooth transport values as defined in
-# BluetoothDevice.java
-gatt_transport = {"auto": 0x00, "bredr": 0x01, "le": 0x02}
-
-# Matches constants of Bluetooth physical channeling values as defined in
-# BluetoothDevice.java
-gatt_phy = {"1m": 1, "2m": 2, "le_coded": 3}
-
-# Matches constants of Bluetooth physical channeling bitmask values as defined
-# in BluetoothDevice.java
-gatt_phy_mask = {"1m_mask": 1, "2m_mask": 2, "coded_mask": 4}
-
-# Values as defiend in the Bluetooth GATT specification
-gatt_server_responses = {
-    "GATT_SUCCESS": 0x0,
-    "GATT_FAILURE": 0x1,
-    "GATT_READ_NOT_PERMITTED": 0x2,
-    "GATT_WRITE_NOT_PERMITTED": 0x3,
-    "GATT_INVALID_PDU": 0x4,
-    "GATT_INSUFFICIENT_AUTHENTICATION": 0x5,
-    "GATT_REQUEST_NOT_SUPPORTED": 0x6,
-    "GATT_INVALID_OFFSET": 0x7,
-    "GATT_INSUFFICIENT_AUTHORIZATION": 0x8,
-    "GATT_INVALID_ATTRIBUTE_LENGTH": 0xd,
-    "GATT_INSUFFICIENT_ENCRYPTION": 0xf,
-    "GATT_CONNECTION_CONGESTED": 0x8f,
-    "GATT_13_ERR": 0x13,
-    "GATT_12_ERR": 0x12,
-    "GATT_0C_ERR": 0x0C,
-    "GATT_16": 0x16
-}
-
-### Bluetooth GATT Constants End ###
-
-### Chameleon Constants Begin ###
-
-# Chameleon audio bits per sample.
-audio_bits_per_sample_16 = 16
-audio_bits_per_sample_24 = 24
-audio_bits_per_sample_32 = 32
-
-# Chameleon audio sample rates.
-audio_sample_rate_44100 = 44100
-audio_sample_rate_48000 = 48000
-audio_sample_rate_88200 = 88200
-audio_sample_rate_96000 = 96000
-
-# Chameleon audio channel modes.
-audio_channel_mode_mono = 1
-audio_channel_mode_stereo = 2
-audio_channel_mode_8 = 8
-
-# Chameleon time delays.
-delay_after_binding_seconds = 0.5
-delay_before_record_seconds = 0.5
-silence_wait_seconds = 5
-
-# Chameleon bus endpoints.
-fpga_linein_bus_endpoint = 'Chameleon FPGA line-in'
-headphone_bus_endpoint = 'Cros device headphone'
-
-### Chameleon Constants End ###
-
-# Begin logcat strings dict"""
-logcat_strings = {
-    "media_playback_vol_changed": "onRouteVolumeChanged",
-}
-
-# End logcat strings dict"""
-
-### Begin Service Discovery UUIDS ###
-# Values match the Bluetooth SIG defined values: """
-""" https://www.bluetooth.com/specifications/assigned-numbers/service-discovery """
-sig_uuid_constants = {
-    "BASE_UUID": "0000{}-0000-1000-8000-00805F9B34FB",
-    "SDP": "0001",
-    "UDP": "0002",
-    "RFCOMM": "0003",
-    "TCP": "0004",
-    "TCS-BIN": "0005",
-    "TCS-AT": "0006",
-    "ATT": "0007",
-    "OBEX": "0008",
-    "IP": "0009",
-    "FTP": "000A",
-    "HTTP": "000C",
-    "WSP": "000E",
-    "BNEP": "000F",
-    "UPNP": "0010",
-    "HIDP": "0011",
-    "HardcopyControlChannel": "0012",
-    "HardcopyDataChannel": "0014",
-    "HardcopyNotification": "0016",
-    "AVCTP": "0017",
-    "AVDTP": "0019",
-    "CMTP": "001B",
-    "MCAPControlChannel": "001E",
-    "MCAPDataChannel": "001F",
-    "L2CAP": "0100",
-    "ServiceDiscoveryServerServiceClassID": "1000",
-    "BrowseGroupDescriptorServiceClassID": "1001",
-    "SerialPort": "1101",
-    "LANAccessUsingPPP": "1102",
-    "DialupNetworking": "1103",
-    "IrMCSync": "1104",
-    "OBEXObjectPush": "1105",
-    "OBEXFileTransfer": "1106",
-    "IrMCSyncCommand": "1107",
-    "Headset": "1108",
-    "CordlessTelephony": "1109",
-    "AudioSource": "110A",
-    "AudioSink": "110B",
-    "A/V_RemoteControlTarget": "110C",
-    "AdvancedAudioDistribution": "110D",
-    "A/V_RemoteControl": "110E",
-    "A/V_RemoteControlController": "110F",
-    "Intercom": "1110",
-    "Fax": "1111",
-    "Headset - Audio Gateway (AG)": "1112",
-    "WAP": "1113",
-    "WAP_CLIENT": "1114",
-    "PANU": "1115",
-    "NAP": "1116",
-    "GN": "1117",
-    "DirectPrinting": "1118",
-    "ReferencePrinting": "1119",
-    "ImagingResponder": "111B",
-    "ImagingAutomaticArchive": "111C",
-    "ImagingReferencedObjects": "111D",
-    "Handsfree": "111E",
-    "HandsfreeAudioGateway": "111F",
-    "DirectPrintingReferenceObjectsService": "1120",
-    "ReflectedUI": "1121",
-    "BasicPrinting": "1122",
-    "PrintingStatus": "1123",
-    "HumanInterfaceDeviceService": "1124",
-    "HardcopyCableReplacement": "1125",
-    "HCR_Print": "1126",
-    "HCR_Scan": "1127",
-    "Common_ISDN_Access": "1128",
-    "SIM_Access": "112D",
-    "Phonebook Access - PCE": "112E",
-    "Phonebook Access - PSE": "112F",
-    "Phonebook Access": "1130",
-    "Headset - HS": "1131",
-    "Message Access Server": "1132",
-    "Message Notification Server": "1133",
-    "Message Access Profile": "1134",
-    "GNSS": "1135",
-    "GNSS_Server": "1136",
-    "PnPInformation": "1200",
-    "GenericNetworking": "1201",
-    "GenericFileTransfer": "1202",
-    "GenericAudio": "1203",
-    "GenericTelephony": "1204",
-    "UPNP_Service": "1205",
-    "UPNP_IP_Service": "1206",
-    "ESDP_UPNP_IP_PAN": "1300",
-    "ESDP_UPNP_IP_LAP": "1301",
-    "ESDP_UPNP_L2CAP": "1302",
-    "VideoSource": "1303",
-    "VideoSink": "1304",
-    "VideoDistribution": "1305",
-    "HDP": "1400"
-}
-
-### End Service Discovery UUIDS ###
-
-### Begin Appearance Constants ###
-# https://www.bluetooth.com/wp-content/uploads/Sitecore-Media-Library/Gatt/Xml/Characteristics/org.bluetooth.characteristic.gap.appearance.xml
-sig_appearance_constants = {
-    "UNKNOWN": 0,
-    "PHONE": 64,
-    "COMPUTER": 128,
-    "WATCH": 192,
-    "WATCH_SPORTS": 193,
-    "CLOCK": 256,
-    "DISPLAY": 320,
-    "REMOTE_CONTROL": 384,
-    "EYE_GLASSES": 448,
-    "TAG": 512,
-    "KEYRING": 576,
-    "MEDIA_PLAYER": 640,
-    "BARCODE_SCANNER": 704,
-    "THERMOMETER": 768,
-    "THERMOMETER_EAR": 769,
-    "HEART_RATE_SENSOR": 832,
-    "HEART_RATE_SENSOR_BELT": 833,
-    "BLOOD_PRESSURE": 896,
-    "BLOOD_PRESSURE_ARM": 897,
-    "BLOOD_PRESSURE_WRIST": 898,
-    "HID": 960,
-    "HID_KEYBOARD": 961,
-    "HID_MOUSE": 962,
-    "HID_JOYSTICK": 963,
-    "HID_GAMEPAD": 964,
-    "HID_DIGITIZER_TABLET": 965,
-    "HID_CARD_READER": 966,
-    "HID_DIGITAL_PEN": 967,
-    "HID_BARCODE_SCANNER": 968,
-    "GLUCOSE_METER": 1024,
-    "RUNNING_WALKING_SENSOR": 1088,
-    "RUNNING_WALKING_SENSOR_IN_SHOE": 1089,
-    "RUNNING_WALKING_SENSOR_ON_SHOE": 1090,
-    "RUNNING_WALKING_SENSOR_ON_HIP": 1091,
-    "CYCLING": 1152,
-    "CYCLING_COMPUTER": 1153,
-    "CYCLING_SPEED_SENSOR": 1154,
-    "CYCLING_CADENCE_SENSOR": 1155,
-    "CYCLING_POWER_SENSOR": 1156,
-    "CYCLING_SPEED_AND_CADENCE_SENSOR": 1157,
-    "PULSE_OXIMETER": 3136,
-    "PULSE_OXIMETER_FINGERTIP": 3137,
-    "PULSE_OXIMETER_WRIST": 3138,
-    "WEIGHT_SCALE": 3200,
-    "PERSONAL_MOBILITY": 3264,
-    "PERSONAL_MOBILITY_WHEELCHAIR": 3265,
-    "PERSONAL_MOBILITY_SCOOTER": 3266,
-    "GLUCOSE_MONITOR": 3328,
-    "SPORTS_ACTIVITY": 5184,
-    "SPORTS_ACTIVITY_LOCATION_DISPLAY": 5185,
-    "SPORTS_ACTIVITY_LOCATION_AND_NAV_DISPLAY": 5186,
-    "SPORTS_ACTIVITY_LOCATION_POD": 5187,
-    "SPORTS_ACTIVITY_LOCATION_AND_NAV_POD": 5188,
-}
-
-### End Appearance Constants ###
-
-# Attribute Record values from the Bluetooth Specification
-# Version 5, Vol 3, Part B
-bt_attribute_values = {
-    'ATTR_SERVICE_RECORD_HANDLE': 0x0000,
-    'ATTR_SERVICE_CLASS_ID_LIST': 0x0001,
-    'ATTR_SERVICE_RECORD_STATE': 0x0002,
-    'ATTR_SERVICE_ID': 0x0003,
-    'ATTR_PROTOCOL_DESCRIPTOR_LIST': 0x0004,
-    'ATTR_ADDITIONAL_PROTOCOL_DESCRIPTOR_LIST': 0x000D,
-    'ATTR_BROWSE_GROUP_LIST': 0x0005,
-    'ATTR_LANGUAGE_BASE_ATTRIBUTE_ID_LIST': 0x0006,
-    'ATTR_SERVICE_INFO_TIME_TO_LIVE': 0x0007,
-    'ATTR_SERVICE_AVAILABILITY': 0x0008,
-    'ATTR_BLUETOOTH_PROFILE_DESCRIPTOR_LIST': 0x0009,
-    'ATTR_A2DP_SUPPORTED_FEATURES': 0x0311,
-}
diff --git a/src/antlion/test_utils/bt/bt_contacts_utils.py b/src/antlion/test_utils/bt/bt_contacts_utils.py
deleted file mode 100644
index 6abafcb..0000000
--- a/src/antlion/test_utils/bt/bt_contacts_utils.py
+++ /dev/null
@@ -1,428 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Compare_contacts accepts 2 vcf files, extracts full name, email, and
-telephone numbers from each and reports how many unique cards it finds across
-the two files.
-"""
-
-from mmap import ACCESS_READ
-from mmap import mmap
-import logging
-import re
-import random
-import string
-import time
-from antlion.utils import exe_cmd
-import queue
-
-# CallLog types
-INCOMMING_CALL_TYPE = "1"
-OUTGOING_CALL_TYPE = "2"
-MISSED_CALL_TYPE = "3"
-
-# Callback strings.
-CONTACTS_CHANGED_CALLBACK = "ContactsChanged"
-CALL_LOG_CHANGED = "CallLogChanged"
-CONTACTS_ERASED_CALLBACK = "ContactsErased"
-
-# URI for contacts database on Nexus.
-CONTACTS_URI = "content://com.android.contacts/data/phones"
-
-# Path for temporary file storage on device.
-STORAGE_PATH = "/storage/emulated/0/Download/"
-
-PBAP_SYNC_TIME = 30
-
-log = logging
-
-
-def parse_contacts(file_name):
-    """Read vcf file and generate a list of contacts.
-
-    Contacts full name, prefered email, and all phone numbers are extracted.
-    """
-
-    vcard_regex = re.compile(b"^BEGIN:VCARD((\n*?.*?)*?)END:VCARD",
-                             re.MULTILINE)
-    fullname_regex = re.compile(b"^FN:(.*)", re.MULTILINE)
-    email_regex = re.compile(b"^EMAIL;PREF:(.*)", re.MULTILINE)
-    tel_regex = re.compile(b"^TEL;(.*):(.*)", re.MULTILINE)
-
-    with open(file_name, "r") as contacts_file:
-        contacts = []
-        contacts_map = mmap(
-            contacts_file.fileno(), length=0, access=ACCESS_READ)
-        new_contact = None
-
-        # Find all VCARDs in the input file, then extract the first full name,
-        # first email address, and all phone numbers from it.  If there is at
-        # least a full name add it to the contact list.
-        for current_vcard in vcard_regex.findall(contacts_map):
-            new_contact = VCard()
-
-            fullname = fullname_regex.search(current_vcard[0])
-            if fullname is not None:
-                new_contact.name = fullname.group(1)
-
-            email = email_regex.search(current_vcard[0])
-            if email is not None:
-                new_contact.email = email.group(1)
-
-            for phone_number in tel_regex.findall(current_vcard[0]):
-                new_contact.add_phone_number(
-                    PhoneNumber(phone_number[0], phone_number[1]))
-
-            contacts.append(new_contact)
-
-        return contacts
-
-
-def phone_number_count(destination_path, file_name):
-    """Counts number of phone numbers in a VCF.
-    """
-    tel_regex = re.compile(b"^TEL;(.*):(.*)", re.MULTILINE)
-    with open("{}{}".format(destination_path, file_name),
-              "r") as contacts_file:
-        contacts_map = mmap(
-            contacts_file.fileno(), length=0, access=ACCESS_READ)
-        numbers = tel_regex.findall(contacts_map)
-        return len(numbers)
-
-
-def count_contacts_with_differences(destination_path,
-                                    pce_contacts_vcf_file_name,
-                                    pse_contacts_vcf_file_name):
-    """Compare two contact files and report the number of differences.
-
-    Difference count is returned, and the differences are logged, this is order
-    independent.
-    """
-
-    pce_contacts = parse_contacts("{}{}".format(destination_path,
-                                                pce_contacts_vcf_file_name))
-    pse_contacts = parse_contacts("{}{}".format(destination_path,
-                                                pse_contacts_vcf_file_name))
-
-    differences = set(pce_contacts).symmetric_difference(set(pse_contacts))
-    if not differences:
-        log.info("All {} contacts in the phonebooks match".format(
-            str(len(pce_contacts))))
-    else:
-        log.info("{} contacts match, but ".format(
-            str(len(set(pce_contacts).intersection(set(pse_contacts))))))
-        log.info("the following {} entries don't match:".format(
-            str(len(differences))))
-        for current_vcard in differences:
-            log.info(current_vcard)
-    return len(differences)
-
-
-class PhoneNumber(object):
-    """Simple class for maintaining a phone number entry and type with only the
-    digits.
-    """
-
-    def __init__(self, phone_type, phone_number):
-        self.phone_type = phone_type
-        # remove non digits from phone_number
-        self.phone_number = re.sub(r"\D", "", str(phone_number))
-
-    def __eq__(self, other):
-        return (self.phone_type == other.phone_type and
-                self.phone_number == other.phone_number)
-
-    def __hash__(self):
-        return hash(self.phone_type) ^ hash(self.phone_number)
-
-
-class VCard(object):
-    """Contains name, email, and phone numbers.
-    """
-
-    def __init__(self):
-        self.name = None
-        self.first_name = None
-        self.last_name = None
-        self.email = None
-        self.phone_numbers = []
-        self.photo = None
-
-    def __lt__(self, other):
-        return self.name < other.name
-
-    def __hash__(self):
-        result = hash(self.name) ^ hash(self.email) ^ hash(self.photo == None)
-        for number in self.phone_numbers:
-            result ^= hash(number)
-        return result
-
-    def __eq__(self, other):
-        return hash(self) == hash(other)
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    def __str__(self):
-        vcard_strings = ["BEGIN:VCARD\n", "VERSION:2.1\n"]
-
-        if self.first_name or self.last_name:
-            vcard_strings.append("N:{};{};;;\nFN:{} {}\n".format(
-                self.last_name, self.first_name, self.first_name,
-                self.last_name))
-        elif self.name:
-            vcard_strings.append("FN:{}\n".format(self.name))
-
-        if self.phone_numbers:
-            for phone in self.phone_numbers:
-                vcard_strings.append("TEL;{}:{}\n".format(
-                    str(phone.phone_type), phone.phone_number))
-
-        if self.email:
-            vcard_strings.append("EMAIL;PREF:{}\n".format(self.email))
-
-        vcard_strings.append("END:VCARD\n")
-        return "".join(vcard_strings)
-
-    def add_phone_number(self, phone_number):
-        if phone_number not in self.phone_numbers:
-            self.phone_numbers.append(phone_number)
-
-
-def generate_random_phone_number():
-    """Generate a random phone number/type
-    """
-    return PhoneNumber("CELL",
-                       "+{0:010d}".format(random.randint(0, 9999999999)))
-
-
-def generate_random_string(length=8,
-                           charset="{}{}{}".format(string.digits,
-                                                   string.ascii_letters,
-                                                   string.punctuation)):
-    """Generate a random string of specified length from the characterset
-    """
-    # Remove ; since that would make 2 words.
-    charset = charset.replace(";", "")
-    name = []
-    for i in range(length):
-        name.append(random.choice(charset))
-    return "".join(name)
-
-
-def generate_contact_list(destination_path,
-                          file_name,
-                          contact_count,
-                          phone_number_count=1):
-    """Generate a simple VCF file for count contacts with basic content.
-
-    An example with count = 1 and local_number = 2]
-
-    BEGIN:VCARD
-    VERSION:2.1
-    N:Person;1;;;
-    FN:1 Person
-    TEL;CELL:+1-555-555-1234
-    TEL;CELL:+1-555-555-4321
-    EMAIL;PREF:person1@gmail.com
-    END:VCARD
-    """
-    vcards = []
-    for i in range(contact_count):
-        current_contact = VCard()
-        current_contact.first_name = generate_random_string(
-            random.randint(1, 19))
-        current_contact.last_name = generate_random_string(
-            random.randint(1, 19))
-        current_contact.email = "{}{}@{}.{}".format(
-            current_contact.last_name, current_contact.first_name,
-            generate_random_string(random.randint(1, 19)),
-            generate_random_string(random.randint(1, 4)))
-        for number in range(phone_number_count):
-            current_contact.add_phone_number(generate_random_phone_number())
-        vcards.append(current_contact)
-    create_new_contacts_vcf_from_vcards(destination_path, file_name, vcards)
-
-
-def create_new_contacts_vcf_from_vcards(destination_path, vcf_file_name,
-                                        vcards):
-    """Create a new file with filename
-    """
-    contact_file = open("{}{}".format(destination_path, vcf_file_name), "w+")
-    for card in vcards:
-        contact_file.write(str(card))
-    contact_file.close()
-
-
-def get_contact_count(device):
-    """Returns the number of name:phone number pairs.
-    """
-    contact_list = device.droid.contactsQueryContent(
-        CONTACTS_URI, ["display_name", "data1"], "", [], "display_name")
-    return len(contact_list)
-
-
-def import_device_contacts_from_vcf(device, destination_path, vcf_file, timeout=10):
-    """Uploads and import vcf file to device.
-    """
-    number_count = phone_number_count(destination_path, vcf_file)
-    device.log.info("Trying to add {} phone numbers.".format(number_count))
-    local_phonebook_path = "{}{}".format(destination_path, vcf_file)
-    phone_phonebook_path = "{}{}".format(STORAGE_PATH, vcf_file)
-    device.adb.push("{} {}".format(local_phonebook_path, phone_phonebook_path))
-    device.droid.importVcf("file://{}{}".format(STORAGE_PATH, vcf_file))
-    start_time = time.time()
-    while time.time() < start_time + timeout:
-        #TODO: use unattended way to bypass contact import module instead of keyevent
-        if "ImportVCardActivity" in device.get_my_current_focus_window():
-            # keyevent to allow contacts import from vcf file
-            for key in ["DPAD_RIGHT", "DPAD_RIGHT", "ENTER"]:
-                device.adb.shell("input keyevent KEYCODE_{}".format(key))
-            break
-        time.sleep(1)
-    if wait_for_phone_number_update_complete(device, number_count):
-        return number_count
-    else:
-        return 0
-
-
-def export_device_contacts_to_vcf(device, destination_path, vcf_file):
-    """Export and download vcf file from device.
-    """
-    path_on_phone = "{}{}".format(STORAGE_PATH, vcf_file)
-    device.droid.exportVcf("{}".format(path_on_phone))
-    # Download and then remove file from device
-    device.adb.pull("{} {}".format(path_on_phone, destination_path))
-    return True
-
-
-def delete_vcf_files(device):
-    """Deletes all files with .vcf extension
-    """
-    files = device.adb.shell("ls {}".format(STORAGE_PATH))
-    for file_name in files.split():
-        if ".vcf" in file_name:
-            device.adb.shell("rm -f {}{}".format(STORAGE_PATH, file_name))
-
-
-def erase_contacts(device):
-    """Erase all contacts out of devices contact database.
-    """
-    device.log.info("Erasing contacts.")
-    if get_contact_count(device) > 0:
-        device.droid.contactsEraseAll()
-        try:
-            device.ed.pop_event(CONTACTS_ERASED_CALLBACK, PBAP_SYNC_TIME)
-        except queue.Empty:
-            log.error("Phone book not empty.")
-            return False
-    return True
-
-
-def wait_for_phone_number_update_complete(device, expected_count):
-    """Check phone_number count on device and wait for updates until it has the
-    expected number of phone numbers in its contact database.
-    """
-    update_completed = True
-    try:
-        while (expected_count != get_contact_count(device) and
-               device.ed.pop_event(CONTACTS_CHANGED_CALLBACK, PBAP_SYNC_TIME)):
-            pass
-    except queue.Empty:
-        log.error("Contacts failed to update.")
-        update_completed = False
-    device.log.info("Found {} out of the expected {} contacts.".format(
-        get_contact_count(device), expected_count))
-    return update_completed
-
-
-def wait_for_call_log_update_complete(device, expected_count):
-    """Check call log count on device and wait for updates until it has the
-    expected number of calls in its call log database.
-    """
-    update_completed = True
-    try:
-        while (expected_count != device.droid.callLogGetCount() and
-               device.ed.pop_event(CALL_LOG_CHANGED, PBAP_SYNC_TIME)):
-            pass
-    except queue.Empty:
-        log.error("Call Log failed to update.")
-        update_completed = False
-    device.log.info("Found {} out of the expected {} call logs.".format(
-        device.droid.callLogGetCount(), expected_count))
-    return
-
-
-def add_call_log(device, call_log_type, phone_number, call_time):
-    """Add call number and time to specified log.
-    """
-    new_call_log = {}
-    new_call_log["type"] = str(call_log_type)
-    new_call_log["number"] = phone_number
-    new_call_log["time"] = str(call_time)
-    device.droid.callLogsPut(new_call_log)
-
-
-def get_and_compare_call_logs(pse, pce, call_log_type):
-    """Gather and compare call logs from PSE and PCE for the specified type.
-    """
-    pse_call_log = pse.droid.callLogsGet(call_log_type)
-    pce_call_log = pce.droid.callLogsGet(call_log_type)
-    return compare_call_logs(pse_call_log, pce_call_log)
-
-
-def normalize_phonenumber(phone_number):
-    """Remove all non-digits from phone_number
-    """
-    return re.sub(r"\D", "", phone_number)
-
-
-def compare_call_logs(pse_call_log, pce_call_log):
-    """Gather and compare call logs from PSE and PCE for the specified type.
-    """
-    call_logs_match = True
-    if len(pse_call_log) == len(pce_call_log):
-        for i in range(len(pse_call_log)):
-            # Compare the phone number
-            if normalize_phonenumber(pse_call_log[i][
-                    "number"]) != normalize_phonenumber(pce_call_log[i][
-                         "number"]):
-                log.warning("Call Log numbers differ")
-                call_logs_match = False
-
-            # Compare which log it was taken from (Incomming, Outgoing, Missed
-            if pse_call_log[i]["type"] != pce_call_log[i]["type"]:
-                log.warning("Call Log types differ")
-                call_logs_match = False
-
-            # Compare time to truncated second.
-            if int(pse_call_log[i]["date"]) // 1000 != int(pce_call_log[i][
-                    "date"]) // 1000:
-                log.warning("Call log times don't match, check timezone.")
-                call_logs_match = False
-
-    else:
-        log.warning("Call Log lengths differ {}:{}".format(
-            len(pse_call_log), len(pce_call_log)))
-        call_logs_match = False
-
-    if not call_logs_match:
-        log.info("PSE Call Log:")
-        log.info(pse_call_log)
-        log.info("PCE Call Log:")
-        log.info(pce_call_log)
-
-    return call_logs_match
-
diff --git a/src/antlion/test_utils/bt/bt_factory.py b/src/antlion/test_utils/bt/bt_factory.py
deleted file mode 100644
index e085848..0000000
--- a/src/antlion/test_utils/bt/bt_factory.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import importlib
-
-
-def create(configs):
-    """Used to create instance of bt implementation.
-
-    A list of of configuration is extracted from configs.
-    The modules names are extracted and passed to import_module
-    to get the specific implementation, which gets appended to a
-    device list.
-    Args:
-        configs: A configurations dictionary that contains
-        a list of configs for each device in configs['user_params']['BtDevice'].
-
-    Returns:
-        A list of bt implementations.
-    """
-    bt_devices = []
-    for config in configs:
-        bt_name = config['bt_module']
-        bt = importlib.import_module('acts_contrib.test_utils.bt.bt_implementations.%s'
-                                      % bt_name)
-        bt_devices.append(bt.BluethoothDevice(config))
-    return bt_devices
-
-
-def destroy(bt_device_list):
-    for bt in bt_device_list:
-        bt.close()
diff --git a/src/antlion/test_utils/bt/bt_gatt_utils.py b/src/antlion/test_utils/bt/bt_gatt_utils.py
deleted file mode 100644
index 0347d26..0000000
--- a/src/antlion/test_utils/bt/bt_gatt_utils.py
+++ /dev/null
@@ -1,418 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from antlion.test_utils.bt.bt_test_utils import BtTestUtilsError
-from antlion.test_utils.bt.bt_test_utils import get_mac_address_of_generic_advertisement
-from antlion.test_utils.bt.bt_constants import gatt_cb_err
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import gatt_connection_state
-from antlion.test_utils.bt.bt_constants import gatt_characteristic
-from antlion.test_utils.bt.bt_constants import gatt_descriptor
-from antlion.test_utils.bt.bt_constants import gatt_phy_mask
-from antlion.test_utils.bt.bt_constants import gatt_service_types
-from antlion.test_utils.bt.bt_constants import gatt_transport
-import pprint
-from queue import Empty
-
-default_timeout = 10
-log = logging
-
-
-class GattTestUtilsError(Exception):
-    pass
-
-
-def setup_gatt_connection(cen_ad,
-                          mac_address,
-                          autoconnect,
-                          transport=gatt_transport['auto'],
-                          opportunistic=False):
-    gatt_callback = cen_ad.droid.gattCreateGattCallback()
-    log.info("Gatt Connect to mac address {}.".format(mac_address))
-    bluetooth_gatt = cen_ad.droid.gattClientConnectGatt(
-        gatt_callback, mac_address, autoconnect, transport, opportunistic,
-        gatt_phy_mask['1m_mask'])
-    expected_event = gatt_cb_strings['gatt_conn_change'].format(gatt_callback)
-    try:
-        event = cen_ad.ed.pop_event(expected_event, default_timeout)
-    except Empty:
-        close_gatt_client(cen_ad, bluetooth_gatt)
-        raise GattTestUtilsError(
-            "Could not establish a connection to "
-            "peripheral. Expected event: {}".format(expected_event))
-    if event['data']['State'] != gatt_connection_state['connected']:
-        close_gatt_client(cen_ad, bluetooth_gatt)
-        try:
-            cen_ad.droid.gattClientClose(bluetooth_gatt)
-        except Exception:
-            self.log.debug("Failed to close gatt client.")
-        raise GattTestUtilsError("Could not establish a connection to "
-                                 "peripheral. Event Details: {}".format(
-                                     pprint.pformat(event)))
-    return bluetooth_gatt, gatt_callback
-
-
-def close_gatt_client(cen_ad, bluetooth_gatt):
-    try:
-        cen_ad.droid.gattClientClose(bluetooth_gatt)
-    except Exception:
-        log.debug("Failed to close gatt client.")
-
-
-def disconnect_gatt_connection(cen_ad, bluetooth_gatt, gatt_callback):
-    cen_ad.droid.gattClientDisconnect(bluetooth_gatt)
-    wait_for_gatt_disconnect_event(cen_ad, gatt_callback)
-    return
-
-
-def wait_for_gatt_disconnect_event(cen_ad, gatt_callback):
-    expected_event = gatt_cb_strings['gatt_conn_change'].format(gatt_callback)
-    try:
-        event = cen_ad.ed.pop_event(expected_event, default_timeout)
-    except Empty:
-        raise GattTestUtilsError(
-            gatt_cb_err['gatt_conn_change_err'].format(expected_event))
-    found_state = event['data']['State']
-    expected_state = gatt_connection_state['disconnected']
-    if found_state != expected_state:
-        raise GattTestUtilsError(
-            "GATT connection state change expected {}, found {}".format(
-                expected_event, found_state))
-    return
-
-
-def orchestrate_gatt_connection(cen_ad,
-                                per_ad,
-                                transport=gatt_transport['le'],
-                                mac_address=None,
-                                autoconnect=False,
-                                opportunistic=False):
-    adv_callback = None
-    if mac_address is None:
-        if transport == gatt_transport['le']:
-            try:
-                mac_address, adv_callback, scan_callback = (
-                    get_mac_address_of_generic_advertisement(cen_ad, per_ad))
-            except BtTestUtilsError as err:
-                raise GattTestUtilsError(
-                    "Error in getting mac address: {}".format(err))
-        else:
-            mac_address = per_ad.droid.bluetoothGetLocalAddress()
-            adv_callback = None
-    bluetooth_gatt, gatt_callback = setup_gatt_connection(
-        cen_ad, mac_address, autoconnect, transport, opportunistic)
-    return bluetooth_gatt, gatt_callback, adv_callback
-
-
-def run_continuous_write_descriptor(cen_droid,
-                                    cen_ed,
-                                    per_droid,
-                                    per_ed,
-                                    gatt_server,
-                                    gatt_server_callback,
-                                    bluetooth_gatt,
-                                    services_count,
-                                    discovered_services_index,
-                                    number_of_iterations=100000):
-    log.info("Starting continuous write")
-    bt_device_id = 0
-    status = 1
-    offset = 1
-    test_value = [1, 2, 3, 4, 5, 6, 7]
-    test_value_return = [1, 2, 3]
-    for _ in range(number_of_iterations):
-        try:
-            for i in range(services_count):
-                characteristic_uuids = (
-                    cen_droid.gattClientGetDiscoveredCharacteristicUuids(
-                        discovered_services_index, i))
-                log.info(characteristic_uuids)
-                for characteristic in characteristic_uuids:
-                    descriptor_uuids = (
-                        cen_droid.gattClientGetDiscoveredDescriptorUuids(
-                            discovered_services_index, i, characteristic))
-                    log.info(descriptor_uuids)
-                    for descriptor in descriptor_uuids:
-                        cen_droid.gattClientDescriptorSetValue(
-                            bluetooth_gatt, discovered_services_index, i,
-                            characteristic, descriptor, test_value)
-                        cen_droid.gattClientWriteDescriptor(
-                            bluetooth_gatt, discovered_services_index, i,
-                            characteristic, descriptor)
-                        expected_event = gatt_cb_strings[
-                            'desc_write_req'].format(gatt_server_callback)
-                        try:
-                            event = per_ed.pop_event(expected_event,
-                                                     default_timeout)
-                        except Empty:
-                            log.error(gatt_cb_err['desc_write_req_err'].format(
-                                expected_event))
-                            return False
-                        request_id = event['data']['requestId']
-                        found_value = event['data']['value']
-                        if found_value != test_value:
-                            log.error(
-                                "Values didn't match. Found: {}, Expected: "
-                                "{}".format(found_value, test_value))
-                        per_droid.gattServerSendResponse(
-                            gatt_server, bt_device_id, request_id, status,
-                            offset, test_value_return)
-                        expected_event = gatt_cb_strings['desc_write'].format(
-                            bluetooth_gatt)
-                        try:
-                            cen_ed.pop_event(expected_event, default_timeout)
-                        except Empty:
-                            log.error(gatt_cb_strings['desc_write_err'].format(
-                                expected_event))
-                            raise Exception("Thread ended prematurely.")
-        except Exception as err:
-            log.error("Continuing but found exception: {}".format(err))
-
-
-def setup_characteristics_and_descriptors(droid):
-    characteristic_input = [
-        {
-            'uuid':
-            "aa7edd5a-4d1d-4f0e-883a-d145616a1630",
-            'property':
-            gatt_characteristic['property_write']
-            | gatt_characteristic['property_write_no_response'],
-            'permission':
-            gatt_characteristic['permission_write']
-        },
-        {
-            'uuid':
-            "21c0a0bf-ad51-4a2d-8124-b74003e4e8c8",
-            'property':
-            gatt_characteristic['property_notify']
-            | gatt_characteristic['property_read'],
-            'permission':
-            gatt_characteristic['permission_read']
-        },
-        {
-            'uuid':
-            "6774191f-6ec3-4aa2-b8a8-cf830e41fda6",
-            'property':
-            gatt_characteristic['property_notify']
-            | gatt_characteristic['property_read'],
-            'permission':
-            gatt_characteristic['permission_read']
-        },
-    ]
-    descriptor_input = [{
-        'uuid':
-        "aa7edd5a-4d1d-4f0e-883a-d145616a1630",
-        'property':
-        gatt_descriptor['permission_read']
-        | gatt_descriptor['permission_write'],
-    }, {
-        'uuid':
-        "76d5ed92-ca81-4edb-bb6b-9f019665fb32",
-        'property':
-        gatt_descriptor['permission_read']
-        | gatt_characteristic['permission_write'],
-    }]
-    characteristic_list = setup_gatt_characteristics(droid,
-                                                     characteristic_input)
-    descriptor_list = setup_gatt_descriptors(droid, descriptor_input)
-    return characteristic_list, descriptor_list
-
-
-def setup_multiple_services(per_ad):
-    per_droid, per_ed = per_ad.droid, per_ad.ed
-    gatt_server_callback = per_droid.gattServerCreateGattServerCallback()
-    gatt_server = per_droid.gattServerOpenGattServer(gatt_server_callback)
-    characteristic_list, descriptor_list = (
-        setup_characteristics_and_descriptors(per_droid))
-    per_droid.gattServerCharacteristicAddDescriptor(characteristic_list[1],
-                                                    descriptor_list[0])
-    per_droid.gattServerCharacteristicAddDescriptor(characteristic_list[2],
-                                                    descriptor_list[1])
-    gattService = per_droid.gattServerCreateService(
-        "00000000-0000-1000-8000-00805f9b34fb", gatt_service_types['primary'])
-    gattService2 = per_droid.gattServerCreateService(
-        "FFFFFFFF-0000-1000-8000-00805f9b34fb", gatt_service_types['primary'])
-    gattService3 = per_droid.gattServerCreateService(
-        "3846D7A0-69C8-11E4-BA00-0002A5D5C51B", gatt_service_types['primary'])
-    for characteristic in characteristic_list:
-        per_droid.gattServerAddCharacteristicToService(gattService,
-                                                       characteristic)
-    per_droid.gattServerAddService(gatt_server, gattService)
-    expected_event = gatt_cb_strings['serv_added'].format(gatt_server_callback)
-    try:
-        per_ed.pop_event(expected_event, default_timeout)
-    except Empty:
-        per_ad.droid.gattServerClose(gatt_server)
-        raise GattTestUtilsError(
-            gatt_cb_strings['serv_added_err'].format(expected_event))
-    for characteristic in characteristic_list:
-        per_droid.gattServerAddCharacteristicToService(gattService2,
-                                                       characteristic)
-    per_droid.gattServerAddService(gatt_server, gattService2)
-    try:
-        per_ed.pop_event(expected_event, default_timeout)
-    except Empty:
-        per_ad.droid.gattServerClose(gatt_server)
-        raise GattTestUtilsError(
-            gatt_cb_strings['serv_added_err'].format(expected_event))
-    for characteristic in characteristic_list:
-        per_droid.gattServerAddCharacteristicToService(gattService3,
-                                                       characteristic)
-    per_droid.gattServerAddService(gatt_server, gattService3)
-    try:
-        per_ed.pop_event(expected_event, default_timeout)
-    except Empty:
-        per_ad.droid.gattServerClose(gatt_server)
-        raise GattTestUtilsError(
-            gatt_cb_strings['serv_added_err'].format(expected_event))
-    return gatt_server_callback, gatt_server
-
-
-def setup_characteristics_and_descriptors(droid):
-    characteristic_input = [
-        {
-            'uuid':
-            "aa7edd5a-4d1d-4f0e-883a-d145616a1630",
-            'property':
-            gatt_characteristic['property_write']
-            | gatt_characteristic['property_write_no_response'],
-            'permission':
-            gatt_characteristic['property_write']
-        },
-        {
-            'uuid':
-            "21c0a0bf-ad51-4a2d-8124-b74003e4e8c8",
-            'property':
-            gatt_characteristic['property_notify']
-            | gatt_characteristic['property_read'],
-            'permission':
-            gatt_characteristic['permission_read']
-        },
-        {
-            'uuid':
-            "6774191f-6ec3-4aa2-b8a8-cf830e41fda6",
-            'property':
-            gatt_characteristic['property_notify']
-            | gatt_characteristic['property_read'],
-            'permission':
-            gatt_characteristic['permission_read']
-        },
-    ]
-    descriptor_input = [{
-        'uuid':
-        "aa7edd5a-4d1d-4f0e-883a-d145616a1630",
-        'property':
-        gatt_descriptor['permission_read']
-        | gatt_descriptor['permission_write'],
-    }, {
-        'uuid':
-        "76d5ed92-ca81-4edb-bb6b-9f019665fb32",
-        'property':
-        gatt_descriptor['permission_read']
-        | gatt_characteristic['permission_write'],
-    }]
-    characteristic_list = setup_gatt_characteristics(droid,
-                                                     characteristic_input)
-    descriptor_list = setup_gatt_descriptors(droid, descriptor_input)
-    return characteristic_list, descriptor_list
-
-
-def setup_gatt_characteristics(droid, input):
-    characteristic_list = []
-    for item in input:
-        index = droid.gattServerCreateBluetoothGattCharacteristic(
-            item['uuid'], item['property'], item['permission'])
-        characteristic_list.append(index)
-    return characteristic_list
-
-
-def setup_gatt_descriptors(droid, input):
-    descriptor_list = []
-    for item in input:
-        index = droid.gattServerCreateBluetoothGattDescriptor(
-            item['uuid'],
-            item['property'],
-        )
-        descriptor_list.append(index)
-    log.info("setup descriptor list: {}".format(descriptor_list))
-    return descriptor_list
-
-
-def setup_gatt_mtu(cen_ad, bluetooth_gatt, gatt_callback, mtu):
-    """utility function to set mtu for GATT connection.
-
-    Steps:
-    1. Request mtu change.
-    2. Check if the mtu is changed to the new value
-
-    Args:
-        cen_ad: test device for client to scan.
-        bluetooth_gatt: GATT object
-        mtu: new mtu value to be set
-
-    Returns:
-        If success, return True.
-        if fail, return False
-    """
-    cen_ad.droid.gattClientRequestMtu(bluetooth_gatt, mtu)
-    expected_event = gatt_cb_strings['mtu_changed'].format(gatt_callback)
-    try:
-        mtu_event = cen_ad.ed.pop_event(expected_event, default_timeout)
-        mtu_size_found = mtu_event['data']['MTU']
-        if mtu_size_found != mtu:
-            log.error("MTU size found: {}, expected: {}".format(
-                mtu_size_found, mtu))
-            return False
-    except Empty:
-        log.error(gatt_cb_err['mtu_changed_err'].format(expected_event))
-        return False
-    return True
-
-
-def log_gatt_server_uuids(cen_ad,
-                          discovered_services_index,
-                          bluetooth_gatt=None):
-    services_count = cen_ad.droid.gattClientGetDiscoveredServicesCount(
-        discovered_services_index)
-    for i in range(services_count):
-        service = cen_ad.droid.gattClientGetDiscoveredServiceUuid(
-            discovered_services_index, i)
-        log.info("Discovered service uuid {}".format(service))
-        characteristic_uuids = (
-            cen_ad.droid.gattClientGetDiscoveredCharacteristicUuids(
-                discovered_services_index, i))
-        for j in range(len(characteristic_uuids)):
-            descriptor_uuids = (
-                cen_ad.droid.gattClientGetDiscoveredDescriptorUuidsByIndex(
-                    discovered_services_index, i, j))
-            if bluetooth_gatt:
-                char_inst_id = cen_ad.droid.gattClientGetCharacteristicInstanceId(
-                    bluetooth_gatt, discovered_services_index, i, j)
-                log.info("Discovered characteristic handle uuid: {} {}".format(
-                    hex(char_inst_id), characteristic_uuids[j]))
-                for k in range(len(descriptor_uuids)):
-                    desc_inst_id = cen_ad.droid.gattClientGetDescriptorInstanceId(
-                        bluetooth_gatt, discovered_services_index, i, j, k)
-                    log.info("Discovered descriptor handle uuid: {} {}".format(
-                        hex(desc_inst_id), descriptor_uuids[k]))
-            else:
-                log.info("Discovered characteristic uuid: {}".format(
-                    characteristic_uuids[j]))
-                for k in range(len(descriptor_uuids)):
-                    log.info("Discovered descriptor uuid {}".format(
-                        descriptor_uuids[k]))
diff --git a/src/antlion/test_utils/bt/bt_implementations/bt_stub.py b/src/antlion/test_utils/bt/bt_implementations/bt_stub.py
deleted file mode 100644
index 7166379..0000000
--- a/src/antlion/test_utils/bt/bt_implementations/bt_stub.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python3
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""A stub implementation of a DUT interface.
-
-This a stub interface which allows automated test to run
-without automating the hardware. This here for two reasons, first
-as an example of how to write a dut implementation, and second as
-an implementation that can be used to test case without writing
-out the full implementation.
-"""
-
-import logging
-
-class BluethoothDevice:
-    """The api interface used in the test for the stub.
-
-    This is interface which defines all the functions that can be
-    called by the bt test suite.
-    """
-
-    def __init__(self, config):
-        print('Init Stub with ', config)
-        logging.info('Init Stub with '+str(config))
-
-    def answer_phone(self):
-        input('Answer the phone and then press enter\n')
-
-    def hang_up(self):
-        input('Hang up the phone and then press enter\n')
-
-    def toggle_pause(self):
-        input('Press pause on device then press enter\n')
-
-    def volume(self, direction):
-        """Adjust the volume specified by the value of direction.
-
-        Args:
-            direction: A string that is either UP or DOWN
-            that indicates which way to adjust the volume.
-        """
-
-        return input('move volume '+direction+' and then press enter\n')
-
-    def connect(self, android):
-        input('Connect device and press enter\n')
-
-    def is_bt_connected(self):
-        con = input('Is device connected? y/n').lower()
-        while con not in ['y', 'n']:
-            con = input('Is device connected? y/n').lower()
-        return con == 'y'
-
-    def close(self):
-        """This where the hardware is released.
-        """
-        print('Close Stub')
-        logging.info('Close Stub')
-
diff --git a/src/antlion/test_utils/bt/bt_metrics_utils.py b/src/antlion/test_utils/bt/bt_metrics_utils.py
deleted file mode 100644
index 9464737..0000000
--- a/src/antlion/test_utils/bt/bt_metrics_utils.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-def get_bluetooth_profile_connection_stats_map(bluetooth_log):
-    return project_pairs_list_to_map(bluetooth_log.profile_connection_stats,
-                                     lambda stats : stats.profile_id,
-                                     lambda stats : stats.num_times_connected,
-                                     lambda a, b : a + b)
-
-def get_bluetooth_headset_profile_connection_stats_map(bluetooth_log):
-    return project_pairs_list_to_map(bluetooth_log.headset_profile_connection_stats,
-                                     lambda stats : stats.profile_id,
-                                     lambda stats : stats.num_times_connected,
-                                     lambda a, b : a + b)
-
-def project_pairs_list_to_map(pairs_list, get_key, get_value, merge_value):
-    """
-    Project a list of pairs (A, B) into a map of [A] --> B
-    :param pairs_list:  list of pairs (A, B)
-    :param get_key: function used to get key from pair (A, B)
-    :param get_value: function used to get value from pair (A, B)
-    :param merge_value: function used to merge values of B
-    :return: a map of [A] --> B
-    """
-    result = {}
-    for item in pairs_list:
-        my_key = get_key(item)
-        if my_key in result:
-            result[my_key] = merge_value(result[my_key], get_value(item))
-        else:
-            result[my_key] = get_value(item)
-    return result
diff --git a/src/antlion/test_utils/bt/bt_power_test_utils.py b/src/antlion/test_utils/bt/bt_power_test_utils.py
deleted file mode 100644
index f76bd6b..0000000
--- a/src/antlion/test_utils/bt/bt_power_test_utils.py
+++ /dev/null
@@ -1,185 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-import antlion.test_utils.bt.BleEnum as bleenum
-import antlion.test_utils.instrumentation.device.command.instrumentation_command_builder as icb
-
-BLE_LOCATION_SCAN_ENABLE = 'settings put global ble_scan_always_enabled 1'
-BLE_LOCATION_SCAN_DISABLE = 'settings put global ble_scan_always_enabled 0'
-START_BLE_ADV = 'am start -n com.google.bletesting/.ActsCommandExecutor --es command ADVERTISE#2#2#30000'
-START_BLE_SCAN = 'am start -n com.google.bletesting/.ActsCommandExecutor --es command SCAN#2#10000'
-SCAN_DURATION = 10
-SCREEN_WAIT_TIME = 1
-
-
-class MediaControl(object):
-    """Media control using adb shell for power testing.
-
-    Object to control media play status using adb.
-    """
-    def __init__(self, android_device, music_file):
-        """Initialize the media_control class.
-
-        Args:
-            android_dut: android_device object
-            music_file: location of the music file
-        """
-        self.android_device = android_device
-        self.music_file = music_file
-
-    def player_on_foreground(self):
-        """Turn on screen and make sure media play is on foreground
-
-        All media control keycode only works when screen is on and media player
-        is on the foreground. Turn off screen first and turn it on to make sure
-        all operation is based on the same screen status. Otherwise, 'MENU' key
-        would block command to be sent.
-        """
-        self.android_device.droid.goToSleepNow()
-        time.sleep(SCREEN_WAIT_TIME)
-        self.android_device.droid.wakeUpNow()
-        time.sleep(SCREEN_WAIT_TIME)
-        self.android_device.send_keycode('MENU')
-        time.sleep(SCREEN_WAIT_TIME)
-
-    def play(self):
-        """Start playing music.
-
-        """
-        self.player_on_foreground()
-        PLAY = 'am start -a android.intent.action.VIEW -d file://{} -t audio/wav'.format(
-            self.music_file)
-        self.android_device.adb.shell(PLAY)
-
-    def pause(self):
-        """Pause music.
-
-        """
-        self.player_on_foreground()
-        self.android_device.send_keycode('MEDIA_PAUSE')
-
-    def resume(self):
-        """Pause music.
-
-        """
-        self.player_on_foreground()
-        self.android_device.send_keycode('MEDIA_PLAY')
-
-    def stop(self):
-        """Stop music and close media play.
-
-        """
-        self.player_on_foreground()
-        self.android_device.send_keycode('MEDIA_STOP')
-
-
-def start_apk_ble_adv(dut, adv_mode, adv_power_level, adv_duration):
-    """Trigger BLE advertisement from power-test.apk.
-
-    Args:
-        dut: Android device under test, type AndroidDevice obj
-        adv_mode: The BLE advertisement mode.
-            {0: 'LowPower', 1: 'Balanced', 2: 'LowLatency'}
-        adv_power_leve: The BLE advertisement TX power level.
-            {0: 'UltraLowTXPower', 1: 'LowTXPower', 2: 'MediumTXPower,
-            3: HighTXPower}
-        adv_duration: duration of advertisement in seconds, type int
-    """
-
-    adv_duration = str(adv_duration) + 's'
-    builder = icb.InstrumentationTestCommandBuilder.default()
-    builder.add_test_class(
-        "com.google.android.device.power.tests.ble.BleAdvertise")
-    builder.set_manifest_package("com.google.android.device.power")
-    builder.set_runner("androidx.test.runner.AndroidJUnitRunner")
-    builder.add_key_value_param("cool-off-duration", "0s")
-    builder.add_key_value_param("idle-duration", "0s")
-    builder.add_key_value_param(
-        "com.android.test.power.receiver.ADVERTISE_MODE", adv_mode)
-    builder.add_key_value_param("com.android.test.power.receiver.POWER_LEVEL",
-                                adv_power_level)
-    builder.add_key_value_param(
-        "com.android.test.power.receiver.ADVERTISING_DURATION", adv_duration)
-
-    adv_command = builder.build() + ' &'
-    logging.info('Start BLE {} at {} for {} seconds'.format(
-        bleenum.AdvertiseSettingsAdvertiseMode(adv_mode).name,
-        bleenum.AdvertiseSettingsAdvertiseTxPower(adv_power_level).name,
-        adv_duration))
-    dut.adb.shell_nb(adv_command)
-
-
-def start_apk_ble_scan(dut, scan_mode, scan_duration):
-    """Build the command to trigger BLE scan from power-test.apk.
-
-    Args:
-        dut: Android device under test, type AndroidDevice obj
-        scan_mode: The BLE scan mode.
-            {0: 'LowPower', 1: 'Balanced', 2: 'LowLatency', -1: 'Opportunistic'}
-        scan_duration: duration of scan in seconds, type int
-    Returns:
-        adv_command: the command for BLE scan
-    """
-    scan_duration = str(scan_duration) + 's'
-    builder = icb.InstrumentationTestCommandBuilder.default()
-    builder.set_proto_path()
-    builder.add_flag('--no-isolated-storage')
-    builder.add_test_class("com.google.android.device.power.tests.ble.BleScan")
-    builder.set_manifest_package("com.google.android.device.power")
-    builder.set_runner("androidx.test.runner.AndroidJUnitRunner")
-    builder.add_key_value_param("cool-off-duration", "0s")
-    builder.add_key_value_param("idle-duration", "0s")
-    builder.add_key_value_param("com.android.test.power.receiver.SCAN_MODE",
-                                scan_mode)
-    builder.add_key_value_param("com.android.test.power.receiver.MATCH_MODE",
-                                2)
-    builder.add_key_value_param(
-        "com.android.test.power.receiver.SCAN_DURATION", scan_duration)
-    builder.add_key_value_param(
-        "com.android.test.power.receiver.CALLBACK_TYPE", 1)
-    builder.add_key_value_param("com.android.test.power.receiver.FILTER",
-                                'true')
-
-    scan_command = builder.build() + ' &'
-    logging.info('Start BLE {} scans for {} seconds'.format(
-        bleenum.ScanSettingsScanMode(scan_mode).name, scan_duration))
-    dut.adb.shell_nb(scan_command)
-
-
-def establish_ble_connection(client_ad, server_ad, con_priority):
-    """Establish BLE connection using BLE_Test.apk.
-
-    Args:
-        client_ad: the Android device performing the BLE connection.
-        server_ad: the Android device accepting the BLE connection.
-        con_priority: The BLE Connection Priority.
-            {0: 'BALANCED'= Use the connection parameters recommended by the Bluetooth SIG,
-            1: 'HIGH'= Request a high priority, low latency connection,
-            2: 'LOW_POWER= Request low power, reduced data rate connection parameters }
-    """
-    server_ad.adb.shell(START_BLE_ADV)
-    time.sleep(5)
-    client_ad.adb.shell(START_BLE_SCAN)
-    time.sleep(SCAN_DURATION)
-    logging.info("Connection Priority is:{}".format(con_priority))
-    client_ad.adb.shell(
-        'am start -n com.google.bletesting/.ActsCommandExecutor '
-        '--es command GATTCONNECT#{}'.format(con_priority))
-    logging.info(
-        "BLE Connection Successful with Connection Priority:{}".format(
-            con_priority))
diff --git a/src/antlion/test_utils/bt/bt_test_utils.py b/src/antlion/test_utils/bt/bt_test_utils.py
deleted file mode 100644
index 2152c82..0000000
--- a/src/antlion/test_utils/bt/bt_test_utils.py
+++ /dev/null
@@ -1,1922 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import random
-import re
-import string
-import threading
-import time
-try:
-    import pandas as pd
-except ModuleNotFoundError:
-    pass
-from queue import Empty
-from subprocess import call
-from antlion import asserts
-from antlion.test_utils.bt.bt_constants import adv_fail
-from antlion.test_utils.bt.bt_constants import adv_succ
-from antlion.test_utils.bt.bt_constants import batch_scan_not_supported_list
-from antlion.test_utils.bt.bt_constants import batch_scan_result
-from antlion.test_utils.bt.bt_constants import bits_per_samples
-from antlion.test_utils.bt.bt_constants import ble_advertise_settings_modes
-from antlion.test_utils.bt.bt_constants import ble_advertise_settings_tx_powers
-from antlion.test_utils.bt.bt_constants import bluetooth_a2dp_codec_config_changed
-from antlion.test_utils.bt.bt_constants import bluetooth_off
-from antlion.test_utils.bt.bt_constants import bluetooth_on
-from antlion.test_utils.bt.bt_constants import \
-    bluetooth_profile_connection_state_changed
-from antlion.test_utils.bt.bt_constants import bluetooth_socket_conn_test_uuid
-from antlion.test_utils.bt.bt_constants import bt_default_timeout
-from antlion.test_utils.bt.bt_constants import bt_profile_constants
-from antlion.test_utils.bt.bt_constants import bt_profile_states
-from antlion.test_utils.bt.bt_constants import bt_rfcomm_uuids
-from antlion.test_utils.bt.bt_constants import bt_scan_mode_types
-from antlion.test_utils.bt.bt_constants import btsnoop_last_log_path_on_device
-from antlion.test_utils.bt.bt_constants import btsnoop_log_path_on_device
-from antlion.test_utils.bt.bt_constants import channel_modes
-from antlion.test_utils.bt.bt_constants import codec_types
-from antlion.test_utils.bt.bt_constants import default_bluetooth_socket_timeout_ms
-from antlion.test_utils.bt.bt_constants import default_rfcomm_timeout_ms
-from antlion.test_utils.bt.bt_constants import hid_id_keyboard
-from antlion.test_utils.bt.bt_constants import pairing_variant_passkey_confirmation
-from antlion.test_utils.bt.bt_constants import pan_connect_timeout
-from antlion.test_utils.bt.bt_constants import sample_rates
-from antlion.test_utils.bt.bt_constants import scan_result
-from antlion.test_utils.bt.bt_constants import sig_uuid_constants
-from antlion.test_utils.bt.bt_constants import small_timeout
-from antlion.utils import exe_cmd
-
-from antlion import utils
-
-log = logging
-
-advertisements_to_devices = {}
-
-
-class BtTestUtilsError(Exception):
-    pass
-
-
-def _add_android_device_to_dictionary(android_device, profile_list,
-                                      selector_dict):
-    """Adds the AndroidDevice and supported features to the selector dictionary
-
-    Args:
-        android_device: The Android device.
-        profile_list: The list of profiles the Android device supports.
-    """
-    for profile in profile_list:
-        if profile in selector_dict and android_device not in selector_dict[
-                profile]:
-            selector_dict[profile].append(android_device)
-        else:
-            selector_dict[profile] = [android_device]
-
-
-def bluetooth_enabled_check(ad, timeout_sec=5):
-    """Checks if the Bluetooth state is enabled, if not it will attempt to
-    enable it.
-
-    Args:
-        ad: The Android device list to enable Bluetooth on.
-        timeout_sec: number of seconds to wait for toggle to take effect.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    if not ad.droid.bluetoothCheckState():
-        ad.droid.bluetoothToggleState(True)
-        expected_bluetooth_on_event_name = bluetooth_on
-        try:
-            ad.ed.pop_event(expected_bluetooth_on_event_name,
-                            bt_default_timeout)
-        except Empty:
-            ad.log.info("Failed to toggle Bluetooth on(no broadcast received).")
-            # Try one more time to poke at the actual state.
-            if ad.droid.bluetoothCheckState():
-                ad.log.info(".. actual state is ON")
-                return True
-            ad.log.error(".. actual state is OFF")
-            return False
-    end_time = time.time() + timeout_sec
-    while not ad.droid.bluetoothCheckState() and time.time() < end_time:
-        time.sleep(1)
-    return ad.droid.bluetoothCheckState()
-
-
-def check_device_supported_profiles(droid):
-    """Checks for Android device supported profiles.
-
-    Args:
-        droid: The droid object to query.
-
-    Returns:
-        A dictionary of supported profiles.
-    """
-    profile_dict = {}
-    profile_dict['hid'] = droid.bluetoothHidIsReady()
-    profile_dict['hsp'] = droid.bluetoothHspIsReady()
-    profile_dict['a2dp'] = droid.bluetoothA2dpIsReady()
-    profile_dict['avrcp'] = droid.bluetoothAvrcpIsReady()
-    profile_dict['a2dp_sink'] = droid.bluetoothA2dpSinkIsReady()
-    profile_dict['hfp_client'] = droid.bluetoothHfpClientIsReady()
-    profile_dict['pbap_client'] = droid.bluetoothPbapClientIsReady()
-    return profile_dict
-
-
-def cleanup_scanners_and_advertisers(scn_android_device, scn_callback_list,
-                                     adv_android_device, adv_callback_list):
-    """Try to gracefully stop all scanning and advertising instances.
-
-    Args:
-        scn_android_device: The Android device that is actively scanning.
-        scn_callback_list: The scan callback id list that needs to be stopped.
-        adv_android_device: The Android device that is actively advertising.
-        adv_callback_list: The advertise callback id list that needs to be
-            stopped.
-    """
-    scan_droid, scan_ed = scn_android_device.droid, scn_android_device.ed
-    adv_droid = adv_android_device.droid
-    try:
-        for scan_callback in scn_callback_list:
-            scan_droid.bleStopBleScan(scan_callback)
-    except Exception as err:
-        scn_android_device.log.debug(
-            "Failed to stop LE scan... reseting Bluetooth. Error {}".format(
-                err))
-        reset_bluetooth([scn_android_device])
-    try:
-        for adv_callback in adv_callback_list:
-            adv_droid.bleStopBleAdvertising(adv_callback)
-    except Exception as err:
-        adv_android_device.log.debug(
-            "Failed to stop LE advertisement... reseting Bluetooth. Error {}".
-            format(err))
-        reset_bluetooth([adv_android_device])
-
-
-def clear_bonded_devices(ad):
-    """Clear bonded devices from the input Android device.
-
-    Args:
-        ad: the Android device performing the connection.
-    Returns:
-        True if clearing bonded devices was successful, false if unsuccessful.
-    """
-    bonded_device_list = ad.droid.bluetoothGetBondedDevices()
-    while bonded_device_list:
-        device_address = bonded_device_list[0]['address']
-        if not ad.droid.bluetoothUnbond(device_address):
-            log.error("Failed to unbond {} from {}".format(
-                device_address, ad.serial))
-            return False
-        log.info("Successfully unbonded {} from {}".format(
-            device_address, ad.serial))
-        #TODO: wait for BOND_STATE_CHANGED intent instead of waiting
-        time.sleep(1)
-
-        # If device was first connected using LE transport, after bonding it is
-        # accessible through it's LE address, and through it classic address.
-        # Unbonding it will unbond two devices representing different
-        # "addresses". Attempt to unbond such already unbonded devices will
-        # result in bluetoothUnbond returning false.
-        bonded_device_list = ad.droid.bluetoothGetBondedDevices()
-    return True
-
-
-def connect_phone_to_headset(android,
-                             headset,
-                             timeout=bt_default_timeout,
-                             connection_check_period=10):
-    """Connects android phone to bluetooth headset.
-    Headset object must have methods power_on and enter_pairing_mode,
-    and attribute mac_address.
-
-    Args:
-        android: AndroidDevice object with SL4A installed.
-        headset: Object with attribute mac_address and methods power_on and
-            enter_pairing_mode.
-        timeout: Seconds to wait for devices to connect.
-        connection_check_period: how often to check for connection once the
-            SL4A connect RPC has been sent.
-    Returns:
-        connected (bool): True if devices are paired and connected by end of
-        method. False otherwise.
-    """
-    headset_mac_address = headset.mac_address
-    connected = android.droid.audioIsBluetoothA2dpOn()
-    log.info('Devices connected before pair attempt: %s' % connected)
-    if not connected:
-        # Turn on headset and initiate pairing mode.
-        headset.enter_pairing_mode()
-        android.droid.bluetoothStartPairingHelper()
-    start_time = time.time()
-    # If already connected, skip pair and connect attempt.
-    while not connected and (time.time() - start_time < timeout):
-        bonded_info = android.droid.bluetoothGetBondedDevices()
-        connected_info = android.droid.bluetoothGetConnectedDevices()
-        if headset.mac_address not in [info["address"] for info in bonded_info]:
-            # Use SL4A to pair and connect with headset.
-            headset.enter_pairing_mode()
-            android.droid.bluetoothDiscoverAndBond(headset_mac_address)
-        elif headset.mac_address not in [
-                info["address"] for info in connected_info
-        ]:
-            #Device is bonded but not connected
-            android.droid.bluetoothConnectBonded(headset_mac_address)
-        else:
-            #Headset is connected, but A2DP profile is not
-            android.droid.bluetoothA2dpConnect(headset_mac_address)
-        log.info('Waiting for connection...')
-        time.sleep(connection_check_period)
-        # Check for connection.
-        connected = android.droid.audioIsBluetoothA2dpOn()
-    log.info('Devices connected after pair attempt: %s' % connected)
-    return connected
-
-
-def connect_pri_to_sec(pri_ad, sec_ad, profiles_set, attempts=2):
-    """Connects pri droid to secondary droid.
-
-    Args:
-        pri_ad: AndroidDroid initiating connection
-        sec_ad: AndroidDroid accepting connection
-        profiles_set: Set of profiles to be connected
-        attempts: Number of attempts to try until failure.
-
-    Returns:
-        Pass if True
-        Fail if False
-    """
-    device_addr = sec_ad.droid.bluetoothGetLocalAddress()
-    # Allows extra time for the SDP records to be updated.
-    time.sleep(2)
-    curr_attempts = 0
-    while curr_attempts < attempts:
-        log.info("connect_pri_to_sec curr attempt {} total {}".format(
-            curr_attempts, attempts))
-        if _connect_pri_to_sec(pri_ad, sec_ad, profiles_set):
-            return True
-        curr_attempts += 1
-    log.error("connect_pri_to_sec failed to connect after {} attempts".format(
-        attempts))
-    return False
-
-
-def _connect_pri_to_sec(pri_ad, sec_ad, profiles_set):
-    """Connects pri droid to secondary droid.
-
-    Args:
-        pri_ad: AndroidDroid initiating connection.
-        sec_ad: AndroidDroid accepting connection.
-        profiles_set: Set of profiles to be connected.
-
-    Returns:
-        True of connection is successful, false if unsuccessful.
-    """
-    # Check if we support all profiles.
-    supported_profiles = bt_profile_constants.values()
-    for profile in profiles_set:
-        if profile not in supported_profiles:
-            pri_ad.log.info("Profile {} is not supported list {}".format(
-                profile, supported_profiles))
-            return False
-
-    # First check that devices are bonded.
-    paired = False
-    for paired_device in pri_ad.droid.bluetoothGetBondedDevices():
-        if paired_device['address'] == \
-                sec_ad.droid.bluetoothGetLocalAddress():
-            paired = True
-            break
-
-    if not paired:
-        pri_ad.log.error("Not paired to {}".format(sec_ad.serial))
-        return False
-
-    # Now try to connect them, the following call will try to initiate all
-    # connections.
-    pri_ad.droid.bluetoothConnectBonded(sec_ad.droid.bluetoothGetLocalAddress())
-
-    end_time = time.time() + 10
-    profile_connected = set()
-    sec_addr = sec_ad.droid.bluetoothGetLocalAddress()
-    pri_ad.log.info("Profiles to be connected {}".format(profiles_set))
-    # First use APIs to check profile connection state
-    while (time.time() < end_time and
-           not profile_connected.issuperset(profiles_set)):
-        if (bt_profile_constants['headset_client'] not in profile_connected and
-                bt_profile_constants['headset_client'] in profiles_set):
-            if is_hfp_client_device_connected(pri_ad, sec_addr):
-                profile_connected.add(bt_profile_constants['headset_client'])
-        if (bt_profile_constants['a2dp'] not in profile_connected and
-                bt_profile_constants['a2dp'] in profiles_set):
-            if is_a2dp_src_device_connected(pri_ad, sec_addr):
-                profile_connected.add(bt_profile_constants['a2dp'])
-        if (bt_profile_constants['a2dp_sink'] not in profile_connected and
-                bt_profile_constants['a2dp_sink'] in profiles_set):
-            if is_a2dp_snk_device_connected(pri_ad, sec_addr):
-                profile_connected.add(bt_profile_constants['a2dp_sink'])
-        if (bt_profile_constants['map_mce'] not in profile_connected and
-                bt_profile_constants['map_mce'] in profiles_set):
-            if is_map_mce_device_connected(pri_ad, sec_addr):
-                profile_connected.add(bt_profile_constants['map_mce'])
-        if (bt_profile_constants['map'] not in profile_connected and
-                bt_profile_constants['map'] in profiles_set):
-            if is_map_mse_device_connected(pri_ad, sec_addr):
-                profile_connected.add(bt_profile_constants['map'])
-        time.sleep(0.1)
-    # If APIs fail, try to find the connection broadcast receiver.
-    while not profile_connected.issuperset(profiles_set):
-        try:
-            profile_event = pri_ad.ed.pop_event(
-                bluetooth_profile_connection_state_changed,
-                bt_default_timeout + 10)
-            pri_ad.log.info("Got event {}".format(profile_event))
-        except Exception:
-            pri_ad.log.error("Did not get {} profiles left {}".format(
-                bluetooth_profile_connection_state_changed, profile_connected))
-            return False
-
-        profile = profile_event['data']['profile']
-        state = profile_event['data']['state']
-        device_addr = profile_event['data']['addr']
-        if state == bt_profile_states['connected'] and \
-                device_addr == sec_ad.droid.bluetoothGetLocalAddress():
-            profile_connected.add(profile)
-        pri_ad.log.info(
-            "Profiles connected until now {}".format(profile_connected))
-    # Failure happens inside the while loop. If we came here then we already
-    # connected.
-    return True
-
-
-def determine_max_advertisements(android_device):
-    """Determines programatically how many advertisements the Android device
-    supports.
-
-    Args:
-        android_device: The Android device to determine max advertisements of.
-
-    Returns:
-        The maximum advertisement count.
-    """
-    android_device.log.info(
-        "Determining number of maximum concurrent advertisements...")
-    advertisement_count = 0
-    bt_enabled = False
-    expected_bluetooth_on_event_name = bluetooth_on
-    if not android_device.droid.bluetoothCheckState():
-        android_device.droid.bluetoothToggleState(True)
-    try:
-        android_device.ed.pop_event(expected_bluetooth_on_event_name,
-                                    bt_default_timeout)
-    except Exception:
-        android_device.log.info(
-            "Failed to toggle Bluetooth on(no broadcast received).")
-        # Try one more time to poke at the actual state.
-        if android_device.droid.bluetoothCheckState() is True:
-            android_device.log.info(".. actual state is ON")
-        else:
-            android_device.log.error(
-                "Failed to turn Bluetooth on. Setting default advertisements to 1"
-            )
-            advertisement_count = -1
-            return advertisement_count
-    advertise_callback_list = []
-    advertise_data = android_device.droid.bleBuildAdvertiseData()
-    advertise_settings = android_device.droid.bleBuildAdvertiseSettings()
-    while (True):
-        advertise_callback = android_device.droid.bleGenBleAdvertiseCallback()
-        advertise_callback_list.append(advertise_callback)
-
-        android_device.droid.bleStartBleAdvertising(advertise_callback,
-                                                    advertise_data,
-                                                    advertise_settings)
-
-        regex = "(" + adv_succ.format(
-            advertise_callback) + "|" + adv_fail.format(
-                advertise_callback) + ")"
-        # wait for either success or failure event
-        evt = android_device.ed.pop_events(regex, bt_default_timeout,
-                                           small_timeout)
-        if evt[0]["name"] == adv_succ.format(advertise_callback):
-            advertisement_count += 1
-            android_device.log.info(
-                "Advertisement {} started.".format(advertisement_count))
-        else:
-            error = evt[0]["data"]["Error"]
-            if error == "ADVERTISE_FAILED_TOO_MANY_ADVERTISERS":
-                android_device.log.info(
-                    "Advertisement failed to start. Reached max " +
-                    "advertisements at {}".format(advertisement_count))
-                break
-            else:
-                raise BtTestUtilsError(
-                    "Expected ADVERTISE_FAILED_TOO_MANY_ADVERTISERS," +
-                    " but received bad error code {}".format(error))
-    try:
-        for adv in advertise_callback_list:
-            android_device.droid.bleStopBleAdvertising(adv)
-    except Exception:
-        android_device.log.error(
-            "Failed to stop advertisingment, resetting Bluetooth.")
-        reset_bluetooth([android_device])
-    return advertisement_count
-
-
def disable_bluetooth(droid):
    """Turn Bluetooth off on the given Droid object.

    Args:
        droid: The droid object to disable Bluetooth on.

    Returns:
        True if Bluetooth ends up off, False if the toggle failed.
    """
    if droid.bluetoothCheckState() is not True:
        # Already off; nothing to do.
        return True
    droid.bluetoothToggleState(False)
    if droid.bluetoothCheckState() is True:
        log.error("Failed to toggle Bluetooth off.")
        return False
    return True
-
-
def disconnect_pri_from_sec(pri_ad, sec_ad, profiles_list):
    """Disconnect primary from secondary on a specific set of profiles.

    Issues the disconnect request, then consumes
    profile-connection-state-changed events until every requested profile
    has reported the 'disconnected' state for the secondary's address.

    Args:
        pri_ad: Primary android_device initiating disconnection.
        sec_ad: Secondary android droid (sl4a interface to keep the
            method signature the same as connect_pri_to_sec above).
        profiles_list: List of profiles we want to disconnect from.

    Returns:
        True on success, False on failure.
    """
    # Sanity check to see if all the profiles in the given set is supported
    supported_profiles = bt_profile_constants.values()
    for profile in profiles_list:
        if profile not in supported_profiles:
            pri_ad.log.info("Profile {} is not in supported list {}".format(
                profile, supported_profiles))
            return False

    pri_ad.log.info(pri_ad.droid.bluetoothGetBondedDevices())
    # Disconnecting on a already disconnected profile is a nop,
    # so not checking for the connection state
    try:
        pri_ad.droid.bluetoothDisconnectConnectedProfile(
            sec_ad.droid.bluetoothGetLocalAddress(), profiles_list)
    except Exception as err:
        pri_ad.log.error(
            "Exception while trying to disconnect profile(s) {}: {}".format(
                profiles_list, err))
        return False

    profile_disconnected = set()
    pri_ad.log.info("Disconnecting from profiles: {}".format(profiles_list))

    # Each pop_event call waits up to bt_default_timeout; a timeout is
    # treated as a disconnect failure and ends the wait loop.
    while not profile_disconnected.issuperset(profiles_list):
        try:
            profile_event = pri_ad.ed.pop_event(
                bluetooth_profile_connection_state_changed, bt_default_timeout)
            pri_ad.log.info("Got event {}".format(profile_event))
        except Exception as e:
            pri_ad.log.error(
                "Did not disconnect from Profiles. Reason {}".format(e))
            return False

        profile = profile_event['data']['profile']
        state = profile_event['data']['state']
        device_addr = profile_event['data']['addr']

        # Only count events that report 'disconnected' AND come from the
        # expected peer address.
        if state == bt_profile_states['disconnected'] and \
                device_addr == sec_ad.droid.bluetoothGetLocalAddress():
            profile_disconnected.add(profile)
        pri_ad.log.info(
            "Profiles disconnected so far {}".format(profile_disconnected))

    return True
-
-
def enable_bluetooth(droid, ed):
    """Turn Bluetooth on, confirming via broadcast event or state poll.

    Args:
        droid: The droid object to enable Bluetooth on.
        ed: The event dispatcher used to wait for the 'on' broadcast.

    Returns:
        True if Bluetooth ends up on, False otherwise.
    """
    if droid.bluetoothCheckState() is True:
        # Already on.
        return True

    droid.bluetoothToggleState(True)
    try:
        ed.pop_event(bluetooth_on, bt_default_timeout)
        return True
    except Exception:
        # No broadcast seen; fall back to polling the actual state.
        log.info("Failed to toggle Bluetooth on (no broadcast received)")

    if droid.bluetoothCheckState() is True:
        log.info(".. actual state is ON")
        return True
    log.info(".. actual state is OFF")
    return False
-
-
def factory_reset_bluetooth(android_devices):
    """Factory-reset the Bluetooth stack of every input Android device.

    Args:
        android_devices: The Android device list to reset Bluetooth on.

    Returns:
        True if every device was reset successfully, False otherwise.
    """
    for ad in android_devices:
        droid, ed = ad.droid, ad.ed
        ad.log.info("Reset state of bluetooth on device.")
        if not bluetooth_enabled_check(ad):
            return False
        # TODO: remove device unbond b/79418045
        # Temporary solution to ensure all devices are unbonded
        for bonded in droid.bluetoothGetBondedDevices():
            ad.log.info("Removing bond for device {}".format(
                bonded['address']))
            droid.bluetoothUnbond(bonded['address'])

        droid.bluetoothFactoryReset()
        wait_for_bluetooth_manager_state(droid)
        if not enable_bluetooth(droid, ed):
            return False
    return True
-
-
def generate_ble_advertise_objects(droid):
    """Create the generic trio of LE advertise objects.

    Args:
        droid: The droid object to generate advertise LE objects from.

    Returns:
        Tuple of (advertise_callback, advertise_data, advertise_settings)
        ids, in that order.
    """
    return (droid.bleGenBleAdvertiseCallback(),
            droid.bleBuildAdvertiseData(),
            droid.bleBuildAdvertiseSettings())
-
-
def generate_ble_scan_objects(droid):
    """Create the generic trio of LE scan objects.

    Args:
        droid: The droid object to generate LE scan objects from.

    Returns:
        Tuple of (filter_list, scan_settings, scan_callback) ids, in that
        order.
    """
    return (droid.bleGenFilterList(),
            droid.bleBuildScanSetting(),
            droid.bleGenScanCallback())
-
-
def generate_id_by_size(size, chars=string.ascii_letters + string.digits):
    """Generate a random ASCII identifier.

    Args:
        size: Number of characters to generate.
        chars: (Optional) Pool of characters to draw from. Defaults to
            upper/lower-case letters plus digits (same pool as before,
            spelled via string.ascii_letters).

    Returns:
        A random string of length `size` drawn from `chars`.
    """
    return ''.join(random.choice(chars) for _ in range(size))
-
-
def get_advanced_droid_list(android_devices):
    """Attach max-advertisement and batch-scan info to Android devices.

    Programmatically determines the maximum number of concurrent LE
    advertisements each input Android device supports, caching results by
    device model in advertisements_to_devices.

    Args:
        android_devices: The Android devices to inspect.

    Returns:
        A list of dicts, one per device, with keys 'droid', 'ed',
        'max_advertisements' and 'batch_scan_supported'.
    """
    droid_list = []
    for ad in android_devices:
        droid, dispatcher = ad.droid, ad.ed
        model = droid.getBuildModel()
        if model in advertisements_to_devices:
            # Known model: use the cached value.
            max_advertisements = advertisements_to_devices[model]
        else:
            # Unknown model: probe the device, retrying a few times if the
            # probe reports failure (-1).
            max_advertisements = determine_max_advertisements(ad)
            retries_left = 3
            while max_advertisements == -1 and retries_left > 0:
                ad.log.info(
                    "Attempts left to determine max advertisements: {}".format(
                        retries_left))
                max_advertisements = determine_max_advertisements(ad)
                retries_left -= 1
            advertisements_to_devices[model] = max_advertisements
        droid_list.append({
            'droid': droid,
            'ed': dispatcher,
            'max_advertisements': max_advertisements,
            'batch_scan_supported': model not in batch_scan_not_supported_list,
        })
    return droid_list
-
-
def get_bluetooth_crash_count(android_device):
    """Return the Bluetooth stack crash count reported by dumpsys.

    Parses the "crashed" line of `dumpsys bluetooth_manager`.

    Args:
        android_device: The Android device to query.

    Returns:
        The crash count as an int.
    """
    out = android_device.adb.shell("dumpsys bluetooth_manager")
    # Raw string: "\d" in a plain literal is an invalid escape sequence
    # (SyntaxWarning on modern Python).
    return int(re.search(r"crashed(.*\d)", out).group(1))
-
-
def read_otp(ad):
    """Reads and parses the OTP output to return TX power backoff.

    Reads the OTP registers from the phone (Bluetooth is disabled for the
    duration of the read), and parses them into a dict of TX power
    backoffs for power levels 10, 9 and 8 per technology.

    Args:
        ad: android device object

    Returns:
        otp_dict: power backoff dict keyed by "BR"/"EDR"/"BLE", each value
        a dict keyed by power level ("10"/"9"/"8").
    """
    ad.adb.shell('svc bluetooth disable')
    time.sleep(2)
    otp_output = ad.adb.shell('bluetooth_sar_test -r')
    ad.adb.shell('svc bluetooth enable')
    time.sleep(2)

    otp_dict = {
        "BR": {
            "10": 0,
            "9": 0,
            "8": 0
        },
        "EDR": {
            "10": 0,
            "9": 0,
            "8": 0
        },
        "BLE": {
            "10": 0,
            "9": 0,
            "8": 0
        }
    }

    # Raw string: "\s"/"\d" in a plain literal are invalid escape
    # sequences (SyntaxWarning on modern Python).
    otp_regex = r'\s+\[\s+PL10:\s+(\d+)\s+PL9:\s+(\d+)*\s+PL8:\s+(\d+)\s+\]'

    for key in otp_dict:
        bank_list = re.findall("{}{}".format(key, otp_regex), otp_output)
        # Keep the first all-non-zero bank found for this technology.
        for bank_tuple in bank_list:
            if ('0', '0', '0') != bank_tuple:
                [otp_dict[key]["10"], otp_dict[key]["9"],
                 otp_dict[key]["8"]] = bank_tuple
    return otp_dict
-
-
def get_bt_metric(ad_list,
                  duration=1,
                  bqr_tag='Monitoring , Handle:',
                  tag='',
                  log_path=False):
    """ Function to get the bt metric from logcat.

    Captures logcat for the specified duration and returns the bqr results.
    Takes list of android objects as input. If a single android object is
    given, converts it into a list.

    Args:
        ad_list: list of android_device objects
        duration: time duration (seconds) for which the logcat is parsed
        bqr_tag: tag of bt metrics
        tag: tag to be appended to the metrics raw data
        log_path: path of metrics raw data (False disables saving)

    Returns:
        process_data: dict of processed data for each android device
    """

    # BQR quantities and the regex to extract each. Raw strings avoid the
    # invalid escape sequences "\s", "\S", "\d", "\w" in plain literals.
    regex_dict = {
        "pwlv": r"PwLv:\s(\S+)",
        "rssi": r"RSSI:\s[-](\d+)",
        "rssi_c0": r"RSSI_C0:\s[-](\d+)",
        "rssi_c1": r"RSSI_C1:\s[-](\d+)",
        "txpw_c0": r"\sTxPw_C0:\s(-?\d+)",
        "txpw_c1": r"\sTxPw_C1:\s(-?\d+)",
        "bftx": r"BFTx:\s(\w+)",
        "divtx": r"DivTx:\s(\w+)"
    }
    metrics_dict = {
        "rssi": {},
        "pwlv": {},
        "rssi_c0": {},
        "rssi_c1": {},
        "txpw_c0": {},
        "txpw_c1": {},
        "bftx": {},
        "divtx": {}
    }

    # Post-processing mode per metric: "y" metrics are negated and
    # averaged, "n" metrics are parsed as ints and reduced to the most
    # common value. Loop-invariant, so built once instead of per device.
    metrics_to_be_formatted = [{
        "name": "rssi",
        "averagble": "y"
    }, {
        "name": "rssi_c0",
        "averagble": "y"
    }, {
        "name": "rssi_c1",
        "averagble": "y"
    }, {
        "name": "pwlv",
        "averagble": "n"
    }, {
        "name": "txpw_c0",
        "averagble": "n"
    }, {
        "name": "txpw_c1",
        "averagble": "n"
    }, {
        "name": "bftx",
        "averagble": "n"
    }, {
        "name": "divtx",
        "averagble": "n"
    }]

    # Initialized once, BEFORE the per-device loop: previously this dict
    # was re-created inside the loop, so only the last device's results
    # were returned, contradicting the documented behavior.
    process_data = {
        "rssi": {},
        "pwlv": {},
        "rssi_c0": {},
        "rssi_c1": {},
        "txpw_c0": {},
        "txpw_c1": {},
        "bftx": {},
        "divtx": {}
    }

    # Converting a single android device object to list
    if not isinstance(ad_list, list):
        ad_list = [ad_list]

    # Time sync with the test machine
    for ad in ad_list:
        ad.droid.setTime(int(round(time.time() * 1000)))
        time.sleep(0.5)

    begin_time = utils.get_current_epoch_time()
    time.sleep(duration)
    end_time = utils.get_current_epoch_time()

    for ad in ad_list:
        bt_rssi_log = ad.cat_adb_log(tag + "_bt_metric", begin_time, end_time)

        # Extracting supported bqr quantities; the context manager
        # guarantees the log file is closed even if parsing raises.
        for metric, regex in regex_dict.items():
            bqr_metric = []
            with open(bt_rssi_log, "r") as file_bt_log:
                for line in file_bt_log:
                    if bqr_tag in line:
                        matches = re.findall(regex, line)
                        if matches:
                            bqr_metric.append(matches[0].strip(","))
            metrics_dict[metric][ad.serial] = bqr_metric

        # Formatting the raw data
        for metric in metrics_to_be_formatted:
            values = metrics_dict[metric["name"]][ad.serial]
            if metric["averagble"] == "y":
                metrics_dict[metric["name"]][ad.serial] = [
                    -int(x) for x in values
                ]
            else:
                metrics_dict[metric["name"]][ad.serial] = [
                    int(x, 16) if '0x' in x else int(x, 10) for x in values
                ]

        # Saving metrics raw data for each attenuation
        if log_path:
            output_file_name = ad.serial + "_metrics_raw_data_" + tag + ".csv"
            output_file = os.path.join(log_path, output_file_name)
            os.makedirs(log_path, exist_ok=True)
            df_save_metrics = {}
            for name, per_device in metrics_dict.items():
                df_save_metrics[name] = next(iter(per_device.items()))[1]
            metrics_df = pd.DataFrame(
                {key: pd.Series(value)
                 for key, value in df_save_metrics.items()})
            metrics_df.to_csv(output_file)

        # Computing the processed data
        for metric in metrics_to_be_formatted:
            name = metric["name"]
            if metric["averagble"] == "y":
                # Raw view with invalid samples (0 and -127) dropped.
                process_data[name][ad.serial] = [
                    x for x in metrics_dict[name][ad.serial]
                    if x != 0 and x != -127
                ]
                try:
                    # NOTE(review): the average is computed over the
                    # UNFILTERED samples (matching original behavior), not
                    # the filtered list built above — confirm intent.
                    process_data[name][ad.serial] = round(
                        sum(metrics_dict[name][ad.serial]) /
                        len(metrics_dict[name][ad.serial]), 2)
                except ZeroDivisionError:
                    # No samples captured.
                    process_data[name][ad.serial] = "n/a"
            else:
                try:
                    # Most common value observed during the capture.
                    process_data[name][ad.serial] = max(
                        metrics_dict[name][ad.serial],
                        key=metrics_dict[name][ad.serial].count)
                except ValueError:
                    # No samples captured.
                    process_data[name][ad.serial] = "n/a"

    return process_data
-
-
def get_bt_rssi(ad, duration=1, processed=True, tag='', log_path=False):
    """Return the average BT RSSI extracted from BQR logcat entries.

    Args:
        ad: (list of) android_device object.
        duration: time duration (seconds) for which logcat is parsed.
        processed: unused; kept for backward compatibility.
        tag: tag appended to the metrics raw data.
        log_path: path for metrics raw data, or False to skip saving.

    Returns:
        avg_rssi: average RSSI on each android device for the duration.
    """
    metrics = get_bt_metric(ad, duration, tag=tag, log_path=log_path)
    return metrics["rssi"]
-
-
def enable_bqr(
    ad_list,
    bqr_interval=10,
    bqr_event_mask=15,
):
    """Sets up BQR reporting.

    Sets BQR system properties to report BT metrics at the requested
    frequency, then toggles airplane mode so the settings take effect.

    Args:
        ad_list: an android_device or list of android devices.
        bqr_interval: minimum reporting interval in milliseconds.
        bqr_event_mask: event mask to enable.
    """
    # Accept a single device as well as a list.
    if not isinstance(ad_list, list):
        ad_list = [ad_list]

    for ad in ad_list:
        # Configure the BQR system properties.
        ad.adb.shell("setprop persist.bluetooth.bqr.event_mask {}".format(
            bqr_event_mask))
        ad.adb.shell("setprop persist.bluetooth.bqr.min_interval_ms {}".format(
            bqr_interval))

        # Toggle airplane mode so the new settings take effect.
        ad.droid.connectivityToggleAirplaneMode(True)
        ad.droid.connectivityToggleAirplaneMode(False)
-
-
def disable_bqr(ad_list):
    """Disables BQR reporting.

    Clears the BQR event mask and toggles airplane mode so the change
    takes effect.

    Args:
        ad_list: an android_device or list of android devices.
    """
    DISABLE_BQR_MASK = 0

    # Accept a single device as well as a list.
    if not isinstance(ad_list, list):
        ad_list = [ad_list]

    for ad in ad_list:
        # Clear the BQR event mask.
        ad.adb.shell("setprop persist.bluetooth.bqr.event_mask {}".format(
            DISABLE_BQR_MASK))

        # Toggle airplane mode so the change takes effect.
        ad.droid.connectivityToggleAirplaneMode(True)
        ad.droid.connectivityToggleAirplaneMode(False)
-
-
def get_device_selector_dictionary(android_device_list):
    """Create a dictionary of Bluetooth features vs Android devices.

    Devices are classified first by their advertised SIG UUIDs, then —
    because some services may be inactive during BT startup — by the
    device-type features reported via `adb shell pm list features`.

    Args:
        android_device_list: The list of Android devices.
    Returns:
        A dictionary of profiles/features to Android devices.
    """
    selector_dict = {}
    for ad in android_device_list:
        uuids = ad.droid.bluetoothGetLocalUuids()

        for profile, uuid_const in sig_uuid_constants.items():
            full_uuid = sig_uuid_constants['BASE_UUID'].format(
                uuid_const).lower()
            if uuids and full_uuid in uuids:
                selector_dict.setdefault(profile, []).append(ad)

        # Fall back to device-type features for profiles whose services
        # were not yet advertising.
        if "feature:android.hardware.type.television" in ad.features:
            ad.log.info("Android TV device found.")
            _add_android_device_to_dictionary(ad, ['AudioSink'],
                                              selector_dict)
        elif "feature:android.hardware.type.automotive" in ad.features:
            ad.log.info("Android Auto device found.")
            # Add: AudioSink , A/V_RemoteControl,
            _add_android_device_to_dictionary(
                ad,
                ['AudioSink', 'A/V_RemoteControl',
                 'Message Notification Server'], selector_dict)
        elif "feature:android.hardware.type.watch" in ad.features:
            ad.log.info("Android Wear device found.")
            _add_android_device_to_dictionary(ad, [], selector_dict)
        elif "feature:android.hardware.telephony" in ad.features:
            ad.log.info("Android Phone device found.")
            # Add: AudioSink
            _add_android_device_to_dictionary(
                ad,
                ['AudioSource', 'A/V_RemoteControlTarget',
                 'Message Access Server'], selector_dict)
    return selector_dict
-
-
def get_mac_address_of_generic_advertisement(scan_ad, adv_ad):
    """Start generic advertisement and get its mac address by LE scanning.

    Args:
        scan_ad: The Android device to use as the scanner.
        adv_ad: The Android device to use as the advertiser.

    Returns:
        mac_address: The mac address of the advertisement.
        advertise_callback: The advertise callback id of the active
            advertisement.
        scan_callback: The scan callback id of the scan that found it.

    Raises:
        BtTestUtilsError: if the advertiser fails to start or the scanner
            never reports a matching result within bt_default_timeout.
    """
    # Configure a connectable, low-latency, high-power advertisement that
    # includes the device name so the scanner can filter on it.
    adv_ad.droid.bleSetAdvertiseDataIncludeDeviceName(True)
    adv_ad.droid.bleSetAdvertiseSettingsAdvertiseMode(
        ble_advertise_settings_modes['low_latency'])
    adv_ad.droid.bleSetAdvertiseSettingsIsConnectable(True)
    adv_ad.droid.bleSetAdvertiseSettingsTxPowerLevel(
        ble_advertise_settings_tx_powers['high'])
    advertise_callback, advertise_data, advertise_settings = (
        generate_ble_advertise_objects(adv_ad.droid))
    adv_ad.droid.bleStartBleAdvertising(advertise_callback, advertise_data,
                                        advertise_settings)
    # Wait for the advertiser to confirm it started.
    try:
        adv_ad.ed.pop_event(adv_succ.format(advertise_callback),
                            bt_default_timeout)
    except Empty as err:
        raise BtTestUtilsError(
            "Advertiser did not start successfully {}".format(err))
    # Scan filtered on the advertiser's device name.
    filter_list = scan_ad.droid.bleGenFilterList()
    scan_settings = scan_ad.droid.bleBuildScanSetting()
    scan_callback = scan_ad.droid.bleGenScanCallback()
    scan_ad.droid.bleSetScanFilterDeviceName(
        adv_ad.droid.bluetoothGetLocalName())
    scan_ad.droid.bleBuildScanFilter(filter_list)
    scan_ad.droid.bleStartBleScan(filter_list, scan_settings, scan_callback)
    try:
        event = scan_ad.ed.pop_event(
            "BleScan{}onScanResults".format(scan_callback), bt_default_timeout)
    except Empty as err:
        raise BtTestUtilsError(
            "Scanner did not find advertisement {}".format(err))
    mac_address = event['data']['Result']['deviceInfo']['address']
    return mac_address, advertise_callback, scan_callback
-
-
def hid_device_send_key_data_report(host_id, device_ad, key, interval=1):
    """Simulate a keyboard key press/release pair over HID.

    Sends a key-down report from device_ad to host_id, waits `interval`
    seconds, then sends the all-zero key-up report.

    Args:
        host_id: the Bluetooth MAC address or name of the HID host
        device_ad: HID device
        key: the key we want to send
        interval: the interval between key press and key release
    """
    send_report = device_ad.droid.bluetoothHidDeviceSendReport
    send_report(host_id, hid_id_keyboard, hid_keyboard_report(key))
    time.sleep(interval)
    # "00" is the no-key (release) report.
    send_report(host_id, hid_id_keyboard, hid_keyboard_report("00"))
-
-
def hid_keyboard_report(key, modifier="00"):
    """Build the 8-byte HID keyboard report for the given key.

    Args:
        key: hex string of the key to report.
        modifier: HID keyboard modifier byte as a hex string.
    Returns:
        The report bytes decoded as a str.
    """
    # Report layout: modifier, reserved, key, then five empty key slots.
    hex_bytes = [modifier, "00", key, "00", "00", "00", "00", "00"]
    return str(bytearray.fromhex(" ".join(hex_bytes)), "utf-8")
-
-
def is_a2dp_connected(sink, source):
    """Check whether source is connected to sink over A2DP.

    Args:
        sink:       Audio Sink
        source:     Audio Source
    Returns:
        True if connected, False otherwise.
    """
    for device in sink.droid.bluetoothA2dpSinkGetConnectedDevices():
        sink.log.info("A2dp Connected device {}".format(device["name"]))
        if device["address"] == source.droid.bluetoothGetLocalAddress():
            return True
    return False
-
-
def is_a2dp_snk_device_connected(ad, addr):
    """Determines if an AndroidDevice has A2DP snk connectivity to addr.

    Args:
        ad: the Android device
        addr: the address that's expected
    Returns:
        True if addr is among the connected A2DP sink devices, else False.
    """
    devices = ad.droid.bluetoothA2dpSinkGetConnectedDevices()
    ad.log.info("Connected A2DP Sink devices: {}".format(devices))
    return addr in {d['address'] for d in devices}
-
-
def is_a2dp_src_device_connected(ad, addr):
    """Determines if an AndroidDevice has A2DP src connectivity to addr.

    Args:
        ad: the Android device
        addr: the address that's expected
    Returns:
        True if addr is among the connected A2DP source devices, else False.
    """
    devices = ad.droid.bluetoothA2dpGetConnectedDevices()
    ad.log.info("Connected A2DP Source devices: {}".format(devices))
    return addr in {d['address'] for d in devices}
-
-
def is_hfp_client_device_connected(ad, addr):
    """Determines if an AndroidDevice has HFP-client connectivity to addr.

    Args:
        ad: the Android device
        addr: the address that's expected
    Returns:
        True if addr is among the connected HFP client devices, else False.
    """
    devices = ad.droid.bluetoothHfpClientGetConnectedDevices()
    ad.log.info("Connected HFP Client devices: {}".format(devices))
    return addr in {d['address'] for d in devices}
-
-
def is_map_mce_device_connected(ad, addr):
    """Determines if an AndroidDevice has MAP MCE connectivity to addr.

    Args:
        ad: the Android device
        addr: the address that's expected
    Returns:
        True if addr is among the connected MAP MCE devices, else False.
    """
    devices = ad.droid.bluetoothMapClientGetConnectedDevices()
    ad.log.info("Connected MAP MCE devices: {}".format(devices))
    return addr in {d['address'] for d in devices}
-
-
def is_map_mse_device_connected(ad, addr):
    """Determines if an AndroidDevice has MAP MSE connectivity to addr.

    Args:
        ad: the Android device
        addr: the address that's expected
    Returns:
        True if addr is among the connected MAP MSE devices, else False.
    """
    devices = ad.droid.bluetoothMapGetConnectedDevices()
    ad.log.info("Connected MAP MSE devices: {}".format(devices))
    return addr in {d['address'] for d in devices}
-
-
def kill_bluetooth_process(ad):
    """Kill Bluetooth process on Android device.

    Args:
        ad: Android device to kill BT process on.
    """
    ad.log.info("Killing Bluetooth process.")
    # NOTE(review): assumes adb.shell returns bytes here (hence .decode);
    # confirm, since other helpers in this file use the return value as str.
    pid = ad.adb.shell(
        "ps | grep com.android.bluetooth | awk '{print $2}'").decode('ascii')
    # NOTE(review): shell=True with string concatenation; serial and pid
    # come from the device under test, but consider the list form of
    # subprocess without shell=True to avoid shell-injection risk. Also
    # note pid may contain a trailing newline or multiple pids.
    call(["adb -s " + ad.serial + " shell kill " + pid], shell=True)
-
-
def log_energy_info(android_devices, state):
    """Logs energy info of input Android devices.

    Args:
        android_devices: input Android device list to log energy info from.
        state: the input state to log. Usually 'Start' or 'Stop' for logging.

    Returns:
        A logging string of the Bluetooth energy info reported.
    """
    # Energy-info collection is currently disabled; see b/31966929. Only
    # the header line is produced.
    return "{} Energy info collection:\n".format(state)
-
-
def orchestrate_and_verify_pan_connection(pan_dut, panu_dut):
    """Sets up a PAN connection between two android devices.

    Args:
        pan_dut: the Android device providing tethering services
        panu_dut: the Android device using the internet connection from the
            pan_dut
    Returns:
        True if PAN connection and verification is successful,
        false if unsuccessful.
    """
    pan_dut.droid.bluetoothStartConnectionStateChangeMonitor("")
    panu_dut.droid.bluetoothStartConnectionStateChangeMonitor("")
    for dut in (panu_dut, pan_dut):
        if not bluetooth_enabled_check(dut):
            return False
    pan_dut.droid.bluetoothPanSetBluetoothTethering(True)
    if not pair_pri_to_sec(pan_dut, panu_dut):
        return False
    if not pan_dut.droid.bluetoothPanIsTetheringOn():
        pan_dut.log.error("Failed to enable Bluetooth tethering.")
        return False
    # Magic sleep needed to give the stack time in between bonding and
    # connecting the PAN profile.
    time.sleep(pan_connect_timeout)
    panu_dut.droid.bluetoothConnectBonded(
        pan_dut.droid.bluetoothGetLocalAddress())
    return True
-
-
def orchestrate_bluetooth_socket_connection(
        client_ad,
        server_ad,
        accept_timeout_ms=default_bluetooth_socket_timeout_ms,
        uuid=None):
    """Sets up the Bluetooth Socket connection between two Android devices.

    Args:
        client_ad: the Android device performing the connection.
        server_ad: the Android device accepting the connection.
        accept_timeout_ms: how long the server accepts connections, in ms.
        uuid: optional socket UUID; defaults to the test UUID.
    Returns:
        True if connection was successful, false if unsuccessful.
    """
    server_ad.droid.bluetoothStartPairingHelper()
    client_ad.droid.bluetoothStartPairingHelper()

    socket_uuid = bluetooth_socket_conn_test_uuid if uuid is None else uuid
    server_ad.droid.bluetoothSocketConnBeginAcceptThreadUuid(
        socket_uuid, accept_timeout_ms)
    client_ad.droid.bluetoothSocketConnBeginConnectThreadUuid(
        server_ad.droid.bluetoothGetLocalAddress(), socket_uuid)

    # Poll until the client reports an active connection or we time out.
    # (Replaces the previous unused `result` local and redundant
    # `test_result` flag shuffling with a direct early return.)
    end_time = time.time() + bt_default_timeout
    while time.time() < end_time:
        if len(client_ad.droid.bluetoothSocketConnActiveConnections()) > 0:
            client_ad.log.info("Bluetooth socket Client Connection Active")
            return True
        time.sleep(1)

    client_ad.log.error("Failed to establish a Bluetooth socket connection")
    return False
-
-
def orchestrate_rfcomm_connection(client_ad,
                                  server_ad,
                                  accept_timeout_ms=default_rfcomm_timeout_ms,
                                  uuid=None):
    """Sets up the RFCOMM connection between two Android devices.

    Thin wrapper around orchestrate_bluetooth_socket_connection using the
    default RFCOMM UUID when none is given.

    Args:
        client_ad: the Android device performing the connection.
        server_ad: the Android device accepting the connection.
        accept_timeout_ms: how long the server accepts connections, in ms.
        uuid: optional socket UUID; defaults to the RFCOMM default UUID.
    Returns:
        True if connection was successful, false if unsuccessful.
    """
    rfcomm_uuid = bt_rfcomm_uuids['default_uuid'] if uuid is None else uuid
    return orchestrate_bluetooth_socket_connection(client_ad, server_ad,
                                                   accept_timeout_ms,
                                                   rfcomm_uuid)
-
-
def pair_pri_to_sec(pri_ad, sec_ad, attempts=2, auto_confirm=True):
    """Pairs the primary droid to the secondary droid, retrying on failure.

    Between attempts, existing bonds on both devices are cleared.

    Args:
        pri_ad: Android device initiating connection
        sec_ad: Android device accepting connection
        attempts: Number of attempts to try until failure.
        auto_confirm: Auto confirm passkey match for both devices

    Returns:
        True on pairing success, False otherwise.
    """
    pri_ad.droid.bluetoothStartConnectionStateChangeMonitor(
        sec_ad.droid.bluetoothGetLocalAddress())
    for attempt in range(attempts):
        if _pair_pri_to_sec(pri_ad, sec_ad, auto_confirm):
            return True
        # Wait 2 seconds before unbound
        time.sleep(2)
        if not clear_bonded_devices(pri_ad):
            log.error(
                "Failed to clear bond for primary device at attempt {}".format(
                    str(attempt)))
            return False
        if not clear_bonded_devices(sec_ad):
            log.error("Failed to clear bond for secondary device at attempt {}".
                      format(str(attempt)))
            return False
        # Wait 2 seconds after unbound
        time.sleep(2)
    log.error("pair_pri_to_sec failed to connect after {} attempts".format(
        str(attempts)))
    return False
-
-
def _pair_pri_to_sec(pri_ad, sec_ad, auto_confirm):
    """Single bonding attempt from pri_ad to sec_ad.

    Makes sec_ad discoverable, starts pairing helpers on both devices,
    kicks off discovery-and-bond from pri_ad, then polls pri_ad's
    bonded-device list until the bond appears or bt_default_timeout
    expires. The timeout is based on how much time it would take for two
    devices to pair once pri_ad starts seeing devices.

    Args:
        pri_ad: Android device initiating the bond.
        sec_ad: Android device accepting the bond.
        auto_confirm: Auto confirm passkey match for both devices.

    Returns:
        True if the devices bonded, False otherwise.
    """
    pri_droid = pri_ad.droid
    sec_droid = sec_ad.droid
    pri_ad.ed.clear_all_events()
    sec_ad.ed.clear_all_events()
    log.info("Bonding device {} to {}".format(
        pri_droid.bluetoothGetLocalAddress(),
        sec_droid.bluetoothGetLocalAddress()))
    sec_droid.bluetoothMakeDiscoverable(bt_default_timeout)
    target_address = sec_droid.bluetoothGetLocalAddress()
    # Log message typo fixed ("paring" -> "pairing").
    log.debug("Starting pairing helper on each device")
    pri_droid.bluetoothStartPairingHelper(auto_confirm)
    sec_droid.bluetoothStartPairingHelper(auto_confirm)
    pri_ad.log.info("Primary device starting discovery and executing bond")
    # Return value intentionally ignored; bonding success is verified by
    # polling the bonded-device list below. (Removed the unused `result`
    # and `bonded` locals.)
    pri_droid.bluetoothDiscoverAndBond(target_address)
    if not auto_confirm:
        if not _wait_for_passkey_match(pri_ad, sec_ad):
            return False
    # Loop until we have bonded successfully or timeout.
    end_time = time.time() + bt_default_timeout
    pri_ad.log.info("Verifying devices are bonded")
    while time.time() < end_time:
        for device in pri_droid.bluetoothGetBondedDevices():
            if device['address'] == target_address:
                pri_ad.log.info("Successfully bonded to device")
                return True
        time.sleep(0.1)
    # Timed out trying to bond.
    pri_ad.log.info("Failed to bond devices.")
    return False
-
-
-def reset_bluetooth(android_devices):
-    """Resets Bluetooth state of input Android device list.
-
-    Args:
-        android_devices: The Android device list to reset Bluetooth state on.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    for a in android_devices:
-        droid, ed = a.droid, a.ed
-        a.log.info("Reset state of bluetooth on device.")
-        if droid.bluetoothCheckState() is True:
-            droid.bluetoothToggleState(False)
-            expected_bluetooth_off_event_name = bluetooth_off
-            try:
-                ed.pop_event(expected_bluetooth_off_event_name,
-                             bt_default_timeout)
-            except Exception:
-                a.log.error("Failed to toggle Bluetooth off.")
-                return False
-        # temp sleep for b/17723234
-        time.sleep(3)
-        if not bluetooth_enabled_check(a):
-            return False
-    return True
-
-
-def scan_and_verify_n_advertisements(scn_ad, max_advertisements):
-    """Verify that input number of advertisements can be found from the scanning
-    Android device.
-
-    Args:
-        scn_ad: The Android device to start LE scanning on.
-        max_advertisements: The number of advertisements the scanner expects to
-        find.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    test_result = False
-    address_list = []
-    filter_list = scn_ad.droid.bleGenFilterList()
-    scn_ad.droid.bleBuildScanFilter(filter_list)
-    scan_settings = scn_ad.droid.bleBuildScanSetting()
-    scan_callback = scn_ad.droid.bleGenScanCallback()
-    scn_ad.droid.bleStartBleScan(filter_list, scan_settings, scan_callback)
-    start_time = time.time()
-    while (start_time + bt_default_timeout) > time.time():
-        event = None
-        try:
-            event = scn_ad.ed.pop_event(scan_result.format(scan_callback),
-                                        bt_default_timeout)
-        except Empty as error:
-            raise BtTestUtilsError(
-                "Failed to find scan event: {}".format(error))
-        address = event['data']['Result']['deviceInfo']['address']
-        if address not in address_list:
-            address_list.append(address)
-        if len(address_list) == max_advertisements:
-            test_result = True
-            break
-    scn_ad.droid.bleStopBleScan(scan_callback)
-    return test_result
-
-
-def set_bluetooth_codec(android_device,
-                        codec_type,
-                        sample_rate,
-                        bits_per_sample,
-                        channel_mode,
-                        codec_specific_1=0):
-    """Sets the A2DP codec configuration on the AndroidDevice.
-
-    Args:
-        android_device (antlion.controllers.android_device.AndroidDevice): the
-            android device for which to switch the codec.
-        codec_type (str): the desired codec type. Must be a key in
-            bt_constants.codec_types.
-        sample_rate (str): the desired sample rate. Must be a key in
-            bt_constants.sample_rates.
-        bits_per_sample (str): the desired bits per sample. Must be a key in
-            bt_constants.bits_per_samples.
-        channel_mode (str): the desired channel mode. Must be a key in
-            bt_constants.channel_modes.
-        codec_specific_1 (int): the desired bit rate (quality) for LDAC codec.
-    Returns:
-        bool: True if the codec config was successfully changed to the desired
-            values. Else False.
-    """
-    message = ("Set Android Device A2DP Bluetooth codec configuration:\n"
-               "\tCodec: {codec_type}\n"
-               "\tSample Rate: {sample_rate}\n"
-               "\tBits per Sample: {bits_per_sample}\n"
-               "\tChannel Mode: {channel_mode}".format(
-                   codec_type=codec_type,
-                   sample_rate=sample_rate,
-                   bits_per_sample=bits_per_sample,
-                   channel_mode=channel_mode))
-    android_device.log.info(message)
-
-    # Send SL4A command
-    droid, ed = android_device.droid, android_device.ed
-    if not droid.bluetoothA2dpSetCodecConfigPreference(
-            codec_types[codec_type], sample_rates[str(sample_rate)],
-            bits_per_samples[str(bits_per_sample)], channel_modes[channel_mode],
-            codec_specific_1):
-        android_device.log.warning("SL4A command returned False. Codec was not "
-                                   "changed.")
-    else:
-        try:
-            ed.pop_event(bluetooth_a2dp_codec_config_changed,
-                         bt_default_timeout)
-        except Exception:
-            android_device.log.warning("SL4A event not registered. Codec "
-                                       "may not have been changed.")
-
-    # Validate codec value through ADB
-    # TODO (aidanhb): validate codec more robustly using SL4A
-    command = "dumpsys bluetooth_manager | grep -i 'current codec'"
-    out = android_device.adb.shell(command)
-    split_out = out.split(": ")
-    if len(split_out) != 2:
-        android_device.log.warning("Could not verify codec config change "
-                                   "through ADB.")
-    elif split_out[1].strip().upper() != codec_type:
-        android_device.log.error("Codec config was not changed.\n"
-                                 "\tExpected codec: {exp}\n"
-                                 "\tActual codec: {act}".format(
-                                     exp=codec_type, act=split_out[1].strip()))
-        return False
-    android_device.log.info("Bluetooth codec successfully changed.")
-    return True
-
-
-def set_bt_scan_mode(ad, scan_mode_value):
-    """Set Android device's Bluetooth scan mode.
-
-    Args:
-        ad: The Android device to set the scan mode on.
-        scan_mode_value: The value to set the scan mode to.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    droid, ed = ad.droid, ad.ed
-    if scan_mode_value == bt_scan_mode_types['state_off']:
-        disable_bluetooth(droid)
-        scan_mode = droid.bluetoothGetScanMode()
-        reset_bluetooth([ad])
-        if scan_mode != scan_mode_value:
-            return False
-    elif scan_mode_value == bt_scan_mode_types['none']:
-        droid.bluetoothMakeUndiscoverable()
-        scan_mode = droid.bluetoothGetScanMode()
-        if scan_mode != scan_mode_value:
-            return False
-    elif scan_mode_value == bt_scan_mode_types['connectable']:
-        droid.bluetoothMakeUndiscoverable()
-        droid.bluetoothMakeConnectable()
-        scan_mode = droid.bluetoothGetScanMode()
-        if scan_mode != scan_mode_value:
-            return False
-    elif (scan_mode_value == bt_scan_mode_types['connectable_discoverable']):
-        droid.bluetoothMakeDiscoverable()
-        scan_mode = droid.bluetoothGetScanMode()
-        if scan_mode != scan_mode_value:
-            return False
-    else:
-        # invalid scan mode
-        return False
-    return True
-
-
-def set_device_name(droid, name):
-    """Set and check Bluetooth local name on input droid object.
-
-    Args:
-        droid: Droid object to set local name on.
-        name: the Bluetooth local name to set.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    droid.bluetoothSetLocalName(name)
-    time.sleep(2)
-    droid_name = droid.bluetoothGetLocalName()
-    if droid_name != name:
-        return False
-    return True
-
-
-def set_profile_priority(host_ad, client_ad, profiles, priority):
-    """Sets the priority of said profile(s) on host_ad for client_ad"""
-    for profile in profiles:
-        host_ad.log.info("Profile {} on {} for {} set to priority {}".format(
-            profile, host_ad.droid.bluetoothGetLocalName(),
-            client_ad.droid.bluetoothGetLocalAddress(), priority.value))
-        if bt_profile_constants['a2dp_sink'] == profile:
-            host_ad.droid.bluetoothA2dpSinkSetPriority(
-                client_ad.droid.bluetoothGetLocalAddress(), priority.value)
-        elif bt_profile_constants['headset_client'] == profile:
-            host_ad.droid.bluetoothHfpClientSetPriority(
-                client_ad.droid.bluetoothGetLocalAddress(), priority.value)
-        elif bt_profile_constants['pbap_client'] == profile:
-            host_ad.droid.bluetoothPbapClientSetPriority(
-                client_ad.droid.bluetoothGetLocalAddress(), priority.value)
-        else:
-            host_ad.log.error(
-                "Profile {} not yet supported for priority settings".format(
-                    profile))
-
-
-def setup_multiple_devices_for_bt_test(android_devices):
-    """A common setup routine for Bluetooth on input Android device list.
-
-    Things this function sets up:
-    1. Resets Bluetooth
-    2. Set Bluetooth local name to random string of size 4
-    3. Disable BLE background scanning.
-    4. Enable Bluetooth snoop logging.
-
-    Args:
-        android_devices: Android device list to setup Bluetooth on.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    log.info("Setting up Android Devices")
-    # TODO: Temp fix for an selinux error.
-    for ad in android_devices:
-        ad.adb.shell("setenforce 0")
-    threads = []
-    try:
-        for a in android_devices:
-            thread = threading.Thread(target=factory_reset_bluetooth,
-                                      args=([[a]]))
-            threads.append(thread)
-            thread.start()
-        for t in threads:
-            t.join()
-
-        for a in android_devices:
-            d = a.droid
-            # TODO: Create specific RPC command to instantiate
-            # BluetoothConnectionFacade. This is just a workaround.
-            d.bluetoothStartConnectionStateChangeMonitor("")
-            setup_result = d.bluetoothSetLocalName(generate_id_by_size(4))
-            if not setup_result:
-                a.log.error("Failed to set device name.")
-                return setup_result
-            d.bluetoothDisableBLE()
-            utils.set_location_service(a, True)
-            bonded_devices = d.bluetoothGetBondedDevices()
-            for b in bonded_devices:
-                a.log.info("Removing bond for device {}".format(b['address']))
-                d.bluetoothUnbond(b['address'])
-        for a in android_devices:
-            a.adb.shell("setprop persist.bluetooth.btsnooplogmode full")
-            getprop_result = a.adb.shell(
-                "getprop persist.bluetooth.btsnooplogmode") == "full"
-            if not getprop_result:
-                a.log.warning("Failed to enable Bluetooth Hci Snoop Logging.")
-    except Exception as err:
-        log.error("Something went wrong in multi device setup: {}".format(err))
-        return False
-    return setup_result
-
-
-def setup_n_advertisements(adv_ad, num_advertisements):
-    """Setup input number of advertisements on input Android device.
-
-    Args:
-        adv_ad: The Android device to start LE advertisements on.
-        num_advertisements: The number of advertisements to start.
-
-    Returns:
-        advertise_callback_list: List of advertisement callback ids.
-    """
-    adv_ad.droid.bleSetAdvertiseSettingsAdvertiseMode(
-        ble_advertise_settings_modes['low_latency'])
-    advertise_data = adv_ad.droid.bleBuildAdvertiseData()
-    advertise_settings = adv_ad.droid.bleBuildAdvertiseSettings()
-    advertise_callback_list = []
-    for i in range(num_advertisements):
-        advertise_callback = adv_ad.droid.bleGenBleAdvertiseCallback()
-        advertise_callback_list.append(advertise_callback)
-        adv_ad.droid.bleStartBleAdvertising(advertise_callback, advertise_data,
-                                            advertise_settings)
-        try:
-            adv_ad.ed.pop_event(adv_succ.format(advertise_callback),
-                                bt_default_timeout)
-            adv_ad.log.info("Advertisement {} started.".format(i + 1))
-        except Empty as error:
-            adv_ad.log.error("Advertisement {} failed to start.".format(i + 1))
-            raise BtTestUtilsError(
-                "Test failed with Empty error: {}".format(error))
-    return advertise_callback_list
-
-
-def take_btsnoop_log(ad, testcase, testname):
-    """Grabs the btsnoop_hci log on a device and stores it in the log directory
-    of the test class.
-
-    If you want grab the btsnoop_hci log, call this function with android_device
-    objects in on_fail. Bug report takes a relative long time to take, so use
-    this cautiously.
-
-    Args:
-        ad: The android_device instance to take bugreport on.
-        testcase: Name of the test calss that triggered this snoop log.
-        testname: Name of the test case that triggered this bug report.
-    """
-    testname = "".join(x for x in testname if x.isalnum())
-    serial = ad.serial
-    device_model = ad.droid.getBuildModel()
-    device_model = device_model.replace(" ", "")
-    out_name = ','.join((testname, device_model, serial))
-    snoop_path = os.path.join(ad.device_log_path, 'BluetoothSnoopLogs')
-    os.makedirs(snoop_path, exist_ok=True)
-    cmd = ''.join(("adb -s ", serial, " pull ", btsnoop_log_path_on_device, " ",
-                   snoop_path + '/' + out_name, ".btsnoop_hci.log"))
-    exe_cmd(cmd)
-    try:
-        cmd = ''.join(
-            ("adb -s ", serial, " pull ", btsnoop_last_log_path_on_device, " ",
-             snoop_path + '/' + out_name, ".btsnoop_hci.log.last"))
-        exe_cmd(cmd)
-    except Exception as err:
-        testcase.log.info(
-            "File does not exist {}".format(btsnoop_last_log_path_on_device))
-
-
-def take_btsnoop_logs(android_devices, testcase, testname):
-    """Pull btsnoop logs from an input list of android devices.
-
-    Args:
-        android_devices: the list of Android devices to pull btsnoop logs from.
-        testcase: Name of the test calss that triggered this snoop log.
-        testname: Name of the test case that triggered this bug report.
-    """
-    for a in android_devices:
-        take_btsnoop_log(a, testcase, testname)
-
-
-def teardown_n_advertisements(adv_ad, num_advertisements,
-                              advertise_callback_list):
-    """Stop input number of advertisements on input Android device.
-
-    Args:
-        adv_ad: The Android device to stop LE advertisements on.
-        num_advertisements: The number of advertisements to stop.
-        advertise_callback_list: The list of advertisement callbacks to stop.
-
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    for n in range(num_advertisements):
-        adv_ad.droid.bleStopBleAdvertising(advertise_callback_list[n])
-    return True
-
-
-def verify_server_and_client_connected(client_ad, server_ad, log=True):
-    """Verify that input server and client Android devices are connected.
-
-    This code is under the assumption that there will only be
-    a single connection.
-
-    Args:
-        client_ad: the Android device to check number of active connections.
-        server_ad: the Android device to check number of active connections.
-
-    Returns:
-        True both server and client have at least 1 active connection,
-        false if unsuccessful.
-    """
-    test_result = True
-    if len(server_ad.droid.bluetoothSocketConnActiveConnections()) == 0:
-        if log:
-            server_ad.log.error("No socket connections found on server.")
-        test_result = False
-    if len(client_ad.droid.bluetoothSocketConnActiveConnections()) == 0:
-        if log:
-            client_ad.log.error("No socket connections found on client.")
-        test_result = False
-    return test_result
-
-
-def wait_for_bluetooth_manager_state(droid,
-                                     state=None,
-                                     timeout=10,
-                                     threshold=5):
-    """ Waits for BlueTooth normalized state or normalized explicit state
-    args:
-        droid: droid device object
-        state: expected BlueTooth state
-        timeout: max timeout threshold
-        threshold: list len of bt state
-    Returns:
-        True if successful, false if unsuccessful.
-    """
-    all_states = []
-    get_state = lambda: droid.bluetoothGetLeState()
-    start_time = time.time()
-    while time.time() < start_time + timeout:
-        all_states.append(get_state())
-        if len(all_states) >= threshold:
-            # for any normalized state
-            if state is None:
-                if len(set(all_states[-threshold:])) == 1:
-                    log.info("State normalized {}".format(
-                        set(all_states[-threshold:])))
-                    return True
-            else:
-                # explicit check against normalized state
-                if set([state]).issubset(all_states[-threshold:]):
-                    return True
-        time.sleep(0.5)
-    log.error(
-        "Bluetooth state fails to normalize" if state is None else
-        "Failed to match bluetooth state, current state {} expected state {}".
-        format(get_state(), state))
-    return False
-
-
-def _wait_for_passkey_match(pri_ad, sec_ad):
-    pri_pin, sec_pin = -1, 1
-    pri_variant, sec_variant = -1, 1
-    pri_pairing_req, sec_pairing_req = None, None
-    try:
-        pri_pairing_req = pri_ad.ed.pop_event(
-            event_name="BluetoothActionPairingRequest",
-            timeout=bt_default_timeout)
-        pri_variant = pri_pairing_req["data"]["PairingVariant"]
-        pri_pin = pri_pairing_req["data"]["Pin"]
-        pri_ad.log.info("Primary device received Pin: {}, Variant: {}".format(
-            pri_pin, pri_variant))
-        sec_pairing_req = sec_ad.ed.pop_event(
-            event_name="BluetoothActionPairingRequest",
-            timeout=bt_default_timeout)
-        sec_variant = sec_pairing_req["data"]["PairingVariant"]
-        sec_pin = sec_pairing_req["data"]["Pin"]
-        sec_ad.log.info("Secondary device received Pin: {}, Variant: {}".format(
-            sec_pin, sec_variant))
-    except Empty as err:
-        log.error("Wait for pin error: {}".format(err))
-        log.error("Pairing request state, Primary: {}, Secondary: {}".format(
-            pri_pairing_req, sec_pairing_req))
-        return False
-    if pri_variant == sec_variant == pairing_variant_passkey_confirmation:
-        confirmation = pri_pin == sec_pin
-        if confirmation:
-            log.info("Pairing code matched, accepting connection")
-        else:
-            log.info("Pairing code mismatched, rejecting connection")
-        pri_ad.droid.eventPost("BluetoothActionPairingRequestUserConfirm",
-                               str(confirmation))
-        sec_ad.droid.eventPost("BluetoothActionPairingRequestUserConfirm",
-                               str(confirmation))
-        if not confirmation:
-            return False
-    elif pri_variant != sec_variant:
-        log.error("Pairing variant mismatched, abort connection")
-        return False
-    return True
-
-
-def write_read_verify_data(client_ad, server_ad, msg, binary=False):
-    """Verify that the client wrote data to the server Android device correctly.
-
-    Args:
-        client_ad: the Android device to perform the write.
-        server_ad: the Android device to read the data written.
-        msg: the message to write.
-        binary: if the msg arg is binary or not.
-
-    Returns:
-        True if the data written matches the data read, false if not.
-    """
-    client_ad.log.info("Write message.")
-    try:
-        if binary:
-            client_ad.droid.bluetoothSocketConnWriteBinary(msg)
-        else:
-            client_ad.droid.bluetoothSocketConnWrite(msg)
-    except Exception as err:
-        client_ad.log.error("Failed to write data: {}".format(err))
-        return False
-    server_ad.log.info("Read message.")
-    try:
-        if binary:
-            read_msg = server_ad.droid.bluetoothSocketConnReadBinary().rstrip(
-                "\r\n")
-        else:
-            read_msg = server_ad.droid.bluetoothSocketConnRead()
-    except Exception as err:
-        server_ad.log.error("Failed to read data: {}".format(err))
-        return False
-    log.info("Verify message.")
-    if msg != read_msg:
-        log.error("Mismatch! Read: {}, Expected: {}".format(read_msg, msg))
-        return False
-    return True
-
-
-class MediaControlOverSl4a(object):
-    """Media control using sl4a facade for general purpose.
-
-    """
-
-    def __init__(self, android_device, music_file):
-        """Initialize the media_control class.
-
-        Args:
-            android_dut: android_device object
-            music_file: location of the music file
-        """
-        self.android_device = android_device
-        self.music_file = music_file
-
-    def play(self):
-        """Play media.
-
-        """
-        self.android_device.droid.mediaPlayOpen('file://%s' % self.music_file,
-                                                'default', True)
-        playing = self.android_device.droid.mediaIsPlaying()
-        asserts.assert_true(playing,
-                            'Failed to play music %s' % self.music_file)
-
-    def pause(self):
-        """Pause media.
-
-        """
-        self.android_device.droid.mediaPlayPause('default')
-        paused = not self.android_device.droid.mediaIsPlaying()
-        asserts.assert_true(paused,
-                            'Failed to pause music %s' % self.music_file)
-
-    def resume(self):
-        """Resume media.
-
-        """
-        self.android_device.droid.mediaPlayStart('default')
-        playing = self.android_device.droid.mediaIsPlaying()
-        asserts.assert_true(playing,
-                            'Failed to play music %s' % self.music_file)
-
-    def stop(self):
-        """Stop media.
-
-        """
-        self.android_device.droid.mediaPlayStop('default')
-        stopped = not self.android_device.droid.mediaIsPlaying()
-        asserts.assert_true(stopped,
-                            'Failed to stop music %s' % self.music_file)
diff --git a/src/antlion/test_utils/bt/bta_lib.py b/src/antlion/test_utils/bt/bta_lib.py
deleted file mode 100644
index 60e0980..0000000
--- a/src/antlion/test_utils/bt/bta_lib.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Bluetooth adapter libraries
-"""
-
-from antlion.test_utils.bt.bt_constants import bt_scan_mode_types
-from antlion.test_utils.bt.bt_test_utils import set_bt_scan_mode
-
-import pprint
-
-
-class BtaLib():
-    def __init__(self, log, dut, target_mac_address=None):
-        self.advertisement_list = []
-        self.dut = dut
-        self.log = log
-        self.target_mac_addr = target_mac_address
-
-    def set_target_mac_addr(self, mac_addr):
-        self.target_mac_addr = mac_addr
-
-    def set_scan_mode(self, scan_mode):
-        """Set the Scan mode of the Bluetooth Adapter"""
-        set_bt_scan_mode(self.dut, bt_scan_mode_types[scan_mode])
-
-    def set_device_name(self, line):
-        """Set Bluetooth Adapter Name"""
-        self.dut.droid.bluetoothSetLocalName(line)
-
-    def enable(self):
-        """Enable Bluetooth Adapter"""
-        self.dut.droid.bluetoothToggleState(True)
-
-    def disable(self):
-        """Disable Bluetooth Adapter"""
-        self.dut.droid.bluetoothToggleState(False)
-
-    def init_bond(self):
-        """Initiate bond to PTS device"""
-        self.dut.droid.bluetoothDiscoverAndBond(self.target_mac_addr)
-
-    def start_discovery(self):
-        """Start BR/EDR Discovery"""
-        self.dut.droid.bluetoothStartDiscovery()
-
-    def stop_discovery(self):
-        """Stop BR/EDR Discovery"""
-        self.dut.droid.bluetoothCancelDiscovery()
-
-    def get_discovered_devices(self):
-        """Get Discovered Br/EDR Devices"""
-        if self.dut.droid.bluetoothIsDiscovering():
-            self.dut.droid.bluetoothCancelDiscovery()
-        self.log.info(
-            pprint.pformat(self.dut.droid.bluetoothGetDiscoveredDevices()))
-
-    def bond(self):
-        """Bond to PTS device"""
-        self.dut.droid.bluetoothBond(self.target_mac_addr)
-
-    def disconnect(self):
-        """BTA disconnect"""
-        self.dut.droid.bluetoothDisconnectConnected(self.target_mac_addr)
-
-    def unbond(self):
-        """Unbond from PTS device"""
-        self.dut.droid.bluetoothUnbond(self.target_mac_addr)
-
-    def start_pairing_helper(self, line):
-        """Start or stop Bluetooth Pairing Helper"""
-        if line:
-            self.dut.droid.bluetoothStartPairingHelper(bool(line))
-        else:
-            self.dut.droid.bluetoothStartPairingHelper()
-
-    def push_pairing_pin(self, line):
-        """Push pairing pin to the Android Device"""
-        self.dut.droid.eventPost("BluetoothActionPairingRequestUserConfirm",
-                                 line)
-
-    def get_pairing_pin(self):
-        """Get pairing PIN"""
-        self.log.info(
-            self.dut.ed.pop_event("BluetoothActionPairingRequest", 1))
-
-    def fetch_uuids_with_sdp(self):
-        """BTA fetch UUIDS with SDP"""
-        self.log.info(
-            self.dut.droid.bluetoothFetchUuidsWithSdp(self.target_mac_addr))
-
-    def connect_profiles(self):
-        """Connect available profiles"""
-        self.dut.droid.bluetoothConnectBonded(self.target_mac_addr)
-
-    def tts_speak(self):
-        """Open audio channel by speaking characters"""
-        self.dut.droid.ttsSpeak(self.target_mac_addr)
diff --git a/src/antlion/test_utils/bt/config_lib.py b/src/antlion/test_utils/bt/config_lib.py
deleted file mode 100644
index c1db8f5..0000000
--- a/src/antlion/test_utils/bt/config_lib.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Bluetooth Config Pusher
-"""
-
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_mtu
-from antlion.test_utils.bt.bt_gatt_utils import log_gatt_server_uuids
-
-import os
-
-
-class ConfigLib():
-    bluetooth_config_path = "/system/etc/bluetooth/bt_stack.conf"
-    conf_path = "{}/configs".format(os.path.dirname(
-        os.path.realpath(__file__)))
-    reset_config_path = "{}/bt_stack.conf".format(conf_path)
-    non_bond_config_path = "{}/non_bond_bt_stack.conf".format(conf_path)
-    disable_mitm_config_path = "{}/dis_mitm_bt_stack.conf".format(conf_path)
-
-    def __init__(self, log, dut):
-        self.dut = dut
-        self.log = log
-
-    def _reset_bluetooth(self):
-        self.dut.droid.bluetoothToggleState(False)
-        self.dut.droid.bluetoothToggleState(True)
-
-    def reset(self):
-        self.dut.adb.push("{} {}".format(self.reset_config_path,
-                                         self.bluetooth_config_path))
-        self._reset_bluetooth()
-
-    def set_nonbond(self):
-        self.dut.adb.push("{} {}".format(self.non_bond_config_path,
-                                         self.bluetooth_config_path))
-        self._reset_bluetooth()
-
-    def set_disable_mitm(self):
-        self.dut.adb.push("{} {}".format(self.disable_mitm_config_path,
-                                         self.bluetooth_config_path))
-        self._reset_bluetooth()
diff --git a/src/antlion/test_utils/bt/configs/bt_stack.conf b/src/antlion/test_utils/bt/configs/bt_stack.conf
deleted file mode 100644
index 4bcf15a..0000000
--- a/src/antlion/test_utils/bt/configs/bt_stack.conf
+++ /dev/null
@@ -1,29 +0,0 @@
-# Enable trace level reconfiguration function
-# Must be present before any TRC_ trace level settings
-TraceConf=true
-
-# Trace level configuration
-#   BT_TRACE_LEVEL_NONE    0    ( No trace messages to be generated )
-#   BT_TRACE_LEVEL_ERROR   1    ( Error condition trace messages )
-#   BT_TRACE_LEVEL_WARNING 2    ( Warning condition trace messages )
-#   BT_TRACE_LEVEL_API     3    ( API traces )
-#   BT_TRACE_LEVEL_EVENT   4    ( Debug messages for events )
-#   BT_TRACE_LEVEL_DEBUG   5    ( Full debug messages )
-#   BT_TRACE_LEVEL_VERBOSE 6    ( Verbose messages ) - Currently supported for TRC_BTAPP only.
-TRC_BTM=5
-TRC_HCI=5
-TRC_L2CAP=5
-TRC_RFCOMM=5
-TRC_OBEX=5
-TRC_AVCT=5
-TRC_AVDT=5
-TRC_AVRC=5
-TRC_AVDT_SCB=5
-TRC_AVDT_CCB=5
-TRC_A2D=2
-TRC_SDP=5
-TRC_GATT=5
-TRC_SMP=5
-TRC_BTAPP=5
-TRC_BTIF=5
-
diff --git a/src/antlion/test_utils/bt/configs/dis_mitm_bt_stack.conf b/src/antlion/test_utils/bt/configs/dis_mitm_bt_stack.conf
deleted file mode 100644
index 120fc1e..0000000
--- a/src/antlion/test_utils/bt/configs/dis_mitm_bt_stack.conf
+++ /dev/null
@@ -1,30 +0,0 @@
-# Enable trace level reconfiguration function
-# Must be present before any TRC_ trace level settings
-TraceConf=true
-
-# Trace level configuration
-#   BT_TRACE_LEVEL_NONE    0    ( No trace messages to be generated )
-#   BT_TRACE_LEVEL_ERROR   1    ( Error condition trace messages )
-#   BT_TRACE_LEVEL_WARNING 2    ( Warning condition trace messages )
-#   BT_TRACE_LEVEL_API     3    ( API traces )
-#   BT_TRACE_LEVEL_EVENT   4    ( Debug messages for events )
-#   BT_TRACE_LEVEL_DEBUG   5    ( Full debug messages )
-#   BT_TRACE_LEVEL_VERBOSE 6    ( Verbose messages ) - Currently supported for TRC_BTAPP only.
-TRC_BTM=5
-TRC_HCI=5
-TRC_L2CAP=5
-TRC_RFCOMM=5
-TRC_OBEX=5
-TRC_AVCT=5
-TRC_AVDT=5
-TRC_AVRC=5
-TRC_AVDT_SCB=5
-TRC_AVDT_CCB=5
-TRC_A2D=2
-TRC_SDP=5
-TRC_GATT=5
-TRC_SMP=5
-TRC_BTAPP=5
-TRC_BTIF=5
-
-PTS_SmpOptions=0x9,0x4,0xf,0xf,0x10
diff --git a/src/antlion/test_utils/bt/configs/non_bond_bt_stack.conf b/src/antlion/test_utils/bt/configs/non_bond_bt_stack.conf
deleted file mode 100644
index 3dedf7e..0000000
--- a/src/antlion/test_utils/bt/configs/non_bond_bt_stack.conf
+++ /dev/null
@@ -1,30 +0,0 @@
-# Enable trace level reconfiguration function
-# Must be present before any TRC_ trace level settings
-TraceConf=true
-
-# Trace level configuration
-#   BT_TRACE_LEVEL_NONE    0    ( No trace messages to be generated )
-#   BT_TRACE_LEVEL_ERROR   1    ( Error condition trace messages )
-#   BT_TRACE_LEVEL_WARNING 2    ( Warning condition trace messages )
-#   BT_TRACE_LEVEL_API     3    ( API traces )
-#   BT_TRACE_LEVEL_EVENT   4    ( Debug messages for events )
-#   BT_TRACE_LEVEL_DEBUG   5    ( Full debug messages )
-#   BT_TRACE_LEVEL_VERBOSE 6    ( Verbose messages ) - Currently supported for TRC_BTAPP only.
-TRC_BTM=5
-TRC_HCI=5
-TRC_L2CAP=5
-TRC_RFCOMM=5
-TRC_OBEX=5
-TRC_AVCT=5
-TRC_AVDT=5
-TRC_AVRC=5
-TRC_AVDT_SCB=5
-TRC_AVDT_CCB=5
-TRC_A2D=2
-TRC_SDP=5
-TRC_GATT=5
-TRC_SMP=5
-TRC_BTAPP=5
-TRC_BTIF=5
-
-PTS_SmpOptions=0xC,0x4,0xf,0xf,0x10
diff --git a/src/antlion/test_utils/bt/gatt_test_database.py b/src/antlion/test_utils/bt/gatt_test_database.py
deleted file mode 100644
index 6c452cd..0000000
--- a/src/antlion/test_utils/bt/gatt_test_database.py
+++ /dev/null
@@ -1,1705 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.bt.bt_constants import gatt_characteristic
-from antlion.test_utils.bt.bt_constants import gatt_descriptor
-from antlion.test_utils.bt.bt_constants import gatt_service_types
-from antlion.test_utils.bt.bt_constants import gatt_char_types
-from antlion.test_utils.bt.bt_constants import gatt_characteristic_value_format
-from antlion.test_utils.bt.bt_constants import gatt_char_desc_uuids
-
-STRING_512BYTES = '''
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-11111222223333344444555556666677777888889999900000
-111112222233
-'''
-STRING_50BYTES = '''
-11111222223333344444555556666677777888889999900000
-'''
-STRING_25BYTES = '''
-1111122222333334444455555
-'''
-
-INVALID_SMALL_DATABASE = {
-    'services': [{
-        'uuid': '00001800-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': gatt_char_types['device_name'],
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'instance_id': 0x0003,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'Test Database'
-        }, {
-            'uuid': gatt_char_types['appearance'],
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'instance_id': 0x0005,
-            'value_type': gatt_characteristic_value_format['sint32'],
-            'offset': 0,
-            'value': 17
-        }, {
-            'uuid': gatt_char_types['peripheral_pref_conn'],
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'instance_id': 0x0007
-        }]
-    }, {
-        'uuid': '00001801-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': gatt_char_types['service_changed'],
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'instance_id': 0x0012,
-            'value_type': gatt_characteristic_value_format['byte'],
-            'value': [0x0000],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }]
-        }, {
-            'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'instance_id': 0x0015,
-            'value_type': gatt_characteristic_value_format['byte'],
-            'value': [0x04]
-        }]
-    }]
-}
-
-# Corresponds to the PTS defined LARGE_DB_1
-LARGE_DB_1 = {
-    'services': [
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 7,
-            'characteristics': [{
-                'uuid': '0000b008-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'] |
-                gatt_characteristic['property_extended_props'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x08],
-                'descriptors': [{
-                    'uuid': '0000b015-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                }, {
-                    'uuid': '0000b016-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                }, {
-                    'uuid': '0000b017-0000-1000-8000-00805f9b34fb',
-                    'permissions':
-                    gatt_characteristic['permission_read_encrypted_mitm'],
-                }]
-            }]
-        },
-        {
-            'uuid': '0000a00d-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['secondary'],
-            'handles': 6,
-            'characteristics': [{
-                'uuid': '0000b00c-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_extended_props'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0C],
-            }, {
-                'uuid': '0000b00b-0000-0000-0123-456789abcdef',
-                'properties': gatt_characteristic['property_extended_props'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0B],
-            }]
-        },
-        {
-            'uuid': '0000a00a-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 10,
-            'characteristics': [{
-                'uuid': '0000b001-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x01],
-            }, {
-                'uuid': '0000b002-0000-0000-0123-456789abcdef',
-                'properties': gatt_characteristic['property_extended_props'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            }, {
-                'uuid': '0000b004-0000-0000-0123-456789abcdef',
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            }, {
-                'uuid': '0000b002-0000-0000-0123-456789abcdef',
-                'properties': gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '11111222223333344444555556666677777888889999900000',
-            }, {
-                'uuid': '0000b003-0000-0000-0123-456789abcdef',
-                'properties': gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x03],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 3,
-            'characteristics': [{
-                'uuid': '0000b007-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x07],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 3,
-            'characteristics': [{
-                'uuid': '0000b006-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'] |
-                gatt_characteristic['property_write_no_response'] |
-                gatt_characteristic['property_notify'] |
-                gatt_characteristic['property_indicate'],
-                'permissions': gatt_characteristic['permission_write'] |
-                gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x06],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 12,
-            'characteristics': [
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'] |
-                    gatt_characteristic['property_write'],
-                    'permissions': gatt_characteristic['permission_write'] |
-                    gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                },
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'] |
-                    gatt_characteristic['property_write'],
-                    'permissions': gatt_characteristic['permission_write'] |
-                    gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['server_char_cfg'],
-                        'permissions': gatt_descriptor['permission_read'] |
-                        gatt_descriptor['permission_write'],
-                        'value': gatt_descriptor['disable_notification_value']
-                    }]
-                },
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'properties': 0x0,
-                    'permissions': 0x0,
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                    'descriptors': [{
-                        'uuid': '0000b012-0000-1000-8000-00805f9b34fb',
-                        'permissions': gatt_descriptor['permission_read'] |
-                        gatt_descriptor['permission_write'],
-                        'value': [
-                            0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88,
-                            0x99, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                            0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44,
-                            0x55, 0x66, 0x77, 0x88, 0x99, 0x00, 0x11, 0x22,
-                            0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                            0x11, 0x22, 0x33
-                        ]
-                    }]
-                },
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                    'descriptors': [{
-                        'uuid': '0000b012-0000-1000-8000-00805f9b34fb',
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [
-                            0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88,
-                            0x99, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                            0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44,
-                            0x55, 0x66, 0x77, 0x88, 0x99, 0x00, 0x11, 0x22,
-                            0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                            0x11, 0x22, 0x33
-                        ]
-                    }]
-                },
-            ]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 7,
-            'characteristics': [{
-                'uuid': '0000b005-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_write'] |
-                gatt_characteristic['property_extended_props'],
-                'permissions': gatt_characteristic['permission_write'] |
-                gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x05],
-                'descriptors': [{
-                    'uuid': gatt_char_desc_uuids['char_ext_props'],
-                    'permissions': gatt_descriptor['permission_read'],
-                    'value': [0x03, 0x00]
-                }, {
-                    'uuid': gatt_char_desc_uuids['char_user_desc'],
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x65, 0x66, 0x67, 0x68, 0x69, 0x70, 0x71, 0x72, 0x73,
-                        0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x80, 0x81, 0x82,
-                        0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x90
-                    ]
-                }, {
-                    'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                    'permissions':
-                    gatt_descriptor['permission_read_encrypted_mitm'],
-                    'value': [0x00, 0x01, 0x30, 0x01, 0x11, 0x31]
-                }, {
-                    'uuid': '0000d5d4-0000-0000-0123-456789abcdef',
-                    'permissions': gatt_descriptor['permission_read'],
-                    'value': [0x44]
-                }]
-            }]
-        },
-        {
-            'uuid': '0000a00c-0000-0000-0123-456789abcdef',
-            'type': gatt_service_types['primary'],
-            'handles': 7,
-            'characteristics': [{
-                'uuid': '0000b009-0000-0000-0123-456789abcdef',
-                'enforce_initial_attribute_length': True,
-                'properties': gatt_characteristic['property_write'] |
-                gatt_characteristic['property_extended_props'] |
-                gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_write'] |
-                gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x09],
-                'descriptors': [{
-                    'uuid': gatt_char_desc_uuids['char_ext_props'],
-                    'permissions': gatt_descriptor['permission_read'],
-                    'value': gatt_descriptor['enable_notification_value']
-                }, {
-                    'uuid': '0000d9d2-0000-0000-0123-456789abcdef',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [0x22]
-                }, {
-                    'uuid': '0000d9d3-0000-0000-0123-456789abcdef',
-                    'permissions': gatt_descriptor['permission_write'],
-                    'value': [0x33]
-                }]
-            }]
-        },
-        {
-            'uuid': '0000a00f-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 18,
-            'characteristics': [
-                {
-                    'uuid': '0000b00e-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': "Length is ",
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x19, 0x00, 0x00, 0x30, 0x01, 0x00, 0x00]
-                    }]
-                },
-                {
-                    'uuid': '0000b00f-0000-1000-8000-00805f9b34fb',
-                    'enforce_initial_attribute_length': True,
-                    'properties': gatt_characteristic['property_read'] |
-                    gatt_characteristic['property_write'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x65],
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x04, 0x00, 0x01, 0x27, 0x01, 0x01, 0x00]
-                    }]
-                },
-                {
-                    'uuid': '0000b006-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'] |
-                    gatt_characteristic['property_write'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x34, 0x12],
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x06, 0x00, 0x10, 0x27, 0x01, 0x02, 0x00]
-                    }]
-                },
-                {
-                    'uuid': '0000b007-0000-1000-8000-00805f9b34fb',
-                    'enforce_initial_attribute_length': True,
-                    'properties': gatt_characteristic['property_read'] |
-                    gatt_characteristic['property_write'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04, 0x03, 0x02, 0x01],
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x08, 0x00, 0x17, 0x27, 0x01, 0x03, 0x00]
-                    }]
-                },
-                {
-                    'uuid': '0000b010-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x65, 0x34, 0x12, 0x04, 0x03, 0x02, 0x01],
-                    'descriptors': [{
-                        'uuid': gatt_char_desc_uuids['char_agreg_fmt'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0xa6, 0x00, 0xa9, 0x00, 0xac, 0x00]
-                    }]
-                },
-                {
-                    'uuid': '0000b011-0000-1000-8000-00805f9b34fb',
-                    'properties': gatt_characteristic['write_type_signed']
-                    |  #for some reason 0x40 is not working...
-                    gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x12]
-                }
-            ]
-        },
-        {
-            'uuid': '0000a00c-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 30,
-            'characteristics': [{
-                'uuid': '0000b00a-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0a],
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "111112222233333444445",
-                'descriptors': [{
-                    'uuid': '0000b012-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x11
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "2222233333444445555566",
-                'descriptors': [{
-                    'uuid': '0000b013-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x11, 0x22
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "33333444445555566666777",
-                'descriptors': [{
-                    'uuid': '0000b014-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x11, 0x22, 0x33
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33
-                ],
-                'descriptors': [{
-                    'uuid': '0000b012-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44
-                ],
-                'descriptors': [{
-                    'uuid': '0000b013-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x11, 0x22, 0x33, 0x44, 0x55
-                ],
-                'descriptors': [{
-                    'uuid': '0000b014-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "1111122222333334444455555666667777788888999",
-                'descriptors': [{
-                    'uuid': '0000b012-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "22222333334444455555666667777788888999990000",
-                'descriptors': [{
-                    'uuid': '0000b013-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44
-                    ]
-                }]
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'properties': gatt_characteristic['property_read'] |
-                gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': "333334444455555666667777788888999990000011111",
-                'descriptors': [{
-                    'uuid': '0000b014-0000-1000-8000-00805f9b34fb',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [
-                        0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99,
-                        0x00, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56,
-                        0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34,
-                        0x56, 0x78, 0x90, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66,
-                        0x77, 0x88, 0x99, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55
-                    ]
-                }]
-            }]
-        },
-    ]
-}
-
-# Corresponds to the PTS defined LARGE_DB_2
-LARGE_DB_2 = {
-    'services': [
-        {
-            'uuid': '0000a00c-0000-0000-0123-456789abdcef',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b00a-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0003,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x04],
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0005,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '111112222233333444445',
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0007,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '2222233333444445555566',
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0009,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '33333444445555566666777',
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x000b,
-                'properties': 0x0a0,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '1111122222333334444455555666667777788888999',
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x000d,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '22222333334444455555666667777788888999990000',
-            }, {
-                'uuid': '0000b0002-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x000f,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '333334444455555666667777788888999990000011111',
-            }]
-        },
-        {
-            'uuid': '0000a00c-0000-0000-0123-456789abcdef',
-            'handles': 5,
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b009-0000-0000-0123-456789abcdef',
-                'instance_id': 0x0023,
-                'properties': 0x8a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x09],
-                'descriptors': [{
-                    'uuid': '0000d9d2-0000-0000-0123-456789abcdef',
-                    'permissions': gatt_descriptor['permission_read'] |
-                    gatt_descriptor['permission_write'],
-                    'value': [0x22]
-                }, {
-                    'uuid': '0000d9d3-0000-0000-0123-456789abcdef',
-                    'permissions': gatt_descriptor['permission_write'],
-                    'value': [0x33]
-                }, {
-                    'uuid': gatt_char_desc_uuids['char_ext_props'],
-                    'permissions': gatt_descriptor['permission_write'],
-                    'value': gatt_descriptor['enable_notification_value']
-                }]
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b007-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0012,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x04],
-            }]
-        },
-    ]
-}
-
-DB_TEST = {
-    'services': [{
-        'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-            'properties': 0x02 | 0x08,
-            'permissions': 0x10 | 0x01,
-            'value_type': gatt_characteristic_value_format['byte'],
-            'value': [0x01],
-            'enforce_initial_attribute_length': True,
-            'descriptors': [{
-                'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-                'value': [0x01] * 30
-            }]
-        }, ]
-    }]
-}
-
-PTS_TEST2 = {
-    'services': [{
-        'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [
-            {
-                'uuid': '000018ba-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000060aa-0000-0000-0123-456789abcdef',
-                'properties': 0x02,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x20,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000004d5e-0000-1000-8000-00805f9b34fb',
-                'properties': 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000001b44-0000-1000-8000-00805f9b34fb',
-                'properties': 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000006b98-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08 | 0x10 | 0x04,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000247f-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000247f-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000247f-0000-1000-8000-00805f9b34fb',
-                'properties': 0x00,
-                'permissions': 0x00,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000247f-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000d62-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08 | 0x80,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000002e85-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000004a64-0000-0000-0123-456789abcdef',
-                'properties': 0x02 | 0x08 | 0x80,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000005b4a-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000001c81-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000006b98-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000001b44-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000c55-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '0000014dd-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000c55-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000c55-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000c55-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000c55-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '00000008f-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02,
-                'permissions': 0x10,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000000af2-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [
-                    0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00,
-                    0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x32
-                ],
-            },
-            {
-                'uuid': '000002aad-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000002ab0-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-            {
-                'uuid': '000002ab3-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_512BYTES,
-            },
-        ]
-    }]
-}
-
-PTS_TEST = {
-    'services': [{
-        'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [
-            {
-                'uuid': '000018ba-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_25BYTES,
-            },
-            {
-                'uuid': '000060aa-0000-1000-8000-00805f9b34fb',
-                'properties': 0x02 | 0x08,
-                'permissions': 0x10 | 0x01,
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': STRING_25BYTES,
-            },
-        ]
-    }]
-}
-
-# Corresponds to the PTS defined LARGE_DB_3
-LARGE_DB_3 = {
-    'services': [
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x0003,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                },
-                {
-                    'uuid': '0000b004-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x0013,
-                    'properties': 0x10,
-                    'permissions': 0x17,
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x04],
-                    'descriptors': [
-                        {
-                            'uuid': gatt_char_desc_uuids['char_ext_props'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x09]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['char_user_desc'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['client_char_cfg'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x01, 0x00]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['server_char_cfg'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['char_agreg_fmt'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['char_valid_range'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid':
-                            gatt_char_desc_uuids['external_report_reference'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                        {
-                            'uuid': gatt_char_desc_uuids['report_reference'],
-                            'permissions': gatt_descriptor['permission_read'] |
-                            gatt_descriptor['permission_write'],
-                            'value': [0x22]
-                        },
-                    ]
-                },
-                {
-                    'uuid': gatt_char_types['service_changed'],
-                    'instance_id': 0x0023,
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['appearance'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['peripheral_priv_flag'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['reconnection_address'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['system_id'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['model_number_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['serial_number_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['firmware_revision_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['hardware_revision_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['software_revision_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['manufacturer_name_string'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-                {
-                    'uuid': gatt_char_types['pnp_id'],
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-            ]
-        },
-        {
-            'uuid': '0000a00d-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['secondary'],
-            'handles': 5,
-            'characteristics': [{
-                'uuid': '0000b00c-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0023,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0c],
-            }, {
-                'uuid': '0000b00b-0000-0000-0123-456789abcdef',
-                'instance_id': 0x0025,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0b],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b008-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0032,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x08],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b007-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0042,
-                'properties': gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x07],
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b006-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0052,
-                'properties': 0x3e,
-                'permissions': gatt_characteristic['permission_write_encrypted_mitm'] |
-                gatt_characteristic['permission_read_encrypted_mitm'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x06],
-            }]
-        },
-        {
-            'uuid': '0000a00a-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'handles': 10,
-            'characteristics': [{
-                'uuid': '0000b001-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0074,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x01],
-            }, {
-                'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                'enforce_initial_attribute_length': True,
-                'instance_id': 0x0076,
-                'properties': 0x0a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['string'],
-                'value': '11111222223333344444555556666677777888889999900000',
-            }, {
-                'uuid': '0000b003-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0x0078,
-                'properties': gatt_characteristic['property_write'],
-                'permissions': gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x03],
-            }]
-        },
-        {
-            'uuid': '0000a00c-0000-0000-0123-456789abcdef',
-            'type': gatt_service_types['primary'],
-            'handles': 10,
-            'characteristics': [{
-                'uuid': '0000b009-0000-0000-0123-456789abcdef',
-                'instance_id': 0x0082,
-                'properties': 0x8a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x09],
-                'descriptors': [
-                    {
-                        'uuid': '0000b009-0000-0000-0123-456789abcdef',
-                        'permissions': gatt_descriptor['permission_read'] |
-                        gatt_descriptor['permission_write'],
-                        'value': [0x09]
-                    },
-                    {
-                        'uuid': '0000d9d2-0000-0000-0123-456789abcdef',
-                        'permissions': gatt_descriptor['permission_read'] |
-                        gatt_descriptor['permission_write'],
-                        'value': [0x22]
-                    },
-                    {
-                        'uuid': gatt_char_desc_uuids['char_ext_props'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x01, 0x00]
-                    },
-                    {
-                        'uuid': '0000d9d3-0000-0000-0123-456789abcdef',
-                        'permissions': gatt_descriptor['permission_write'],
-                        'value': [0x22]
-                    },
-                ]
-            }]
-        },
-        {
-            'uuid': '0000a00b-0000-0000-0123-456789abcdef',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b009-0000-0000-0123-456789abcdef',
-                'instance_id': 0x0092,
-                'properties': 0x8a,
-                'permissions': gatt_characteristic['permission_read'] |
-                gatt_characteristic['permission_write'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x05],
-                'descriptors': [
-                    {
-                        'uuid': gatt_char_desc_uuids['char_user_desc'],
-                        'permissions': gatt_descriptor['permission_read'] |
-                        gatt_descriptor['permission_write'],
-                        'value': [0] * 26
-                    },
-                    {
-                        'uuid': gatt_char_desc_uuids['char_ext_props'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x03, 0x00]
-                    },
-                    {
-                        'uuid': '0000d5d4-0000-0000-0123-456789abcdef',
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x44]
-                    },
-                    {
-                        'uuid': gatt_char_desc_uuids['char_fmt_uuid'],
-                        'permissions': gatt_descriptor['permission_read'],
-                        'value': [0x04, 0x00, 0x01, 0x30, 0x01, 0x11, 0x31]
-                    },
-                ]
-            }]
-        },
-        {
-            'uuid': '0000a00c-0000-0000-0123-456789abcdef',
-            'type': gatt_service_types['primary'],
-            'characteristics': [
-                {
-                    'uuid': '0000b00a-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00a2,
-                    'properties': gatt_characteristic['property_read'],
-                    'permissions': gatt_characteristic['permission_read'],
-                    'value_type': gatt_characteristic_value_format['byte'],
-                    'value': [0x0a],
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00a4,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '111112222233333444445',
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00a6,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '2222233333444445555566',
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00a8,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '33333444445555566666777',
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00aa,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '1111122222333334444455555666667777788888999',
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00ac,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '22222333334444455555666667777788888999990000',
-                },
-                {
-                    'uuid': '0000b002-0000-1000-8000-00805f9b34fb',
-                    'instance_id': 0x00ae,
-                    'enforce_initial_attribute_length': True,
-                    'properties': 0x0a,
-                    'permissions': gatt_characteristic['permission_read'] |
-                    gatt_characteristic['permission_write'],
-                    'value_type': gatt_characteristic_value_format['string'],
-                    'value': '333334444455555666667777788888999990000011111',
-                },
-            ]
-        },
-        {
-            'uuid': '0000a00e-0000-1000-8000-00805f9b34fb',
-            'type': gatt_service_types['primary'],
-            'characteristics': [{
-                'uuid': '0000b00d-0000-1000-8000-00805f9b34fb',
-                'instance_id': 0xffff,
-                'properties': gatt_characteristic['property_read'],
-                'permissions': gatt_characteristic['permission_read'],
-                'value_type': gatt_characteristic_value_format['byte'],
-                'value': [0x0d],
-            }]
-        },
-    ]
-}
-
-TEST_DB_1 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'handles': 4,
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002a,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'],
-                'value': [0x01]
-            }]
-        }]
-    }]
-}
-
-TEST_DB_2 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'handles': 4,
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions':
-            gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002a,
-        }, {
-            'uuid': '00002a30-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions':
-            gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002b,
-        }]
-    }]
-}
-
-TEST_DB_3 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'handles': 4,
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002a,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'],
-                'value': [0x01]
-            }, {
-                'uuid': '00002a20-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-                'instance_id': 0x002c,
-                'value': [0x01]
-            }]
-        }, {
-            'uuid': '00002a30-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002b,
-        }]
-    }]
-}
-
-TEST_DB_4 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'handles': 4,
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': "test",
-            'instance_id': 0x002a,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions':
-                gatt_descriptor['permission_read_encrypted_mitm'],
-                'value': [0] * 512
-            }]
-        }]
-    }]
-}
-
-TEST_DB_5 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': 'b2c83efa-34ca-11e6-ac61-9e71128cae77',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['byte'],
-            'value': [0x1],
-            'instance_id': 0x002c,
-            'descriptors': [{
-                'uuid': '00002902-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }]
-        }]
-    }]
-}
-
-TEST_DB_6 = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'handles': 4,
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] | gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'instance_id': 0x002a,
-            'descriptors': [{
-                'uuid': '00002a19-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'],
-                'value': [0x01] * 30
-            }]
-        }]
-    }]
-}
-
-SIMPLE_READ_DESCRIPTOR = {
-    'services': [{
-        'uuid': '0000a00a-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': 'aa7edd5a-4d1d-4f0e-883a-d145616a1630',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'instance_id': 0x002a,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'Test Database',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-                'permissions': gatt_descriptor['permission_read'],
-            }]
-        }]
-    }]
-}
-
-CHARACTERISTIC_PROPERTY_WRITE_NO_RESPONSE = {
-    'services': [{
-        'uuid': '0000a00a-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': 'aa7edd5a-4d1d-4f0e-883a-d145616a1630',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_write'] |
-            gatt_characteristic['permission_read'],
-            'instance_id': 0x0042,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'Test Database'
-        }, {
-            'uuid': 'aa7edd6a-4d1d-4f0e-883a-d145616a1630',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_write'] |
-            gatt_characteristic['permission_read'],
-            'instance_id': 0x004d,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'Test Database'
-        }]
-    }]
-}
-
-GATT_SERVER_DB_MAPPING = {
-    'LARGE_DB_1': LARGE_DB_1,
-    'LARGE_DB_3': LARGE_DB_3,
-    'INVALID_SMALL_DATABASE': INVALID_SMALL_DATABASE,
-    'SIMPLE_READ_DESCRIPTOR': SIMPLE_READ_DESCRIPTOR,
-    'CHARACTERISTIC_PROPERTY_WRITE_NO_RESPONSE':
-    CHARACTERISTIC_PROPERTY_WRITE_NO_RESPONSE,
-    'TEST_DB_1': TEST_DB_1,
-    'TEST_DB_2': TEST_DB_2,
-    'TEST_DB_3': TEST_DB_3,
-    'TEST_DB_4': TEST_DB_4,
-    'TEST_DB_5': TEST_DB_5,
-    'LARGE_DB_3_PLUS': LARGE_DB_3,
-    'DB_TEST': DB_TEST,
-    'PTS_TEST': PTS_TEST,
-    'PTS_TEST2': PTS_TEST2,
-    'TEST_DB_6': TEST_DB_6,
-}
diff --git a/src/antlion/test_utils/bt/gattc_lib.py b/src/antlion/test_utils/bt/gattc_lib.py
deleted file mode 100644
index efd0c4a..0000000
--- a/src/antlion/test_utils/bt/gattc_lib.py
+++ /dev/null
@@ -1,575 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-GATT Client Libraries
-"""
-
-from antlion.test_utils.bt.bt_constants import default_le_connection_interval_ms
-from antlion.test_utils.bt.bt_constants import default_bluetooth_socket_timeout_ms
-from antlion.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_connection
-from antlion.test_utils.bt.bt_gatt_utils import setup_gatt_mtu
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_modes
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import gatt_char_desc_uuids
-from antlion.test_utils.bt.bt_constants import gatt_descriptor
-from antlion.test_utils.bt.bt_constants import gatt_transport
-from antlion.test_utils.bt.bt_constants import le_default_supervision_timeout
-from antlion.test_utils.bt.bt_constants import le_connection_interval_time_step_ms
-from antlion.test_utils.bt.bt_constants import scan_result
-from antlion.test_utils.bt.bt_gatt_utils import log_gatt_server_uuids
-
-import time
-
-
-class GattClientLib():
-    def __init__(self, log, dut, target_mac_addr=None):
-        self.dut = dut
-        self.log = log
-        self.gatt_callback = None
-        self.bluetooth_gatt = None
-        self.discovered_services_index = None
-        self.target_mac_addr = target_mac_addr
-        self.generic_uuid = "0000{}-0000-1000-8000-00805f9b34fb"
-
-    def set_target_mac_addr(self, mac_addr):
-        self.target_mac_addr = mac_addr
-
-    def connect_over_le_based_off_name(self, autoconnect, name):
-        """Perform GATT connection over LE"""
-        self.dut.droid.bleSetScanSettingsScanMode(
-            ble_scan_settings_modes['low_latency'])
-        filter_list = self.dut.droid.bleGenFilterList()
-        scan_settings = self.dut.droid.bleBuildScanSetting()
-        scan_callback = self.dut.droid.bleGenScanCallback()
-        event_name = scan_result.format(scan_callback)
-        self.dut.droid.bleSetScanFilterDeviceName("BLE Rect")
-        self.dut.droid.bleBuildScanFilter(filter_list)
-        self.dut.droid.bleStartBleScan(filter_list, scan_settings,
-                                       scan_callback)
-
-        try:
-            event = self.dut.ed.pop_event(event_name, 10)
-            self.log.info("Found scan result: {}".format(event))
-        except Exception:
-            self.log.info("Didn't find any scan results.")
-        mac_addr = event['data']['Result']['deviceInfo']['address']
-        self.bluetooth_gatt, self.gatt_callback = setup_gatt_connection(
-            self.dut, mac_addr, autoconnect, transport=gatt_transport['le'])
-        self.dut.droid.bleStopBleScan(scan_callback)
-        self.discovered_services_index = None
-
-    def connect_over_le(self, autoconnect):
-        """Perform GATT connection over LE"""
-        self.bluetooth_gatt, self.gatt_callback = setup_gatt_connection(
-            self.dut,
-            self.target_mac_addr,
-            autoconnect,
-            transport=gatt_transport['le'])
-        self.discovered_services_index = None
-
-    def connect_over_bredr(self):
-        """Perform GATT connection over BREDR"""
-        self.bluetooth_gatt, self.gatt_callback = setup_gatt_connection(
-            self.dut,
-            self.target_mac_addr,
-            False,
-            transport=gatt_transport['bredr'])
-
-    def disconnect(self):
-        """Perform GATT disconnect"""
-        cmd = "Disconnect GATT connection"
-        try:
-            disconnect_gatt_connection(self.dut, self.bluetooth_gatt,
-                                       self.gatt_callback)
-        except Exception as err:
-            self.log.info("Cmd {} failed with {}".format(cmd, err))
-        try:
-            self.dut.droid.gattClientClose(self.bluetooth_gatt)
-        except Exception as err:
-            self.log.info("Cmd failed with {}".format(err))
-
-    def _setup_discovered_services_index(self):
-        if not self.discovered_services_index:
-            self.dut.droid.gattClientDiscoverServices(self.bluetooth_gatt)
-            expected_event = gatt_cb_strings['gatt_serv_disc'].format(
-                self.gatt_callback)
-            event = self.dut.ed.pop_event(expected_event, 10)
-            self.discovered_services_index = event['data']['ServicesIndex']
-
-    def read_char_by_uuid(self, line):
-        """GATT client read Characteristic by UUID."""
-        uuid = line
-        if len(line) == 4:
-            uuid = self.generic_uuid.format(line)
-        self.dut.droid.gattClientReadUsingCharacteristicUuid(
-            self.bluetooth_gatt, uuid, 0x0001, 0xFFFF)
-
-    def request_mtu(self, mtu):
-        """Request MTU Change of input value"""
-        setup_gatt_mtu(self.dut, self.bluetooth_gatt, self.gatt_callback,
-                       int(mtu))
-
-    def list_all_uuids(self):
-        """From the GATT Client, discover services and list all services,
-        chars and descriptors
-        """
-        self._setup_discovered_services_index()
-        log_gatt_server_uuids(self.dut, self.discovered_services_index,
-                              self.bluetooth_gatt)
-
-    def discover_services(self):
-        """GATT Client discover services of GATT Server"""
-        self.dut.droid.gattClientDiscoverServices(self.bluetooth_gatt)
-
-    def refresh(self):
-        """Perform Gatt Client Refresh"""
-        self.dut.droid.gattClientRefresh(self.bluetooth_gatt)
-
-    def read_char_by_instance_id(self, id):
-        """From the GATT Client, discover services and list all services,
-        chars and descriptors
-        """
-        if not id:
-            self.log.info("Invalid id")
-            return
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientReadCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index, int(id, 16))
-
-    def write_char_by_instance_id(self, line):
-        """GATT Client Write to Characteristic by instance ID"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def write_char_by_instance_id_value(self, line):
-        """GATT Client Write to Characteristic by instance ID"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        write_value = args[1]
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), [int(write_value)])
-
-    def mod_write_char_by_instance_id(self, line):
-        """GATT Client Write to Char that doesn't have write permission"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientModifyAccessAndWriteCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def write_invalid_char_by_instance_id(self, line):
-        """GATT Client Write to Char that doesn't exists"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteInvalidCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def mod_read_char_by_instance_id(self, line):
-        """GATT Client Read Char that doesn't have write permission"""
-        instance_id = line
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientModifyAccessAndReadCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16))
-
-    def read_invalid_char_by_instance_id(self, line):
-        """GATT Client Read Char that doesn't exists"""
-        instance_id = line
-        self.dut.droid.gattClientReadInvalidCharacteristicByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16))
-
-    def mod_write_desc_by_instance_id(self, line):
-        """GATT Client Write to Desc that doesn't have write permission"""
-        cmd = ""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientModifyAccessAndWriteDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def write_invalid_desc_by_instance_id(self, line):
-        """GATT Client Write to Desc that doesn't exists"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [InstanceId] [Size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteInvalidDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def mod_read_desc_by_instance_id(self, line):
-        """GATT Client Read Desc that doesn't have write permission"""
-        cmd = ""
-        instance_id = line
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientModifyAccessAndReadDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16))
-
-    def read_invalid_desc_by_instance_id(self, line):
-        """GATT Client Read Desc that doesn't exists"""
-        instance_id = line
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientReadInvalidDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16))
-
-    def mod_read_char_by_uuid_and_instance_id(self, line):
-        """GATT Client Read Char that doesn't have write permission"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [uuid] [instance_id]")
-            return
-        uuid = args[0]
-        instance_id = args[1]
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientModifyAccessAndReadCharacteristicByUuidAndInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), self.generic_uuid.format(uuid))
-
-    def read_invalid_char_by_uuid(self, line):
-        """GATT Client Read Char that doesn't exists"""
-        uuid = line
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientReadInvalidCharacteristicByUuid(
-            self.bluetooth_gatt, self.discovered_services_index,
-            self.generic_uuid.format(uuid))
-
-    def write_desc_by_instance_id(self, line):
-        """GATT Client Write to Descriptor by instance ID"""
-        args = line.split()
-        if len(args) != 2:
-            self.log.info("2 Arguments required: [instanceID] [size]")
-            return
-        instance_id = args[0]
-        size = args[1]
-        write_value = []
-        for i in range(int(size)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def write_desc_notification_by_instance_id(self, line):
-        """GATT Client Write to Descriptor by instance ID"""
-        args = line.split()
-        instance_id = args[0]
-        switch = int(args[1])
-        write_value = [0x00, 0x00]
-        if switch == 2:
-            write_value = [0x02, 0x00]
-        self._setup_discovered_services_index()
-        self.dut.droid.gattClientWriteDescriptorByInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index,
-            int(instance_id, 16), write_value)
-
-    def enable_notification_desc_by_instance_id(self, line):
-        """GATT Client Enable Notification on Descriptor by instance ID"""
-        instance_id = line
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                descriptor_uuids = (
-                    self.dut.droid.
-                    gattClientGetDiscoveredDescriptorUuidsByIndex(
-                        self.discovered_services_index, i, j))
-                for k in range(len(descriptor_uuids)):
-                    desc_inst_id = self.dut.droid.gattClientGetDescriptorInstanceId(
-                        self.bluetooth_gatt, self.discovered_services_index, i,
-                        j, k)
-                    if desc_inst_id == int(instance_id, 16):
-                        self.dut.droid.gattClientDescriptorSetValueByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k,
-                            gatt_descriptor['enable_notification_value'])
-                        time.sleep(2)  #Necessary for PTS
-                        self.dut.droid.gattClientWriteDescriptorByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k)
-                        time.sleep(2)  #Necessary for PTS
-                        self.dut.droid.gattClientSetCharacteristicNotificationByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, True)
-
-    def enable_indication_desc_by_instance_id(self, line):
-        """GATT Client Enable indication on Descriptor by instance ID"""
-        instance_id = line
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                descriptor_uuids = (
-                    self.dut.droid.
-                    gattClientGetDiscoveredDescriptorUuidsByIndex(
-                        self.discovered_services_index, i, j))
-                for k in range(len(descriptor_uuids)):
-                    desc_inst_id = self.dut.droid.gattClientGetDescriptorInstanceId(
-                        self.bluetooth_gatt, self.discovered_services_index, i,
-                        j, k)
-                    if desc_inst_id == int(instance_id, 16):
-                        self.dut.droid.gattClientDescriptorSetValueByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k,
-                            gatt_descriptor['enable_indication_value'])
-                        time.sleep(2)  #Necessary for PTS
-                        self.dut.droid.gattClientWriteDescriptorByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k)
-                        time.sleep(2)  #Necessary for PTS
-                        self.dut.droid.gattClientSetCharacteristicNotificationByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, True)
-
-    def char_enable_all_notifications(self):
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                self.dut.droid.gattClientSetCharacteristicNotificationByIndex(
-                    self.bluetooth_gatt, self.discovered_services_index, i, j,
-                    True)
-
-    def read_char_by_invalid_instance_id(self, line):
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        self.dut.droid.gattClientReadInvalidCharacteristicInstanceId(
-            self.bluetooth_gatt, self.discovered_services_index, 0,
-            int(line, 16))
-
-    def begin_reliable_write(self):
-        """Begin a reliable write on the Bluetooth Gatt Client"""
-        self.dut.droid.gattClientBeginReliableWrite(self.bluetooth_gatt)
-
-    def abort_reliable_write(self):
-        """Abort a reliable write on the Bluetooth Gatt Client"""
-        self.dut.droid.gattClientAbortReliableWrite(self.bluetooth_gatt)
-
-    def execute_reliable_write(self):
-        """Execute a reliable write on the Bluetooth Gatt Client"""
-        self.dut.droid.gattExecuteReliableWrite(self.bluetooth_gatt)
-
-    def read_all_char(self):
-        """GATT Client read all Characteristic values"""
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                char_inst_id = self.dut.droid.gattClientGetCharacteristicInstanceId(
-                    self.bluetooth_gatt, self.discovered_services_index, i, j)
-                self.log.info("Reading characteristic {} {}".format(
-                    hex(char_inst_id), characteristic_uuids[j]))
-                self.dut.droid.gattClientReadCharacteristicByIndex(
-                    self.bluetooth_gatt, self.discovered_services_index, i, j)
-                time.sleep(1)  # Necessary for PTS
-
-    def read_all_desc(self):
-        """GATT Client read all Descriptor values"""
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                descriptor_uuids = (
-                    self.dut.droid.
-                    gattClientGetDiscoveredDescriptorUuidsByIndex(
-                        self.discovered_services_index, i, j))
-                for k in range(len(descriptor_uuids)):
-                    time.sleep(1)
-                    try:
-                        self.log.info("Reading descriptor {}".format(
-                            descriptor_uuids[k]))
-                        self.dut.droid.gattClientReadDescriptorByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k)
-                    except Exception as err:
-                        self.log.info(
-                            "Failed to read to descriptor: {}".format(
-                                descriptor_uuids[k]))
-
-    def write_all_char(self, line):
-        """Write to every Characteristic on the GATT server"""
-        args = line.split()
-        write_value = []
-        for i in range(int(line)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                char_inst_id = self.dut.droid.gattClientGetCharacteristicInstanceId(
-                    self.bluetooth_gatt, self.discovered_services_index, i, j)
-                self.log.info("Writing to {} {}".format(
-                    hex(char_inst_id), characteristic_uuids[j]))
-                try:
-                    self.dut.droid.gattClientCharacteristicSetValueByIndex(
-                        self.bluetooth_gatt, self.discovered_services_index, i,
-                        j, write_value)
-                    self.dut.droid.gattClientWriteCharacteristicByIndex(
-                        self.bluetooth_gatt, self.discovered_services_index, i,
-                        j)
-                    time.sleep(1)
-                except Exception as err:
-                    self.log.info(
-                        "Failed to write to characteristic: {}".format(
-                            characteristic_uuids[j]))
-
-    def write_all_desc(self, line):
-        """ Write to every Descriptor on the GATT server """
-        args = line.split()
-        write_value = []
-        for i in range(int(line)):
-            write_value.append(i % 256)
-        self._setup_discovered_services_index()
-        services_count = self.dut.droid.gattClientGetDiscoveredServicesCount(
-            self.discovered_services_index)
-        for i in range(services_count):
-            characteristic_uuids = (
-                self.dut.droid.gattClientGetDiscoveredCharacteristicUuids(
-                    self.discovered_services_index, i))
-            for j in range(len(characteristic_uuids)):
-                descriptor_uuids = (
-                    self.dut.droid.
-                    gattClientGetDiscoveredDescriptorUuidsByIndex(
-                        self.discovered_services_index, i, j))
-                for k in range(len(descriptor_uuids)):
-                    time.sleep(1)
-                    desc_inst_id = self.dut.droid.gattClientGetDescriptorInstanceId(
-                        self.bluetooth_gatt, self.discovered_services_index, i,
-                        j, k)
-                    self.log.info("Writing to {} {}".format(
-                        hex(desc_inst_id), descriptor_uuids[k]))
-                    try:
-                        self.dut.droid.gattClientDescriptorSetValueByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k,
-                            write_value)
-                        self.dut.droid.gattClientWriteDescriptorByIndex(
-                            self.bluetooth_gatt,
-                            self.discovered_services_index, i, j, k)
-                    except Exception as err:
-                        self.log.info(
-                            "Failed to write to descriptor: {}".format(
-                                descriptor_uuids[k]))
-
-    def discover_service_by_uuid(self, line):
-        """ Discover service by UUID """
-        uuid = line
-        if len(line) == 4:
-            uuid = self.generic_uuid.format(line)
-        self.dut.droid.gattClientDiscoverServiceByUuid(self.bluetooth_gatt,
-                                                       uuid)
-
-    def request_le_connection_parameters(self):
-        le_min_ce_len = 0
-        le_max_ce_len = 0
-        le_connection_interval = 0
-        minInterval = default_le_connection_interval_ms / le_connection_interval_time_step_ms
-        maxInterval = default_le_connection_interval_ms / le_connection_interval_time_step_ms
-        return_status = self.dut.droid.gattClientRequestLeConnectionParameters(
-            self.bluetooth_gatt, minInterval, maxInterval, 0,
-            le_default_supervision_timeout, le_min_ce_len, le_max_ce_len)
-        self.log.info(
-            "Result of request le connection param: {}".format(return_status))
-
-    def socket_conn_begin_connect_thread_psm(self, line):
-        args = line.split()
-        is_ble = bool(int(args[0]))
-        secured_conn = bool(int(args[1]))
-        psm_value = int(args[2])  # 1
-        self.dut.droid.bluetoothSocketConnBeginConnectThreadPsm(
-            self.target_mac_addr, is_ble, psm_value, secured_conn)
-
-    def socket_conn_begin_accept_thread_psm(self, line):
-        accept_timeout_ms = default_bluetooth_socket_timeout_ms
-        is_ble = True
-        secured_conn = False
-        self.dut.droid.bluetoothSocketConnBeginAcceptThreadPsm(
-            accept_timeout_ms, is_ble, secured_conn)
diff --git a/src/antlion/test_utils/bt/gatts_lib.py b/src/antlion/test_utils/bt/gatts_lib.py
deleted file mode 100644
index e7828d0..0000000
--- a/src/antlion/test_utils/bt/gatts_lib.py
+++ /dev/null
@@ -1,380 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion.keys import Config
-from antlion.utils import rand_ascii_str
-from antlion.test_utils.bt.bt_constants import gatt_cb_strings
-from antlion.test_utils.bt.bt_constants import gatt_characteristic
-from antlion.test_utils.bt.bt_constants import gatt_characteristic_value_format
-from antlion.test_utils.bt.bt_constants import gatt_cb_err
-from antlion.test_utils.bt.bt_constants import gatt_transport
-from antlion.test_utils.bt.bt_constants import gatt_event
-from antlion.test_utils.bt.bt_constants import gatt_server_responses
-from antlion.test_utils.bt.bt_constants import gatt_service_types
-from antlion.test_utils.bt.bt_constants import small_timeout
-from antlion.test_utils.bt.gatt_test_database import STRING_512BYTES
-
-from antlion.utils import exe_cmd
-from math import ceil
-
-
-class GattServerLib():
-
-    characteristic_list = []
-    default_timeout = 10
-    descriptor_list = []
-    dut = None
-    gatt_server = None
-    gatt_server_callback = None
-    gatt_server_list = []
-    log = None
-    service_list = []
-    write_mapping = {}
-
-    def __init__(self, log, dut):
-        self.dut = dut
-        self.log = log
-
-    def list_all_uuids(self):
-        """From the GATT Client, discover services and list all services,
-        chars and descriptors.
-        """
-        self.log.info("Service List:")
-        for service in self.dut.droid.gattGetServiceUuidList(self.gatt_server):
-            self.dut.log.info("GATT Server service uuid: {}".format(service))
-        self.log.info("Characteristics List:")
-        for characteristic in self.characteristic_list:
-            instance_id = self.dut.droid.gattServerGetCharacteristicInstanceId(
-                characteristic)
-            uuid = self.dut.droid.gattServerGetCharacteristicUuid(
-                characteristic)
-            self.dut.log.info(
-                "GATT Server characteristic handle uuid: {} {}".format(
-                    hex(instance_id), uuid))
-        # TODO: add getting insance ids and uuids from each descriptor.
-
-    def open(self):
-        """Open an empty GATT Server instance"""
-        self.gatt_server_callback = self.dut.droid.gattServerCreateGattServerCallback(
-        )
-        self.gatt_server = self.dut.droid.gattServerOpenGattServer(
-            self.gatt_server_callback)
-        self.gatt_server_list.append(self.gatt_server)
-
-    def clear_services(self):
-        """Clear BluetoothGattServices from BluetoothGattServer"""
-        self.dut.droid.gattServerClearServices(self.gatt_server)
-
-    def close_bluetooth_gatt_servers(self):
-        """Close Bluetooth Gatt Servers"""
-        try:
-            for btgs in self.gatt_server_list:
-                self.dut.droid.gattServerClose(btgs)
-        except Exception as err:
-            self.log.error(
-                "Failed to close Bluetooth GATT Servers: {}".format(err))
-        self.characteristic_list = []
-        self.descriptor_list = []
-        self.gatt_server_list = []
-        self.service_list = []
-
-    def characteristic_set_value_by_instance_id(self, instance_id, value):
-        """Set Characteristic value by instance id"""
-        self.dut.droid.gattServerCharacteristicSetValueByInstanceId(
-            int(instance_id, 16), value)
-
-    def notify_characteristic_changed(self, instance_id, confirm):
-        """ Notify characteristic changed """
-        self.dut.droid.gattServerNotifyCharacteristicChangedByInstanceId(
-            self.gatt_server, 0, int(instance_id, 16), confirm)
-
-    def send_response(self, user_input):
-        """Send a single response to the GATT Client"""
-        args = user_input.split()
-        mtu = 23
-        if len(args) == 2:
-            user_input = args[0]
-            mtu = int(args[1])
-        desc_read = gatt_event['desc_read_req']['evt'].format(
-            self.gatt_server_callback)
-        desc_write = gatt_event['desc_write_req']['evt'].format(
-            self.gatt_server_callback)
-        char_read = gatt_event['char_read_req']['evt'].format(
-            self.gatt_server_callback)
-        char_write_req = gatt_event['char_write_req']['evt'].format(
-            self.gatt_server_callback)
-        char_write = gatt_event['char_write']['evt'].format(
-            self.gatt_server_callback)
-        execute_write = gatt_event['exec_write']['evt'].format(
-            self.gatt_server_callback)
-        regex = "({}|{}|{}|{}|{}|{})".format(desc_read, desc_write, char_read,
-                                             char_write, execute_write,
-                                             char_write_req)
-        events = self.dut.ed.pop_events(regex, 5, small_timeout)
-        status = 0
-        if user_input:
-            status = gatt_server_responses.get(user_input)
-        for event in events:
-            self.log.debug("Found event: {}.".format(event))
-            request_id = event['data']['requestId']
-            if event['name'] == execute_write:
-                if ('execute' in event['data']
-                        and event['data']['execute'] == True):
-                    for key in self.write_mapping:
-                        value = self.write_mapping[key]
-                        self.log.info("Writing key, value: {}, {}".format(
-                            key, value))
-                        self.dut.droid.gattServerSetByteArrayValueByInstanceId(
-                            key, value)
-                else:
-                    self.log.info("Execute result is false")
-                self.write_mapping = {}
-                self.dut.droid.gattServerSendResponse(self.gatt_server, 0,
-                                                      request_id, status, 0,
-                                                      [])
-                continue
-            offset = event['data']['offset']
-            instance_id = event['data']['instanceId']
-            if (event['name'] == desc_write or event['name'] == char_write
-                    or event['name'] == char_write_req):
-                if ('preparedWrite' in event['data']
-                        and event['data']['preparedWrite'] == True):
-                    value = event['data']['value']
-                    if instance_id in self.write_mapping.keys():
-                        self.write_mapping[instance_id] = self.write_mapping[
-                            instance_id] + value
-                        self.log.info(
-                            "New Prepared Write Value for {}: {}".format(
-                                instance_id, self.write_mapping[instance_id]))
-                    else:
-                        self.log.info("write mapping key, value {}, {}".format(
-                            instance_id, value))
-                        self.write_mapping[instance_id] = value
-                        self.log.info("current value {}, {}".format(
-                            instance_id, value))
-                    self.dut.droid.gattServerSendResponse(
-                        self.gatt_server, 0, request_id, status, 0, value)
-                    continue
-                else:
-                    self.dut.droid.gattServerSetByteArrayValueByInstanceId(
-                        event['data']['instanceId'], event['data']['value'])
-
-            try:
-                data = self.dut.droid.gattServerGetReadValueByInstanceId(
-                    int(event['data']['instanceId']))
-            except Exception as err:
-                self.log.error(err)
-            if not data:
-                data = [1]
-            self.log.info(
-                "GATT Server Send Response [request_id, status, offset, data]" \
-                " [{}, {}, {}, {}]".
-                format(request_id, status, offset, data))
-            data = data[offset:offset + mtu - 1]
-            self.dut.droid.gattServerSendResponse(self.gatt_server, 0,
-                                                  request_id, status, offset,
-                                                  data)
-
-    def _setup_service(self, serv):
-        service = self.dut.droid.gattServerCreateService(
-            serv['uuid'], serv['type'])
-        if 'handles' in serv:
-            self.dut.droid.gattServerServiceSetHandlesToReserve(
-                service, serv['handles'])
-        return service
-
-    def _setup_characteristic(self, char):
-        characteristic = \
-            self.dut.droid.gattServerCreateBluetoothGattCharacteristic(
-                char['uuid'], char['properties'], char['permissions'])
-        if 'instance_id' in char:
-            self.dut.droid.gattServerCharacteristicSetInstanceId(
-                characteristic, char['instance_id'])
-            set_id = self.dut.droid.gattServerCharacteristicGetInstanceId(
-                characteristic)
-            if set_id != char['instance_id']:
-                self.log.error(
-                    "Instance ID did not match up. Found {} Expected {}".
-                    format(set_id, char['instance_id']))
-        if 'value_type' in char:
-            value_type = char['value_type']
-            value = char['value']
-            if value_type == gatt_characteristic_value_format['string']:
-                self.log.info("Set String value result: {}".format(
-                    self.dut.droid.gattServerCharacteristicSetStringValue(
-                        characteristic, value)))
-            elif value_type == gatt_characteristic_value_format['byte']:
-                self.log.info("Set Byte Array value result: {}".format(
-                    self.dut.droid.gattServerCharacteristicSetByteValue(
-                        characteristic, value)))
-            else:
-                self.log.info("Set Int value result: {}".format(
-                    self.dut.droid.gattServerCharacteristicSetIntValue(
-                        characteristic, value, value_type, char['offset'])))
-        return characteristic
-
-    def _setup_descriptor(self, desc):
-        descriptor = self.dut.droid.gattServerCreateBluetoothGattDescriptor(
-            desc['uuid'], desc['permissions'])
-        if 'value' in desc:
-            self.dut.droid.gattServerDescriptorSetByteValue(
-                descriptor, desc['value'])
-        if 'instance_id' in desc:
-            self.dut.droid.gattServerDescriptorSetInstanceId(
-                descriptor, desc['instance_id'])
-        self.descriptor_list.append(descriptor)
-        return descriptor
-
-    def setup_gatts_db(self, database):
-        """Setup GATT Server database"""
-        self.gatt_server_callback = \
-            self.dut.droid.gattServerCreateGattServerCallback()
-        self.gatt_server = self.dut.droid.gattServerOpenGattServer(
-            self.gatt_server_callback)
-        self.gatt_server_list.append(self.gatt_server)
-        for serv in database['services']:
-            service = self._setup_service(serv)
-            self.service_list.append(service)
-            if 'characteristics' in serv:
-                for char in serv['characteristics']:
-                    characteristic = self._setup_characteristic(char)
-                    if 'descriptors' in char:
-                        for desc in char['descriptors']:
-                            descriptor = self._setup_descriptor(desc)
-                            self.dut.droid.gattServerCharacteristicAddDescriptor(
-                                characteristic, descriptor)
-                    self.characteristic_list.append(characteristic)
-                    self.dut.droid.gattServerAddCharacteristicToService(
-                        service, characteristic)
-            self.dut.droid.gattServerAddService(self.gatt_server, service)
-            expected_event = gatt_cb_strings['serv_added'].format(
-                self.gatt_server_callback)
-            self.dut.ed.pop_event(expected_event, 10)
-        return self.gatt_server, self.gatt_server_callback
-
-    def send_continuous_response(self, user_input):
-        """Send the same response"""
-        desc_read = gatt_event['desc_read_req']['evt'].format(
-            self.gatt_server_callback)
-        desc_write = gatt_event['desc_write_req']['evt'].format(
-            self.gatt_server_callback)
-        char_read = gatt_event['char_read_req']['evt'].format(
-            self.gatt_server_callback)
-        char_write = gatt_event['char_write']['evt'].format(
-            self.gatt_server_callback)
-        execute_write = gatt_event['exec_write']['evt'].format(
-            self.gatt_server_callback)
-        regex = "({}|{}|{}|{}|{})".format(desc_read, desc_write, char_read,
-                                          char_write, execute_write)
-        offset = 0
-        status = 0
-        mtu = 23
-        char_value = []
-        for i in range(512):
-            char_value.append(i % 256)
-        len_min = 470
-        end_time = time.time() + 180
-        i = 0
-        num_packets = ceil((len(char_value) + 1) / (mtu - 1))
-        while time.time() < end_time:
-            events = self.dut.ed.pop_events(regex, 10, small_timeout)
-            for event in events:
-                start_offset = i * (mtu - 1)
-                i += 1
-                self.log.debug("Found event: {}.".format(event))
-                request_id = event['data']['requestId']
-                data = char_value[start_offset:start_offset + mtu - 1]
-                if not data:
-                    data = [1]
-                self.log.debug(
-                    "GATT Server Send Response [request_id, status, offset, " \
-                    "data] [{}, {}, {}, {}]".format(request_id, status, offset,
-                        data))
-                self.dut.droid.gattServerSendResponse(self.gatt_server, 0,
-                                                      request_id, status,
-                                                      offset, data)
-
-    def send_continuous_response_data(self, user_input):
-        """Send the same response with data"""
-        desc_read = gatt_event['desc_read_req']['evt'].format(
-            self.gatt_server_callback)
-        desc_write = gatt_event['desc_write_req']['evt'].format(
-            self.gatt_server_callback)
-        char_read = gatt_event['char_read_req']['evt'].format(
-            self.gatt_server_callback)
-        char_write = gatt_event['char_write']['evt'].format(
-            self.gatt_server_callback)
-        execute_write = gatt_event['exec_write']['evt'].format(
-            self.gatt_server_callback)
-        regex = "({}|{}|{}|{}|{})".format(desc_read, desc_write, char_read,
-                                          char_write, execute_write)
-        offset = 0
-        status = 0
-        mtu = 11
-        char_value = []
-        len_min = 470
-        end_time = time.time() + 180
-        i = 0
-        num_packets = ceil((len(char_value) + 1) / (mtu - 1))
-        while time.time() < end_time:
-            events = self.dut.ed.pop_events(regex, 10, small_timeout)
-            for event in events:
-                self.log.info(event)
-                request_id = event['data']['requestId']
-                if event['name'] == execute_write:
-                    if ('execute' in event['data']
-                            and event['data']['execute'] == True):
-                        for key in self.write_mapping:
-                            value = self.write_mapping[key]
-                            self.log.debug("Writing key, value: {}, {}".format(
-                                key, value))
-                            self.dut.droid.gattServerSetByteArrayValueByInstanceId(
-                                key, value)
-                        self.write_mapping = {}
-                    self.dut.droid.gattServerSendResponse(
-                        self.gatt_server, 0, request_id, status, 0, [1])
-                    continue
-                offset = event['data']['offset']
-                instance_id = event['data']['instanceId']
-                if (event['name'] == desc_write
-                        or event['name'] == char_write):
-                    if ('preparedWrite' in event['data']
-                            and event['data']['preparedWrite'] == True):
-                        value = event['data']['value']
-                        if instance_id in self.write_mapping:
-                            self.write_mapping[
-                                instance_id] = self.write_mapping[
-                                    instance_id] + value
-                        else:
-                            self.write_mapping[instance_id] = value
-                    else:
-                        self.dut.droid.gattServerSetByteArrayValueByInstanceId(
-                            event['data']['instanceId'],
-                            event['data']['value'])
-                try:
-                    data = self.dut.droid.gattServerGetReadValueByInstanceId(
-                        int(event['data']['instanceId']))
-                except Exception as err:
-                    self.log.error(err)
-                if not data:
-                    self.dut.droid.gattServerSendResponse(
-                        self.gatt_server, 0, request_id, status, offset, [1])
-                else:
-                    self.dut.droid.gattServerSendResponse(
-                        self.gatt_server, 0, request_id, status, offset,
-                        data[offset:offset + 17])
diff --git a/src/antlion/test_utils/bt/native_bt_test_utils.py b/src/antlion/test_utils/bt/native_bt_test_utils.py
deleted file mode 100644
index 822de8c..0000000
--- a/src/antlion/test_utils/bt/native_bt_test_utils.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-
-from subprocess import call
-import time
-
-log = logging
-
-
-def setup_native_bluetooth(native_devices):
-    for n in native_devices:
-        droid = n.droid
-        pid = n.adb.shell("pidof -s bluetoothtbd")
-        if not pid:
-            call(
-                ["adb -s " + n.serial + " shell sh -c \"bluetoothtbd\" &"],
-                shell=True)
-        droid.BtBinderInitInterface()
-        time.sleep(5)  #temporary sleep statement
-        droid.BtBinderEnable()
-        time.sleep(5)  #temporary sleep statement
-        droid.BtBinderRegisterBLE()
-        time.sleep(5)  #temporary sleep statement
diff --git a/src/antlion/test_utils/bt/protos/__init__.py b/src/antlion/test_utils/bt/protos/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/bt/protos/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/bt/protos/bluetooth.proto b/src/antlion/test_utils/bt/protos/bluetooth.proto
deleted file mode 100644
index 969dbd6..0000000
--- a/src/antlion/test_utils/bt/protos/bluetooth.proto
+++ /dev/null
@@ -1,301 +0,0 @@
- /*
-  * Copyright 2022 The Fuchsia Authors
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-syntax = "proto2";
-
-option optimize_for = LITE_RUNTIME;
-
-// C++ namespace: bluetooth::metrics::BluetoothMetricsProto
-package bluetooth.metrics.BluetoothMetricsProto;
-
-option java_package = "com.android.bluetooth";
-option java_outer_classname = "BluetoothMetricsProto";
-
-message BluetoothLog {
-  // Session information that gets logged for every BT connection.
-  repeated BluetoothSession session = 1;
-
-  // Session information that gets logged for every Pair event.
-  repeated PairEvent pair_event = 2;
-
-  // Information for Wake locks.
-  repeated WakeEvent wake_event = 3;
-
-  // Scan event information.
-  repeated ScanEvent scan_event = 4;
-
-  // Number of bonded devices.
-  optional int32 num_bonded_devices = 5;
-
-  // Number of BluetoothSession including discarded ones beyond capacity
-  optional int64 num_bluetooth_session = 6;
-
-  // Number of PairEvent including discarded ones beyond capacity
-  optional int64 num_pair_event = 7;
-
-  // Number of WakeEvent including discarded ones beyond capacity
-  optional int64 num_wake_event = 8;
-
-  // Number of ScanEvent including discarded ones beyond capacity
-  optional int64 num_scan_event = 9;
-
-  // Statistics about Bluetooth profile connections
-  repeated ProfileConnectionStats profile_connection_stats = 10;
-
-  // Statistics about Headset profile connections
-  repeated HeadsetProfileConnectionStats headset_profile_connection_stats = 11;
-}
-
-// The information about the device.
-message DeviceInfo {
-  // Device type.
-  enum DeviceType {
-    // Type is unknown.
-    DEVICE_TYPE_UNKNOWN = 0;
-
-    DEVICE_TYPE_BREDR = 1;
-
-    DEVICE_TYPE_LE = 2;
-
-    DEVICE_TYPE_DUMO = 3;
-  }
-
-  // Device class
-  // https://cs.corp.google.com/#android/system/bt/stack/include/btm_api.h&q=major_computer.
-  optional int32 device_class = 1;
-
-  // Device type.
-  optional DeviceType device_type = 2;
-}
-
-// Information that gets logged for every Bluetooth connection.
-message BluetoothSession {
-  // Type of technology used in the connection.
-  enum ConnectionTechnologyType {
-    CONNECTION_TECHNOLOGY_TYPE_UNKNOWN = 0;
-
-    CONNECTION_TECHNOLOGY_TYPE_LE = 1;
-
-    CONNECTION_TECHNOLOGY_TYPE_BREDR = 2;
-  }
-
-  enum DisconnectReasonType {
-    UNKNOWN = 0;
-
-    // A metrics dump takes a snapshot of current Bluetooth session and thus
-    // is not a real disconnect, but a discontinuation in metrics logging.
-    // This enum indicates this situation.
-    METRICS_DUMP = 1;
-
-    NEXT_START_WITHOUT_END_PREVIOUS = 2;
-  }
-
-  // Duration of the session.
-  optional int64 session_duration_sec = 2;
-
-  // Technology type.
-  optional ConnectionTechnologyType connection_technology_type = 3;
-
-  // Reason for disconnecting.
-  optional string disconnect_reason = 4 [deprecated = true];
-
-  // The information about the device which it is connected to.
-  optional DeviceInfo device_connected_to = 5;
-
-  // The information about the RFComm session.
-  optional RFCommSession rfcomm_session = 6;
-
-  // The information about the A2DP audio session.
-  optional A2DPSession a2dp_session = 7;
-
-  // Numeric reason for disconnecting as defined in metrics.h
-  optional DisconnectReasonType disconnect_reason_type = 8;
-}
-
-message RFCommSession {
-  // bytes transmitted.
-  optional int32 rx_bytes = 1;
-
-  // bytes transmitted.
-  optional int32 tx_bytes = 2;
-}
-
-enum A2dpSourceCodec {
-  A2DP_SOURCE_CODEC_UNKNOWN = 0;
-  A2DP_SOURCE_CODEC_SBC = 1;
-  A2DP_SOURCE_CODEC_AAC = 2;
-  A2DP_SOURCE_CODEC_APTX = 3;
-  A2DP_SOURCE_CODEC_APTX_HD = 4;
-  A2DP_SOURCE_CODEC_LDAC = 5;
-}
-
-// Session information that gets logged for A2DP session.
-message A2DPSession {
-  // Media timer in milliseconds.
-  optional int32 media_timer_min_millis = 1;
-
-  // Media timer in milliseconds.
-  optional int32 media_timer_max_millis = 2;
-
-  // Media timer in milliseconds.
-  optional int32 media_timer_avg_millis = 3;
-
-  // Buffer overruns count.
-  optional int32 buffer_overruns_max_count = 4;
-
-  // Buffer overruns total.
-  optional int32 buffer_overruns_total = 5;
-
-  // Buffer underruns average.
-  optional float buffer_underruns_average = 6;
-
-  // Buffer underruns count.
-  optional int32 buffer_underruns_count = 7;
-
-  // Total audio time in this A2DP session
-  optional int64 audio_duration_millis = 8;
-
-  // Audio codec used in this A2DP session in A2DP source role
-  optional A2dpSourceCodec source_codec = 9;
-
-  // Whether A2DP offload is enabled in this A2DP session
-  optional bool is_a2dp_offload = 10;
-}
-
-message PairEvent {
-  // The reason for disconnecting
-  // See: system/bt/stack/include/hcidefs.h, HCI_ERR_CONN_FAILED_ESTABLISHMENT
-  optional int32 disconnect_reason = 1;
-
-  // Pair event time
-  optional int64 event_time_millis =
-      2;  // [(datapol.semantic_type) = ST_TIMESTAMP];
-
-  // The information about the device which it is paired to.
-  optional DeviceInfo device_paired_with = 3;
-}
-
-message WakeEvent {
-  // Information about the wake event type.
-  enum WakeEventType {
-    UNKNOWN = 0;
-    // WakeLock was acquired.
-    ACQUIRED = 1;
-    // WakeLock was released.
-    RELEASED = 2;
-  }
-
-  // Information about the wake event type.
-  optional WakeEventType wake_event_type = 1;
-
-  // Initiator of the scan. Only the first three names will be stored.
-  // e.g. com.company.app
-  optional string requestor = 2;
-
-  // Name of the wakelock (e.g. bluedroid_timer).
-  optional string name = 3;
-
-  // Time of the event.
-  optional int64 event_time_millis =
-      4;  // [(datapol.semantic_type) = ST_TIMESTAMP];
-}
-
-message ScanEvent {
-  // Scan type.
-  enum ScanTechnologyType {
-    SCAN_TYPE_UNKNOWN = 0;
-
-    SCAN_TECH_TYPE_LE = 1;
-
-    SCAN_TECH_TYPE_BREDR = 2;
-
-    SCAN_TECH_TYPE_BOTH = 3;
-  }
-
-  // Scan event type.
-  enum ScanEventType {
-    // Scan started.
-    SCAN_EVENT_START = 0;
-    // Scan stopped.
-    SCAN_EVENT_STOP = 1;
-  }
-
-  // Scan event type.
-  optional ScanEventType scan_event_type = 1;
-
-  // Initiator of the scan. Only the first three names will be stored.
-  // e.g. com.company.app
-  optional string initiator = 2;
-
-  // Technology used for scanning.
-  optional ScanTechnologyType scan_technology_type = 3;
-
-  // Number of results returned.
-  optional int32 number_results = 4;
-
-  // Time of the event.
-  optional int64 event_time_millis =
-      5;  // [(datapol.semantic_type) = ST_TIMESTAMP];
-}
-
-// Profile IDs defined in BluetoothProfile API class
-// Values must match API class values
-enum ProfileId {
-  PROFILE_UNKNOWN = 0;
-  HEADSET = 1;
-  A2DP = 2;
-  HEALTH = 3;
-  HID_HOST = 4;
-  PAN = 5;
-  PBAP = 6;
-  GATT = 7;
-  GATT_SERVER = 8;
-  MAP = 9;
-  SAP = 10;
-  A2DP_SINK = 11;
-  AVRCP_CONTROLLER = 12;
-  AVRCP = 13;
-  HEADSET_CLIENT = 16;
-  PBAP_CLIENT = 17;
-  MAP_CLIENT = 18;
-  HID_DEVICE = 19;
-  OPP = 20;
-  HEARING_AID = 21;
-}
-
-// Statistics about Bluetooth profile connections
-message ProfileConnectionStats {
-  // Profile id defined in BluetoothProfile.java
-  optional ProfileId profile_id = 1;
-
-  // Number of times that this profile is connected since last metrics dump
-  optional int32 num_times_connected = 2;
-}
-
-enum HeadsetProfileType {
-  HEADSET_PROFILE_UNKNOWN = 0;
-  HSP = 1;
-  HFP = 2;
-}
-
-// Statistics about headset profile connections
-message HeadsetProfileConnectionStats {
-  // Type of headset profile connected
-  optional HeadsetProfileType headset_profile_type = 1;
-
-  // Number of times this type of headset profile is connected
-  optional int32 num_times_connected = 2;
-}
diff --git a/src/antlion/test_utils/bt/protos/bluetooth_pb2.py b/src/antlion/test_utils/bt/protos/bluetooth_pb2.py
deleted file mode 100644
index 1188f77..0000000
--- a/src/antlion/test_utils/bt/protos/bluetooth_pb2.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: bluetooth.proto
-"""Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0f\x62luetooth.proto\x12\'bluetooth.metrics.BluetoothMetricsProto\"\x8a\x05\n\x0c\x42luetoothLog\x12J\n\x07session\x18\x01 \x03(\x0b\x32\x39.bluetooth.metrics.BluetoothMetricsProto.BluetoothSession\x12\x46\n\npair_event\x18\x02 \x03(\x0b\x32\x32.bluetooth.metrics.BluetoothMetricsProto.PairEvent\x12\x46\n\nwake_event\x18\x03 \x03(\x0b\x32\x32.bluetooth.metrics.BluetoothMetricsProto.WakeEvent\x12\x46\n\nscan_event\x18\x04 \x03(\x0b\x32\x32.bluetooth.metrics.BluetoothMetricsProto.ScanEvent\x12\x1a\n\x12num_bonded_devices\x18\x05 \x01(\x05\x12\x1d\n\x15num_bluetooth_session\x18\x06 \x01(\x03\x12\x16\n\x0enum_pair_event\x18\x07 \x01(\x03\x12\x16\n\x0enum_wake_event\x18\x08 \x01(\x03\x12\x16\n\x0enum_scan_event\x18\t \x01(\x03\x12\x61\n\x18profile_connection_stats\x18\n \x03(\x0b\x32?.bluetooth.metrics.BluetoothMetricsProto.ProfileConnectionStats\x12p\n headset_profile_connection_stats\x18\x0b \x03(\x0b\x32\x46.bluetooth.metrics.BluetoothMetricsProto.HeadsetProfileConnectionStats\"\xdf\x01\n\nDeviceInfo\x12\x14\n\x0c\x64\x65vice_class\x18\x01 \x01(\x05\x12S\n\x0b\x64\x65vice_type\x18\x02 \x01(\x0e\x32>.bluetooth.metrics.BluetoothMetricsProto.DeviceInfo.DeviceType\"f\n\nDeviceType\x12\x17\n\x13\x44\x45VICE_TYPE_UNKNOWN\x10\x00\x12\x15\n\x11\x44\x45VICE_TYPE_BREDR\x10\x01\x12\x12\n\x0e\x44\x45VICE_TYPE_LE\x10\x02\x12\x14\n\x10\x44\x45VICE_TYPE_DUMO\x10\x03\"\x8f\x06\n\x10\x42luetoothSession\x12\x1c\n\x14session_duration_sec\x18\x02 \x01(\x03\x12v\n\x1a\x63onnection_technology_type\x18\x03 \x01(\x0e\x32R.bluetooth.metrics.BluetoothMetricsProto.BluetoothSession.ConnectionTechnologyType\x12\x1d\n\x11\x64isconnect_reason\x18\x04 \x01(\tB\x02\x18\x01\x12P\n\x13\x64\x65vice_connected_to\x18\x05 \x01(\x0b\x32\x33.bluetooth.metrics.BluetoothMetricsProto.DeviceInfo\x12N\n\x0erfcomm_session\x18\x06 \x01(\x0b\x32\x36.bluetooth.metrics.BluetoothMetricsProto.RFCommSession\x12J\n\x0c\x61\x32\x64p_session\x18\x07 
\x01(\x0b\x32\x34.bluetooth.metrics.BluetoothMetricsProto.A2DPSession\x12n\n\x16\x64isconnect_reason_type\x18\x08 \x01(\x0e\x32N.bluetooth.metrics.BluetoothMetricsProto.BluetoothSession.DisconnectReasonType\"\x8b\x01\n\x18\x43onnectionTechnologyType\x12&\n\"CONNECTION_TECHNOLOGY_TYPE_UNKNOWN\x10\x00\x12!\n\x1d\x43ONNECTION_TECHNOLOGY_TYPE_LE\x10\x01\x12$\n CONNECTION_TECHNOLOGY_TYPE_BREDR\x10\x02\"Z\n\x14\x44isconnectReasonType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x10\n\x0cMETRICS_DUMP\x10\x01\x12#\n\x1fNEXT_START_WITHOUT_END_PREVIOUS\x10\x02\"3\n\rRFCommSession\x12\x10\n\x08rx_bytes\x18\x01 \x01(\x05\x12\x10\n\x08tx_bytes\x18\x02 \x01(\x05\"\xf9\x02\n\x0b\x41\x32\x44PSession\x12\x1e\n\x16media_timer_min_millis\x18\x01 \x01(\x05\x12\x1e\n\x16media_timer_max_millis\x18\x02 \x01(\x05\x12\x1e\n\x16media_timer_avg_millis\x18\x03 \x01(\x05\x12!\n\x19\x62uffer_overruns_max_count\x18\x04 \x01(\x05\x12\x1d\n\x15\x62uffer_overruns_total\x18\x05 \x01(\x05\x12 \n\x18\x62uffer_underruns_average\x18\x06 \x01(\x02\x12\x1e\n\x16\x62uffer_underruns_count\x18\x07 \x01(\x05\x12\x1d\n\x15\x61udio_duration_millis\x18\x08 \x01(\x03\x12N\n\x0csource_codec\x18\t \x01(\x0e\x32\x38.bluetooth.metrics.BluetoothMetricsProto.A2dpSourceCodec\x12\x17\n\x0fis_a2dp_offload\x18\n \x01(\x08\"\x92\x01\n\tPairEvent\x12\x19\n\x11\x64isconnect_reason\x18\x01 \x01(\x05\x12\x19\n\x11\x65vent_time_millis\x18\x02 \x01(\x03\x12O\n\x12\x64\x65vice_paired_with\x18\x03 \x01(\x0b\x32\x33.bluetooth.metrics.BluetoothMetricsProto.DeviceInfo\"\xdc\x01\n\tWakeEvent\x12Y\n\x0fwake_event_type\x18\x01 \x01(\x0e\x32@.bluetooth.metrics.BluetoothMetricsProto.WakeEvent.WakeEventType\x12\x11\n\trequestor\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x19\n\x11\x65vent_time_millis\x18\x04 \x01(\x03\"8\n\rWakeEventType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x43QUIRED\x10\x01\x12\x0c\n\x08RELEASED\x10\x02\"\xc4\x03\n\tScanEvent\x12Y\n\x0fscan_event_type\x18\x01 
\x01(\x0e\x32@.bluetooth.metrics.BluetoothMetricsProto.ScanEvent.ScanEventType\x12\x11\n\tinitiator\x18\x02 \x01(\t\x12\x63\n\x14scan_technology_type\x18\x03 \x01(\x0e\x32\x45.bluetooth.metrics.BluetoothMetricsProto.ScanEvent.ScanTechnologyType\x12\x16\n\x0enumber_results\x18\x04 \x01(\x05\x12\x19\n\x11\x65vent_time_millis\x18\x05 \x01(\x03\"u\n\x12ScanTechnologyType\x12\x15\n\x11SCAN_TYPE_UNKNOWN\x10\x00\x12\x15\n\x11SCAN_TECH_TYPE_LE\x10\x01\x12\x18\n\x14SCAN_TECH_TYPE_BREDR\x10\x02\x12\x17\n\x13SCAN_TECH_TYPE_BOTH\x10\x03\":\n\rScanEventType\x12\x14\n\x10SCAN_EVENT_START\x10\x00\x12\x13\n\x0fSCAN_EVENT_STOP\x10\x01\"}\n\x16ProfileConnectionStats\x12\x46\n\nprofile_id\x18\x01 \x01(\x0e\x32\x32.bluetooth.metrics.BluetoothMetricsProto.ProfileId\x12\x1b\n\x13num_times_connected\x18\x02 \x01(\x05\"\x97\x01\n\x1dHeadsetProfileConnectionStats\x12Y\n\x14headset_profile_type\x18\x01 \x01(\x0e\x32;.bluetooth.metrics.BluetoothMetricsProto.HeadsetProfileType\x12\x1b\n\x13num_times_connected\x18\x02 
\x01(\x05*\xbd\x01\n\x0f\x41\x32\x64pSourceCodec\x12\x1d\n\x19\x41\x32\x44P_SOURCE_CODEC_UNKNOWN\x10\x00\x12\x19\n\x15\x41\x32\x44P_SOURCE_CODEC_SBC\x10\x01\x12\x19\n\x15\x41\x32\x44P_SOURCE_CODEC_AAC\x10\x02\x12\x1a\n\x16\x41\x32\x44P_SOURCE_CODEC_APTX\x10\x03\x12\x1d\n\x19\x41\x32\x44P_SOURCE_CODEC_APTX_HD\x10\x04\x12\x1a\n\x16\x41\x32\x44P_SOURCE_CODEC_LDAC\x10\x05*\xa0\x02\n\tProfileId\x12\x13\n\x0fPROFILE_UNKNOWN\x10\x00\x12\x0b\n\x07HEADSET\x10\x01\x12\x08\n\x04\x41\x32\x44P\x10\x02\x12\n\n\x06HEALTH\x10\x03\x12\x0c\n\x08HID_HOST\x10\x04\x12\x07\n\x03PAN\x10\x05\x12\x08\n\x04PBAP\x10\x06\x12\x08\n\x04GATT\x10\x07\x12\x0f\n\x0bGATT_SERVER\x10\x08\x12\x07\n\x03MAP\x10\t\x12\x07\n\x03SAP\x10\n\x12\r\n\tA2DP_SINK\x10\x0b\x12\x14\n\x10\x41VRCP_CONTROLLER\x10\x0c\x12\t\n\x05\x41VRCP\x10\r\x12\x12\n\x0eHEADSET_CLIENT\x10\x10\x12\x0f\n\x0bPBAP_CLIENT\x10\x11\x12\x0e\n\nMAP_CLIENT\x10\x12\x12\x0e\n\nHID_DEVICE\x10\x13\x12\x07\n\x03OPP\x10\x14\x12\x0f\n\x0bHEARING_AID\x10\x15*C\n\x12HeadsetProfileType\x12\x1b\n\x17HEADSET_PROFILE_UNKNOWN\x10\x00\x12\x07\n\x03HSP\x10\x01\x12\x07\n\x03HFP\x10\x02\x42\x30\n\x15\x63om.android.bluetoothB\x15\x42luetoothMetricsProtoH\x03')
-
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'bluetooth_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n\025com.android.bluetoothB\025BluetoothMetricsProtoH\003'
-  _BLUETOOTHSESSION.fields_by_name['disconnect_reason']._options = None
-  _BLUETOOTHSESSION.fields_by_name['disconnect_reason']._serialized_options = b'\030\001'
-  _A2DPSOURCECODEC._serialized_start=3267
-  _A2DPSOURCECODEC._serialized_end=3456
-  _PROFILEID._serialized_start=3459
-  _PROFILEID._serialized_end=3747
-  _HEADSETPROFILETYPE._serialized_start=3749
-  _HEADSETPROFILETYPE._serialized_end=3816
-  _BLUETOOTHLOG._serialized_start=61
-  _BLUETOOTHLOG._serialized_end=711
-  _DEVICEINFO._serialized_start=714
-  _DEVICEINFO._serialized_end=937
-  _DEVICEINFO_DEVICETYPE._serialized_start=835
-  _DEVICEINFO_DEVICETYPE._serialized_end=937
-  _BLUETOOTHSESSION._serialized_start=940
-  _BLUETOOTHSESSION._serialized_end=1723
-  _BLUETOOTHSESSION_CONNECTIONTECHNOLOGYTYPE._serialized_start=1492
-  _BLUETOOTHSESSION_CONNECTIONTECHNOLOGYTYPE._serialized_end=1631
-  _BLUETOOTHSESSION_DISCONNECTREASONTYPE._serialized_start=1633
-  _BLUETOOTHSESSION_DISCONNECTREASONTYPE._serialized_end=1723
-  _RFCOMMSESSION._serialized_start=1725
-  _RFCOMMSESSION._serialized_end=1776
-  _A2DPSESSION._serialized_start=1779
-  _A2DPSESSION._serialized_end=2156
-  _PAIREVENT._serialized_start=2159
-  _PAIREVENT._serialized_end=2305
-  _WAKEEVENT._serialized_start=2308
-  _WAKEEVENT._serialized_end=2528
-  _WAKEEVENT_WAKEEVENTTYPE._serialized_start=2472
-  _WAKEEVENT_WAKEEVENTTYPE._serialized_end=2528
-  _SCANEVENT._serialized_start=2531
-  _SCANEVENT._serialized_end=2983
-  _SCANEVENT_SCANTECHNOLOGYTYPE._serialized_start=2806
-  _SCANEVENT_SCANTECHNOLOGYTYPE._serialized_end=2923
-  _SCANEVENT_SCANEVENTTYPE._serialized_start=2925
-  _SCANEVENT_SCANEVENTTYPE._serialized_end=2983
-  _PROFILECONNECTIONSTATS._serialized_start=2985
-  _PROFILECONNECTIONSTATS._serialized_end=3110
-  _HEADSETPROFILECONNECTIONSTATS._serialized_start=3113
-  _HEADSETPROFILECONNECTIONSTATS._serialized_end=3264
-# @@protoc_insertion_point(module_scope)
diff --git a/src/antlion/test_utils/bt/pts/fuchsia_pts_ics_lib.py b/src/antlion/test_utils/bt/pts/fuchsia_pts_ics_lib.py
deleted file mode 100644
index 8c4d3e9..0000000
--- a/src/antlion/test_utils/bt/pts/fuchsia_pts_ics_lib.py
+++ /dev/null
@@ -1,365 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This is a placeholder for all ICS values in PTS
-    that matter to Fuchsia devices.
-"""
-
-# A2DP Values are just a placeholder.
-A2DP_ICS = {
-    b'TSPC_ALL': b'FALSE',
-    b'TSPC_A2DP_0_1': b'FALSE',
-    b'TSPC_A2DP_0_2': b'FALSE',
-    b'TSPC_A2DP_0_3': b'FALSE',
-    b'TSPC_A2DP_1_1': b'TRUE',
-    b'TSPC_A2DP_1_2': b'TRUE',
-    b'TSPC_A2DP_2_1': b'TRUE',
-    b'TSPC_A2DP_2a_1': b'FALSE',
-    b'TSPC_A2DP_2a_2': b'TRUE',
-    b'TSPC_A2DP_2a_3': b'FALSE',
-    b'TSPC_A2DP_2b_1': b'FALSE',
-    b'TSPC_A2DP_2b_2': b'FALSE',
-    b'TSPC_A2DP_2_2': b'TRUE',
-    b'TSPC_A2DP_2_3': b'TRUE',
-    b'TSPC_A2DP_2_4': b'TRUE',
-    b'TSPC_A2DP_2_5': b'TRUE',
-    b'TSPC_A2DP_2_6': b'TRUE',
-    b'TSPC_A2DP_2_7': b'TRUE',
-    b'TSPC_A2DP_2_8': b'FALSE',
-    b'TSPC_A2DP_2_9': b'FALSE',
-    b'TSPC_A2DP_2_10': b'TRUE',
-    b'TSPC_A2DP_2_10a': b'FALSE',
-    b'TSPC_A2DP_2_11': b'FALSE',
-    b'TSPC_A2DP_2_12': b'FALSE',
-    b'TSPC_A2DP_2_13': b'TRUE',
-    b'TSPC_A2DP_2_14': b'TRUE',
-    b'TSPC_A2DP_2_15': b'FALSE',
-    b'TSPC_A2DP_2_16': b'FALSE',
-    b'TSPC_A2DP_2_17': b'FALSE',
-    b'TSPC_A2DP_3_1': b'TRUE',
-    b'TSPC_A2DP_3_1a': b'FALSE',
-    b'TSPC_A2DP_3_2': b'TRUE',
-    b'TSPC_A2DP_3_3': b'FALSE',
-    b'TSPC_A2DP_3_4': b'FALSE',
-    b'TSPC_A2DP_3_5': b'TRUE',
-    b'TSPC_A2DP_3_6': b'FALSE',
-    b'TSPC_A2DP_3_7': b'FALSE',
-    b'TSPC_A2DP_3_8': b'FALSE',
-    b'TSPC_A2DP_3a_1': b'TRUE',
-    b'TSPC_A2DP_3a_2': b'FALSE',
-    b'TSPC_A2DP_3a_3': b'TRUE',
-    b'TSPC_A2DP_3a_4': b'TRUE',
-    b'TSPC_A2DP_3a_5': b'TRUE',
-    b'TSPC_A2DP_3a_6': b'TRUE',
-    b'TSPC_A2DP_3a_7': b'TRUE',
-    b'TSPC_A2DP_3a_8': b'TRUE',
-    b'TSPC_A2DP_3a_9': b'FALSE',
-    b'TSPC_A2DP_3a_10': b'TRUE',
-    b'TSPC_A2DP_3a_11': b'FALSE',
-    b'TSPC_A2DP_3a_12': b'TRUE',
-    b'TSPC_A2DP_4_1': b'TRUE',
-    b'TSPC_A2DP_4_2': b'TRUE',
-    b'TSPC_A2DP_4_3': b'FALSE',
-    b'TSPC_A2DP_4_4': b'TRUE',
-    b'TSPC_A2DP_4_5': b'TRUE',
-    b'TSPC_A2DP_4_6': b'FALSE',
-    b'TSPC_A2DP_4_7': b'TRUE',
-    b'TSPC_A2DP_4_8': b'FALSE',
-    b'TSPC_A2DP_4_9': b'TRUE',
-    b'TSPC_A2DP_4_10': b'TRUE',
-    b'TSPC_A2DP_4_10a': b'FALSE',
-    b'TSPC_A2DP_4_11': b'FALSE',
-    b'TSPC_A2DP_4_12': b'FALSE',
-    b'TSPC_A2DP_4_13': b'TRUE',
-    b'TSPC_A2DP_4_14': b'TRUE',
-    b'TSPC_A2DP_4_15': b'FALSE',
-    b'TSPC_A2DP_5_1': b'TRUE',
-    b'TSPC_A2DP_5_1a': b'TRUE',
-    b'TSPC_A2DP_5_2': b'TRUE',
-    b'TSPC_A2DP_5_3': b'FALSE',
-    b'TSPC_A2DP_5_4': b'FALSE',
-    b'TSPC_A2DP_5_5': b'FALSE',
-    b'TSPC_A2DP_5a_1': b'TRUE',
-    b'TSPC_A2DP_5a_2': b'TRUE',
-    b'TSPC_A2DP_5a_3': b'TRUE',
-    b'TSPC_A2DP_5a_4': b'TRUE',
-    b'TSPC_A2DP_5a_5': b'TRUE',
-    b'TSPC_A2DP_5a_6': b'TRUE',
-    b'TSPC_A2DP_5a_7': b'TRUE',
-    b'TSPC_A2DP_5a_8': b'TRUE',
-    b'TSPC_A2DP_5a_9': b'TRUE',
-    b'TSPC_A2DP_5a_10': b'TRUE',
-    b'TSPC_A2DP_5a_11': b'TRUE',
-    b'TSPC_A2DP_5a_12': b'TRUE',
-    b'TSPC_A2DP_7a_1': b'FALSE',
-    b'TSPC_A2DP_7a_2': b'FALSE',
-    b'TSPC_A2DP_7a_3': b'FALSE',
-    b'TSPC_A2DP_7b_1': b'FALSE',
-    b'TSPC_A2DP_7b_2': b'FALSE',
-
-    # Not available in Launch Studio Yet
-    b'TSPC_A2DP_10_1': b'FALSE',
-    b'TSPC_A2DP_10_2': b'FALSE',
-    b'TSPC_A2DP_10_3': b'FALSE',
-    b'TSPC_A2DP_10_4': b'FALSE',
-    b'TSPC_A2DP_10_5': b'FALSE',
-    b'TSPC_A2DP_10_6': b'FALSE',
-    b'TSPC_A2DP_11_1': b'FALSE',
-    b'TSPC_A2DP_11_2': b'FALSE',
-    b'TSPC_A2DP_11_3': b'FALSE',
-    b'TSPC_A2DP_11_4': b'FALSE',
-    b'TSPC_A2DP_11_5': b'FALSE',
-    b'TSPC_A2DP_11_6': b'FALSE',
-    b'TSPC_A2DP_12_2': b'FALSE',
-    b'TSPC_A2DP_12_3': b'FALSE',
-    b'TSPC_A2DP_12_3': b'FALSE',
-    b'TSPC_A2DP_12_4': b'FALSE',
-    b'TSPC_A2DP_13_1': b'FALSE',
-    b'TSPC_A2DP_13_2': b'FALSE',
-    b'TSPC_A2DP_13_3': b'FALSE',
-    b'TSPC_A2DP_13_4': b'FALSE',
-    b'TSPC_A2DP_14_1': b'FALSE',
-    b'TSPC_A2DP_14_2': b'FALSE',
-    b'TSPC_A2DP_14_3': b'FALSE',
-    b'TSPC_A2DP_14_4': b'FALSE',
-    b'TSPC_A2DP_14_5': b'FALSE',
-    b'TSPC_A2DP_15_1': b'FALSE',
-    b'TSPC_A2DP_15_2': b'FALSE',
-    b'TSPC_A2DP_15_3': b'FALSE',
-    b'TSPC_A2DP_15_4': b'FALSE',
-    b'TSPC_A2DP_15_5': b'FALSE',
-    b'TSPC_A2DP_15_6': b'FALSE',
-    b'TSPC_A2DP_3_2a': b'FALSE',
-    b'TSPC_A2DP_3_2b': b'FALSE',
-    b'TSPC_A2DP_3_2c': b'FALSE',
-    b'TSPC_A2DP_3_2d': b'FALSE',
-    b'TSPC_A2DP_3_2e': b'FALSE',
-    b'TSPC_A2DP_3_2f': b'FALSE',
-    b'TSPC_A2DP_5_2a': b'FALSE',
-    b'TSPC_A2DP_5_2b': b'FALSE',
-    b'TSPC_A2DP_5_2c': b'FALSE',
-    b'TSPC_A2DP_8_2': b'FALSE',
-    b'TSPC_A2DP_8_3': b'FALSE',
-    b'TSPC_A2DP_8_4': b'FALSE',
-    b'TSPC_A2DP_9_1': b'FALSE',
-    b'TSPC_A2DP_9_2': b'FALSE',
-    b'TSPC_A2DP_9_3': b'FALSE',
-    b'TSPC_A2DP_9_4': b'FALSE',
-
-}
-
-
-GATT_ICS = {
-    b'TSPC_GATT_1_1': b'TRUE',
-    b'TSPC_GATT_1_2': b'TRUE',
-    b'TSPC_GATT_1a_1': b'TRUE',
-    b'TSPC_GATT_1a_2': b'TRUE',
-    b'TSPC_GATT_1a_3': b'TRUE',
-    b'TSPC_GATT_1a_4': b'TRUE',
-    b'TSPC_GATT_1a_5': b'FALSE',
-    b'TSPC_GATT_1a_6': b'FALSE',
-    b'TSPC_GATT_1a_7': b'FALSE',
-    b'TSPC_GATT_1a_8': b'FALSE',
-    b'TSPC_GATT_2_1': b'FALSE',
-    b'TSPC_GATT_2_2': b'TRUE',
-    b'TSPC_GATT_3_1': b'TRUE',
-    b'TSPC_GATT_3_2': b'TRUE',
-    b'TSPC_GATT_3_3': b'TRUE',
-    b'TSPC_GATT_3_4': b'TRUE',
-    b'TSPC_GATT_3_5': b'TRUE',
-    b'TSPC_GATT_3_6': b'FALSE',
-    b'TSPC_GATT_3_7': b'TRUE',
-    b'TSPC_GATT_3_8': b'TRUE',
-    b'TSPC_GATT_3_9': b'TRUE',
-    b'TSPC_GATT_3_10': b'TRUE',
-    b'TSPC_GATT_3_11': b'FALSE',
-    b'TSPC_GATT_3_12': b'TRUE',
-    b'TSPC_GATT_3_13': b'FALSE',
-    b'TSPC_GATT_3_14': b'TRUE',
-    b'TSPC_GATT_3_15': b'TRUE',
-    b'TSPC_GATT_3_16': b'TRUE',
-    b'TSPC_GATT_3_17': b'TRUE',
-    b'TSPC_GATT_3_18': b'TRUE',
-    b'TSPC_GATT_3_19': b'TRUE',
-    b'TSPC_GATT_3_20': b'TRUE',
-    b'TSPC_GATT_3_21': b'TRUE',
-    b'TSPC_GATT_3_22': b'TRUE',
-    b'TSPC_GATT_3_23': b'TRUE',
-    b'TSPC_GATT_3_24': b'FALSE',
-    b'TSPC_GATT_3_25': b'FALSE',
-    b'TSPC_GATT_3_26': b'FALSE',
-    b'TSPC_GATT_3B_1': b'FALSE',
-    b'TSPC_GATT_3B_2': b'FALSE',
-    b'TSPC_GATT_3B_3': b'FALSE',
-    b'TSPC_GATT_3B_4': b'FALSE',
-    b'TSPC_GATT_3B_5': b'FALSE',
-    b'TSPC_GATT_3B_6': b'FALSE',
-    b'TSPC_GATT_3B_7': b'FALSE',
-    b'TSPC_GATT_3B_8': b'FALSE',
-    b'TSPC_GATT_3B_9': b'FALSE',
-    b'TSPC_GATT_3B_10': b'FALSE',
-    b'TSPC_GATT_3B_11': b'FALSE',
-    b'TSPC_GATT_3B_12': b'FALSE',
-    b'TSPC_GATT_3B_13': b'FALSE',
-    b'TSPC_GATT_3B_14': b'FALSE',
-    b'TSPC_GATT_3B_15': b'FALSE',
-    b'TSPC_GATT_3B_16': b'FALSE',
-    b'TSPC_GATT_3B_17': b'FALSE',
-    b'TSPC_GATT_3B_18': b'FALSE',
-    b'TSPC_GATT_3B_19': b'FALSE',
-    b'TSPC_GATT_3B_20': b'FALSE',
-    b'TSPC_GATT_3B_21': b'FALSE',
-    b'TSPC_GATT_3B_22': b'FALSE',
-    b'TSPC_GATT_3B_23': b'FALSE',
-    b'TSPC_GATT_3B_24': b'FALSE',
-    b'TSPC_GATT_3B_25': b'FALSE',
-    b'TSPC_GATT_3B_26': b'FALSE',
-    b'TSPC_GATT_3B_27': b'FALSE',
-    b'TSPC_GATT_3B_28': b'FALSE',
-    b'TSPC_GATT_3B_29': b'FALSE',
-    b'TSPC_GATT_3B_30': b'FALSE',
-    b'TSPC_GATT_3B_31': b'FALSE',
-    b'TSPC_GATT_3B_32': b'FALSE',
-    b'TSPC_GATT_3B_33': b'FALSE',
-    b'TSPC_GATT_3B_34': b'FALSE',
-    b'TSPC_GATT_3B_35': b'FALSE',
-    b'TSPC_GATT_3B_36': b'FALSE',
-    b'TSPC_GATT_3B_37': b'FALSE',
-    b'TSPC_GATT_3B_38': b'FALSE',
-    b'TSPC_GATT_4_1': b'TRUE',
-    b'TSPC_GATT_4_2': b'TRUE',
-    b'TSPC_GATT_4_3': b'TRUE',
-    b'TSPC_GATT_4_4': b'TRUE',
-    b'TSPC_GATT_4_5': b'TRUE',
-    b'TSPC_GATT_4_6': b'TRUE',
-    b'TSPC_GATT_4_7': b'TRUE',
-    b'TSPC_GATT_4_8': b'TRUE',
-    b'TSPC_GATT_4_9': b'TRUE',
-    b'TSPC_GATT_4_10': b'TRUE',
-    b'TSPC_GATT_4_11': b'FALSE',
-    b'TSPC_GATT_4_12': b'TRUE',
-    b'TSPC_GATT_4_13': b'FALSE',
-    b'TSPC_GATT_4_14': b'TRUE',
-    b'TSPC_GATT_4_15': b'TRUE',
-    b'TSPC_GATT_4_16': b'TRUE',
-    b'TSPC_GATT_4_17': b'TRUE',
-    b'TSPC_GATT_4_18': b'TRUE',
-    b'TSPC_GATT_4_19': b'TRUE',
-    b'TSPC_GATT_4_20': b'TRUE',
-    b'TSPC_GATT_4_21': b'TRUE',
-    b'TSPC_GATT_4_22': b'TRUE',
-    b'TSPC_GATT_4_23': b'TRUE',
-    b'TSPC_GATT_4_24': b'FALSE',
-    b'TSPC_GATT_4_25': b'FALSE',
-    b'TSPC_GATT_4_26': b'FALSE',
-    b'TSPC_GATT_4_27': b'FALSE',
-    b'TSPC_GATT_4B_1': b'FALSE',
-    b'TSPC_GATT_4B_2': b'FALSE',
-    b'TSPC_GATT_4B_3': b'FALSE',
-    b'TSPC_GATT_4B_4': b'FALSE',
-    b'TSPC_GATT_4B_5': b'FALSE',
-    b'TSPC_GATT_4B_6': b'FALSE',
-    b'TSPC_GATT_4B_7': b'FALSE',
-    b'TSPC_GATT_4B_8': b'FALSE',
-    b'TSPC_GATT_4B_9': b'FALSE',
-    b'TSPC_GATT_4B_10': b'FALSE',
-    b'TSPC_GATT_4B_11': b'FALSE',
-    b'TSPC_GATT_4B_12': b'FALSE',
-    b'TSPC_GATT_4B_13': b'FALSE',
-    b'TSPC_GATT_4B_14': b'FALSE',
-    b'TSPC_GATT_4B_15': b'FALSE',
-    b'TSPC_GATT_4B_16': b'FALSE',
-    b'TSPC_GATT_4B_17': b'FALSE',
-    b'TSPC_GATT_4B_18': b'FALSE',
-    b'TSPC_GATT_4B_19': b'FALSE',
-    b'TSPC_GATT_4B_20': b'FALSE',
-    b'TSPC_GATT_4B_21': b'FALSE',
-    b'TSPC_GATT_4B_22': b'FALSE',
-    b'TSPC_GATT_4B_23': b'FALSE',
-    b'TSPC_GATT_4B_24': b'FALSE',
-    b'TSPC_GATT_4B_25': b'FALSE',
-    b'TSPC_GATT_4B_26': b'FALSE',
-    b'TSPC_GATT_4B_27': b'FALSE',
-    b'TSPC_GATT_4B_28': b'FALSE',
-    b'TSPC_GATT_4B_29': b'FALSE',
-    b'TSPC_GATT_4B_30': b'FALSE',
-    b'TSPC_GATT_4B_31': b'FALSE',
-    b'TSPC_GATT_4B_32': b'FALSE',
-    b'TSPC_GATT_4B_33': b'FALSE',
-    b'TSPC_GATT_4B_34': b'FALSE',
-    b'TSPC_GATT_4B_35': b'FALSE',
-    b'TSPC_GATT_4B_36': b'FALSE',
-    b'TSPC_GATT_4B_37': b'FALSE',
-    b'TSPC_GATT_4B_38': b'FALSE',
-    b'TSPC_GATT_6_2': b'TRUE',
-    b'TSPC_GATT_6_3': b'TRUE',
-    b'TSPC_GATT_7_1': b'TRUE',
-    b'TSPC_GATT_7_2': b'TRUE',
-    b'TSPC_GATT_7_3': b'TRUE',
-    b'TSPC_GATT_7_4': b'TRUE',
-    b'TSPC_GATT_7_5': b'FALSE',
-    b'TSPC_GATT_7_6': b'FALSE',
-    b'TSPC_GATT_7_7': b'FALSE',
-    b'TSPC_GATT_8_1': b'TRUE',
-    b'TSPC_GAP_0_2': b'FALSE',
-    b'TSPC_GAP_24_2': b'TRUE',
-    b'TSPC_GAP_24_3': b'TRUE',
-    b'TSPC_GAP_34_2': b'TRUE',
-    b'TSPC_GAP_34_3': b'TRUE',
-    b'TSPC_ALL': b'FALSE',
-}
-
-
-SDP_ICS = {
-    b'TSPC_ALL': b'FALSE',
-    b'TSPC_SDP_1_1': b'TRUE',
-    b'TSPC_SDP_1_2': b'TRUE',
-    b'TSPC_SDP_1_3': b'TRUE',
-    b'TSPC_SDP_1b_1': b'TRUE',
-    b'TSPC_SDP_1b_2': b'TRUE',
-    b'TSPC_SDP_2_1': b'TRUE',
-    b'TSPC_SDP_2_2': b'TRUE',
-    b'TSPC_SDP_2_3': b'TRUE',
-    b'TSPC_SDP_3_1': b'TRUE',
-    b'TSPC_SDP_4_1': b'TRUE',
-    b'TSPC_SDP_4_2': b'TRUE',
-    b'TSPC_SDP_4_3': b'TRUE',
-    b'TSPC_SDP_5_1': b'TRUE',
-    b'TSPC_SDP_6_1': b'TRUE',
-    b'TSPC_SDP_6_2': b'TRUE',
-    b'TSPC_SDP_6_3': b'TRUE',
-    b'TSPC_SDP_7_1': b'TRUE',
-    b'TSPC_SDP_8_1': b'FALSE',
-    b'TSPC_SDP_8_2': b'FALSE',
-    b'TSPC_SDP_9_1': b'TRUE',
-    b'TSPC_SDP_9_2': b'TRUE',
-    b'TSPC_SDP_9_3': b'FALSE',
-    b'TSPC_SDP_9_4': b'FALSE',
-    b'TSPC_SDP_9_5': b'TRUE',
-    b'TSPC_SDP_9_6': b'TRUE',
-    b'TSPC_SDP_9_7': b'FALSE',
-    b'TSPC_SDP_9_8': b'FALSE',
-    b'TSPC_SDP_9_9': b'TRUE',
-    b'TSPC_SDP_9_10': b'TRUE',
-    b'TSPC_SDP_9_11': b'TRUE',
-    b'TSPC_SDP_9_12': b'FALSE',
-    b'TSPC_SDP_9_13': b'FALSE',
-    b'TSPC_SDP_9_14': b'TRUE',
-    b'TSPC_SDP_9_15': b'FALSE',
-    b'TSPC_SDP_9_16': b'FALSE',
-    b'TSPC_SDP_9_17': b'TRUE',
-    b'TSPC_SDP_9_18': b'TRUE',
-    b'TSPC_SDP_9_19': b'TRUE',
-}
diff --git a/src/antlion/test_utils/bt/pts/fuchsia_pts_ixit_lib.py b/src/antlion/test_utils/bt/pts/fuchsia_pts_ixit_lib.py
deleted file mode 100644
index c8fdf5c..0000000
--- a/src/antlion/test_utils/bt/pts/fuchsia_pts_ixit_lib.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This is a placeholder for all IXIT values in PTS
-    that matter to Fuchsia devices.
-"""
-
-A2DP_IXIT = {
-    b'TSPX_security_enabled': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_bd_addr_iut': (b'OCTETSTRING', b'000000000000'),
-    b'TSPX_SRC_class_of_device': (b'OCTETSTRING', b'080418'),
-    b'TSPX_SNK_class_of_device': (b'OCTETSTRING', b'04041C'),
-    b'TSPX_pin_code': (b'IA5STRING', b'0000'),
-    b'TSPX_delete_link_key': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_time_guard': (b'INTEGER', b'300000'),
-    b'TSPX_use_implicit_send': (b'BOOLEAN', b'TRUE'),
-    b'TSPX_media_directory':
-    (b'IA5STRING', b'C:\Program Files\Bluetooth SIG\Bluetooth PTS\\bin\\audio'),
-    b'TSPX_auth_password': (b'IA5STRING', b'0000'),
-    b'TSPX_auth_user_id': (b'IA5STRING', b'PTS'),
-    b'TSPX_rfcomm_channel': (b'INTEGER', b'8'),
-    b'TSPX_l2cap_psm': (b'OCTETSTRING', b'1011'),
-    b'TSPX_no_confirmations': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_cover_art_uuid': (b'OCTETSTRING', b'3EEE'),
-}
-
-GATT_IXIT = {
-    b'TSPX_bd_addr_iut': (b'OCTETSTRING', b'000000000000'),
-    b'TSPX_iut_device_name_in_adv_packet_for_random_address': (b'IA5STRING', b'tbd'),
-    b'TSPX_security_enabled': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_delete_link_key': (b'BOOLEAN', b'TRUE'),
-    b'TSPX_time_guard': (b'INTEGER', b'180000'),
-    b'TSPX_selected_handle': (b'OCTETSTRING', b'0012'),
-    b'TSPX_use_implicit_send': (b'BOOLEAN', b'TRUE'),
-    b'TSPX_secure_simple_pairing_pass_key_confirmation': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_iut_use_dynamic_bd_addr': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_iut_setup_att_over_br_edr': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_tester_database_file': (b'IA5STRING', b'C:\Program Files\Bluetooth SIG\Bluetooth PTS\Data\SIGDatabase\GATT_Qualification_Test_Databases.xml'),
-    b'TSPX_iut_is_client_periphral': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_iut_is_server_central': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_mtu_size': (b'INTEGER', b'23'),
-    b'TSPX_pin_code':  (b'IA5STRING', b'0000'),
-    b'TSPX_use_dynamic_pin': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_delete_ltk': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_tester_appearance': (b'OCTETSTRING', b'0000'),
-}
-
-SDP_IXIT = {
-    b'TSPX_sdp_service_search_pattern': (b'IA5STRING', b'0100'),
-    b'TSPX_sdp_service_search_pattern_no_results': (b'IA5STRING', b'EEEE'),
-    b'TSPX_sdp_service_search_pattern_additional_protocol_descriptor_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_bluetooth_profile_descriptor_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_browse_group_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_client_exe_url': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_documentation_url': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_icon_url': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_language_base_attribute_id_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_protocol_descriptor_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_provider_name': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_availability': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_data_base_state': (b'IA5STRING', b'1000'),
-    b'TSPX_sdp_service_search_pattern_service_description': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_id': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_info_time_to_live': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_version_number_list': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_name': (b'IA5STRING', b''),
-    b'TSPX_sdp_service_search_pattern_service_record_state': (b'IA5STRING', b''),
-    b'TSPX_sdp_unsupported_attribute_id': (b'OCTETSTRING', b'EEEE'),
-    b'TSPX_security_enabled': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_delete_link_key': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_bd_addr_iut': (b'OCTETSTRING', b''),
-    b'TSPX_class_of_device_pts': (b'OCTETSTRING', b'200404'),
-    b'TSPX_class_of_device_test_pts_initiator': (b'BOOLEAN', b'TRUE'),
-    b'TSPX_limited_inquiry_used': (b'BOOLEAN', b'FALSE'),
-    b'TSPX_pin_code': (b'IA5STRING', b'0000'),
-    b'TSPX_time_guard': (b'INTEGER', b'200000'),
-    b'TSPX_device_search_time': (b'INTEGER', b'20'),
-    b'TSPX_use_implicit_send': (b'BOOLEAN', b'TRUE'),
-    b'TSPX_secure_simple_pairing_pass_key_confirmation': (b'BOOLEAN', b'FALSE'),
-}
diff --git a/src/antlion/test_utils/bt/pts/pts_base_class.py b/src/antlion/test_utils/bt/pts/pts_base_class.py
deleted file mode 100644
index cee0389..0000000
--- a/src/antlion/test_utils/bt/pts/pts_base_class.py
+++ /dev/null
@@ -1,355 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This is the PTS base class that is inherited from all PTS
-Tests.
-"""
-
-import re
-import time
-import traceback
-
-from ctypes import *
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.controllers.bluetooth_pts_device import VERDICT_STRINGS
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.signals import TestSignal
-from antlion.test_utils.abstract_devices.bluetooth_device import create_bluetooth_device
-from antlion.test_utils.bt.bt_constants import gatt_transport
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-
-
-class PtsBaseClass(BaseTestClass):
-    """ Class for representing common functionality across all PTS tests.
-
-    This includes the ability to rerun tests due to PTS instability,
-    common PTS action mappings, and setup/teardown related devices.
-
-    """
-    scan_timeout_seconds = 10
-    peer_identifier = None
-
-    def setup_class(self):
-        super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_bluetooth_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_bluetooth_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an fuchsia device
-            self.dut = create_bluetooth_device(self.fuchsia_devices[0])
-
-        self.characteristic_read_not_permitted_uuid = self.user_params.get(
-            "characteristic_read_not_permitted_uuid")
-        self.characteristic_read_not_permitted_handle = self.user_params.get(
-            "characteristic_read_not_permitted_handle")
-        self.characteristic_read_invalid_handle = self.user_params.get(
-            "characteristic_read_invalid_handle")
-        self.characteristic_attribute_not_found_uuid = self.user_params.get(
-            "characteristic_attribute_not_found_uuid")
-        self.write_characteristic_not_permitted_handle = self.user_params.get(
-            "write_characteristic_not_permitted_handle")
-
-        self.pts = self.bluetooth_pts_device[0]
-        # MMI functions commented out until implemented. Added for tracking
-        # purposes.
-        self.pts_action_mapping = {
-            "A2DP": {
-                1: self.a2dp_mmi_iut_connectable,
-                1002: self.a2dp_mmi_iut_accept_connect,
-                1020: self.a2dp_mmi_initiate_open_stream,
-            },
-            "GATT": {
-                1: self.mmi_make_iut_connectable,
-                2: self.mmi_iut_initiate_connection,
-                3: self.mmi_iut_initiate_disconnection,
-                # 4: self.mmi_iut_no_security,
-                # 5: self.mmi_iut_initiate_br_connection,
-                10: self.mmi_discover_primary_service,
-                # 11: self.mmi_confirm_no_primary_service_small,
-                # 12: self.mmi_iut_mtu_exchange,
-                # 13: self.mmi_discover_all_service_record,
-                # 14: self.mmi_iut_discover_gatt_service_record,
-                15: self.mmi_iut_find_included_services,
-                # 16: self.mmi_confirm_no_characteristic_uuid_small,
-                17: self.mmi_confirm_primary_service,
-                # 18: self.mmi_send_primary_service_uuid,
-                # 19: self.mmi_confirm_primary_service_uuid,
-                # 22: self.confirm_primary_service_1801,
-                24: self.mmi_confirm_include_service,
-                26: self.mmi_confirm_characteristic_service,
-                # 27: self.perform_read_all_characteristics,
-                29: self.
-                mmi_discover_service_uuid_range,  # AKA: discover service by uuid
-                # 31: self.perform_read_all_descriptors,
-                48: self.mmi_iut_send_read_characteristic_handle,
-                58: self.mmi_iut_send_read_descriptor_handle,
-                70: self.mmi_send_write_command,
-                74: self.mmi_send_write_request,
-                76: self.mmi_send_prepare_write,
-                77: self.mmi_iut_send_prepare_write_greater_offset,
-                80: self.mmi_iut_send_prepare_write_greater,
-                110: self.mmi_iut_enter_handle_read_not_permitted,
-                111: self.mmi_iut_enter_uuid_read_not_permitted,
-                118: self.mmi_iut_enter_handle_invalid,
-                119: self.mmi_iut_enter_uuid_attribute_not_found,
-                120: self.mmi_iut_enter_handle_write_not_permitted,
-                2000: self.mmi_verify_secure_id,  # Enter pairing pin from DUT.
-            },
-            "SDP": {
-                # TODO: Implement MMIs as necessary
-            }
-        }
-        self.pts.bind_to(self.process_next_action)
-
-    def teardown_class(self):
-        self.pts.clean_up()
-
-    def setup_test(self):
-        # Always start the test with RESULT_INCOMP
-        self.pts.pts_test_result = VERDICT_STRINGS['RESULT_INCOMP']
-
-    def teardown_test(self):
-        return True
-
-    @staticmethod
-    def pts_test_wrap(fn):
-        def _safe_wrap_test_case(self, *args, **kwargs):
-            test_id = "{}:{}:{}".format(self.__class__.__name__, fn.__name__,
-                                        time.time())
-            log_string = "[Test ID] {}".format(test_id)
-            self.log.info(log_string)
-            try:
-                self.dut.log_info("Started " + log_string)
-                result = fn(self, *args, **kwargs)
-                self.dut.log_info("Finished " + log_string)
-                rerun_count = self.user_params.get("pts_auto_rerun_count", 0)
-                for i in range(int(rerun_count)):
-                    if result is not True:
-                        self.teardown_test()
-                        log_string = "[Rerun Test ID] {}. Run #{} run failed... Retrying".format(
-                            test_id, i + 1)
-                        self.log.info(log_string)
-                        self.setup_test()
-                        self.dut.log_info("Rerun Started " + log_string)
-                        result = fn(self, *args, **kwargs)
-                    else:
-                        return result
-                return result
-            except TestSignal:
-                raise
-            except Exception as e:
-                self.log.error(traceback.format_exc())
-                self.log.error(str(e))
-                raise
-            return fn(self, *args, **kwargs)
-
-        return _safe_wrap_test_case
-
-    def process_next_action(self, action):
-        func = self.pts_action_mapping.get(
-            self.pts.pts_profile_mmi_request).get(action, "Nothing")
-        if func != 'Nothing':
-            func()
-
-    ### BEGIN A2DP MMI Actions ###
-
-    def a2dp_mmi_iut_connectable(self):
-        self.dut.start_profile_a2dp_sink()
-        self.dut.set_discoverable(True)
-
-    def a2dp_mmi_iut_accept_connect(self):
-        self.dut.start_profile_a2dp_sink()
-        self.dut.set_discoverable(True)
-
-    def a2dp_mmi_initiate_open_stream(self):
-        self.dut.a2dp_initiate_open_stream()
-
-    ### END A2DP MMI Actions ###
-
-    ### BEGIN GATT MMI Actions ###
-
-    def create_write_value_by_size(self, size):
-        write_value = []
-        for i in range(size):
-            write_value.append(i % 256)
-        return write_value
-
-    def mmi_send_write_command(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O with', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        raw_size = re.search('with <= \'(.*)\' byte', description_to_parse)
-        size = int(raw_size.group(1))
-        self.dut.gatt_client_write_characteristic_without_response_by_handle(
-            self.peer_identifier, handle,
-            self.create_write_value_by_size(size))
-
-    def mmi_send_write_request(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O with', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        raw_size = re.search('with <= \'(.*)\' byte', description_to_parse)
-        size = int(raw_size.group(1))
-        offset = 0
-        self.dut.gatt_client_write_characteristic_by_handle(
-            self.peer_identifier, handle, offset,
-            self.create_write_value_by_size(size))
-
-    def mmi_send_prepare_write(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O <=', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        raw_size = re.search('<= \'(.*)\' byte', description_to_parse)
-        size = int(math.floor(int(raw_size.group(1)) / 2))
-        offset = int(size / 2)
-        self.dut.gatt_client_write_characteristic_by_handle(
-            self.peer_identifier, handle, offset,
-            self.create_write_value_by_size(size))
-
-    def mmi_iut_send_prepare_write_greater_offset(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O and', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        raw_offset = re.search('greater than \'(.*)\' byte',
-                               description_to_parse)
-        offset = int(raw_offset.group(1))
-        size = 1
-        self.dut.gatt_client_write_characteristic_by_handle(
-            self.peer_identifier, handle, offset,
-            self.create_write_value_by_size(size))
-
-    def mmi_iut_send_prepare_write_greater(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O with', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        raw_size = re.search('greater than \'(.*)\' byte',
-                             description_to_parse)
-        size = int(raw_size.group(1))
-        offset = 0
-        self.dut.gatt_client_write_characteristic_by_handle(
-            self.peer_identifier, handle, offset,
-            self.create_write_value_by_size(size))
-
-    def mmi_make_iut_connectable(self):
-        adv_data = {
-            "name": fuchsia_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-        interval = 1000
-
-        self.dut.start_le_advertisement(adv_data, scan_response, interval,
-                                        connectable)
-
-    def mmi_iut_enter_uuid_read_not_permitted(self):
-        self.pts.extra_answers.append(
-            self.characteristic_read_not_permitted_uuid)
-
-    def mmi_iut_enter_handle_read_not_permitted(self):
-        self.pts.extra_answers.append(
-            self.characteristic_read_not_permitted_handle)
-
-    def mmi_iut_enter_handle_invalid(self):
-        self.pts.extra_answers.append(self.characteristic_read_invalid_handle)
-
-    def mmi_iut_enter_uuid_attribute_not_found(self):
-        self.pts.extra_answers.append(
-            self.characteristic_attribute_not_found_uuid)
-
-    def mmi_iut_enter_handle_write_not_permitted(self):
-        self.pts.extra_answers.append(
-            self.write_characteristic_not_permitted_handle)
-
-    def mmi_verify_secure_id(self):
-        self.pts.extra_answers.append(self.dut.get_pairing_pin())
-
-    def mmi_discover_service_uuid_range(self, uuid):
-        self.dut.gatt_client_mmi_discover_service_uuid_range(
-            self.peer_identifier, uuid)
-
-    def mmi_iut_initiate_connection(self):
-        autoconnect = False
-        transport = gatt_transport['le']
-        adv_name = "PTS"
-        self.peer_identifier = self.dut.le_scan_with_name_filter(
-            "PTS", self.scan_timeout_seconds)
-        if self.peer_identifier is None:
-            raise signals.TestFailure("Scanner unable to find advertisement.")
-        tries = 3
-        for _ in range(tries):
-            if self.dut.gatt_connect(self.peer_identifier, transport,
-                                     autoconnect):
-                return
-
-        raise signals.TestFailure("Unable to connect to peripheral.")
-
-    def mmi_iut_initiate_disconnection(self):
-        if not self.dut.gatt_disconnect(self.peer_identifier):
-            raise signals.TestFailure("Failed to disconnect from peer.")
-
-    def mmi_discover_primary_service(self):
-        self.dut.gatt_refresh()
-
-    def mmi_iut_find_included_services(self):
-        self.dut.gatt_refresh()
-
-        test_result = self.pts.execute_test(test_name)
-        return test_result
-
-    def mmi_confirm_primary_service(self):
-        # TODO: Write verifier that 1800 and 1801 exists. For now just pass.
-        return True
-
-    def mmi_confirm_characteristic_service(self):
-        # TODO: Write verifier that no services exist. For now just pass.
-        return True
-
-    def mmi_confirm_include_service(self, uuid_description):
-        # TODO: Write verifier that input services exist. For now just pass.
-        # Note: List comes in the form of a long string to parse:
-        # Attribute Handle = '0002'O Included Service Attribute handle = '0080'O,End Group Handle = '0085'O,Service UUID = 'A00B'O
-        # \n
-        # Attribute Handle = '0021'O Included Service Attribute handle = '0001'O,End Group Handle = '0006'O,Service UUID = 'A00D'O
-        # \n ...
-        return True
-
-    def mmi_iut_send_read_characteristic_handle(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O to', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        self.dut.gatt_client_read_characteristic_by_handle(
-            self.peer_identifier, handle)
-
-    def mmi_iut_send_read_descriptor_handle(self):
-        description_to_parse = self.pts.current_implicit_send_description
-        raw_handle = re.search('handle = \'(.*)\'O to', description_to_parse)
-        handle = int(raw_handle.group(1), 16)
-        self.dut.gatt_client_descriptor_read_by_handle(self.peer_identifier,
-                                                       handle)
-
-    ### END GATT MMI Actions ###
diff --git a/src/antlion/test_utils/bt/rfcomm_lib.py b/src/antlion/test_utils/bt/rfcomm_lib.py
deleted file mode 100644
index 62d650c..0000000
--- a/src/antlion/test_utils/bt/rfcomm_lib.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Bluetooth adapter libraries
-"""
-
-from antlion.test_utils.bt.bt_constants import bt_rfcomm_uuids
-from antlion.test_utils.bt.bt_test_utils import set_bt_scan_mode
-
-
-class RfcommLib():
-    def __init__(self, log, dut, target_mac_addr=None):
-        self.advertisement_list = []
-        self.dut = dut
-        self.log = log
-        self.target_mac_addr = target_mac_addr
-
-    def set_target_mac_addr(self, mac_addr):
-        self.target_mac_addr = mac_addr
-
-    def connect(self, line):
-        """Perform an RFCOMM connect"""
-        uuid = None
-        if len(line) > 0:
-            uuid = line
-        if uuid:
-            self.dut.droid.bluetoothRfcommBeginConnectThread(
-                self.target_mac_addr, uuid)
-        else:
-            self.dut.droid.bluetoothRfcommBeginConnectThread(
-                self.target_mac_addr)
-
-    def open_rfcomm_socket(self):
-        """Open rfcomm socket"""
-        self.dut.droid.rfcommCreateRfcommSocket(self.target_mac_addr, 1)
-
-    def open_l2cap_socket(self):
-        """Open L2CAP socket"""
-        self.dut.droid.rfcommCreateL2capSocket(self.target_mac_addr, 1)
-
-    def write(self, line):
-        """Write String data over an RFCOMM connection"""
-        self.dut.droid.bluetoothRfcommWrite(line)
-
-    def write_binary(self, line):
-        """Write String data over an RFCOMM connection"""
-        self.dut.droid.bluetoothRfcommWriteBinary(line)
-
-    def end_connect(self):
-        """End RFCOMM connection"""
-        self.dut.droid.bluetoothRfcommEndConnectThread()
-
-    def accept(self, line):
-        """Accept RFCOMM connection"""
-        uuid = None
-        if len(line) > 0:
-            uuid = line
-        if uuid:
-            self.dut.droid.bluetoothRfcommBeginAcceptThread(uuid)
-        else:
-            self.dut.droid.bluetoothRfcommBeginAcceptThread(
-                bt_rfcomm_uuids['base_uuid'])
-
-    def stop(self):
-        """Stop RFCOMM Connection"""
-        self.dut.droid.bluetoothRfcommStop()
-
-    def open_l2cap_socket(self):
-        """Open L2CAP socket"""
-        self.dut.droid.rfcommCreateL2capSocket(self.target_mac_addr, 1)
diff --git a/src/antlion/test_utils/bt/shell_commands_lib.py b/src/antlion/test_utils/bt/shell_commands_lib.py
deleted file mode 100644
index 0eafd73..0000000
--- a/src/antlion/test_utils/bt/shell_commands_lib.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Shell command library.
-"""
-
-
-class ShellCommands():
-    def __init__(self, log, dut):
-        self.dut = dut
-        self.log = log
-
-    def set_battery_level(self, level):
-        """Set the battery level via ADB shell
-        Args:
-            level: the percent level to set
-        """
-        self.dut.adb.shell("dumpsys battery set level {}".format(level))
-
-    def disable_ble_scanning(self):
-        """Disable BLE scanning via ADB shell"""
-        self.dut.adb.shell("settings put global ble_scan_always_enabled 0")
-
-    def enable_ble_scanning(self):
-        """Enable BLE scanning via ADB shell"""
-        self.dut.adb.shell("settings put global ble_scan_always_enabled 1")
-
-    def consume_cpu_core(self):
-        """Consume a CPU core on the Android device via ADB shell"""
-        self.dut.adb.shell("echo $$ > /dev/cpuset/top-app/tasks")
-        self.dut.adb.shell("cat /dev/urandom > /dev/null &")
diff --git a/src/antlion/test_utils/bt/simulated_carkit_device.py b/src/antlion/test_utils/bt/simulated_carkit_device.py
deleted file mode 100644
index 533184e..0000000
--- a/src/antlion/test_utils/bt/simulated_carkit_device.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-
-from antlion.controllers import android_device
-from antlion.test_utils.bt.bt_test_utils import bluetooth_enabled_check
-
-# TODO: This class to be deprecated for
-# ../acts/test_utils/abstract_devices/bluetooth_handsfree_abstract_device.py
-
-
-class SimulatedCarkitDevice():
-    def __init__(self, serial):
-        self.ad = android_device.create(serial)[0]
-        if not bluetooth_enabled_check(self.ad):
-            asserts.fail("No able to turn on bluetooth")
-        self.mac_address = self.ad.droid.bluetoothGetLocalAddress()
-        self.ad.droid.bluetoothToggleState(False)
-        self.ad.droid.bluetoothMediaConnectToCarMBS()
-
-    def destroy(self):
-        self.ad.clean_up()
-
-    def accept_call(self):
-        return self.ad.droid.telecomAcceptRingingCall(None)
-
-    def end_call(self):
-        return self.ad.droid.telecomEndCall()
-
-    def enter_pairing_mode(self):
-        self.ad.droid.bluetoothStartPairingHelper(True)
-        return self.ad.droid.bluetoothMakeDiscoverable()
-
-    def next_track(self):
-        return self.ad.droid.bluetoothMediaPassthrough("skipNext")
-
-    def pause(self):
-        return self.ad.droid.bluetoothMediaPassthrough("pause")
-
-    def play(self):
-        return self.ad.droid.bluetoothMediaPassthrough("play")
-
-    def power_off(self):
-        return self.ad.droid.bluetoothToggleState(False)
-
-    def power_on(self):
-        return self.ad.droid.bluetoothToggleState(True)
-
-    def previous_track(self):
-        return self.ad.droid.bluetoothMediaPassthrough("skipPrev")
-
-    def reject_call(self):
-        return self.ad.droid.telecomCallDisconnect(
-            self.ad.droid.telecomCallGetCallIds()[0])
-
-    def volume_down(self):
-        target_step = self.ad.droid.getMediaVolume() - 1
-        target_step = max(target_step, 0)
-        return self.ad.droid.setMediaVolume(target_step)
-
-    def volume_up(self):
-        target_step = self.ad.droid.getMediaVolume() + 1
-        max_step = self.ad.droid.getMaxMediaVolume()
-        target_step = min(target_step, max_step)
-        return self.ad.droid.setMediaVolume(target_step)
diff --git a/src/antlion/test_utils/fuchsia/bt_test_utils.py b/src/antlion/test_utils/fuchsia/bt_test_utils.py
deleted file mode 100644
index 4706f2c..0000000
--- a/src/antlion/test_utils/fuchsia/bt_test_utils.py
+++ /dev/null
@@ -1,237 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import re
-import subprocess
-import time
-
-PERSISTENT_BLUETOOTH_STORAGE_LOCATION = "/data/persistent/c1a6d0aebbf7c092c53e8e696636af8ec0629ff39b7f2e548430b0034d809da4/stash_secure.store"
-
-
-def le_scan_for_device_by_name(fd,
-                               log,
-                               search_name,
-                               timeout,
-                               partial_match=False,
-                               self_manage_scan=True):
-    """Scan for and returns the first BLE advertisement with the device name.
-
-    Args:
-        fd: The Fuchsia device to start LE scanning on.
-        log: The log var passed in from the test.
-        search_name: The name to find.
-        timeout: How long to scan for.
-        partial_match: Only do a partial match for the LE advertising name.
-          This will return the first result that had a partial match.
-        self_manage_scan: Whther or not this function should start/stop (True)
-          scans or if the caller should (False).
-
-    Returns:
-        The dictionary of device information.
-    """
-    if self_manage_scan:
-        scan_filter = {"name_substring": search_name}
-        fd.sl4f.gattc_lib.bleStartBleScan(scan_filter)
-    end_time = time.time() + timeout
-    found_device = None
-    while time.time() < end_time and not found_device:
-        time.sleep(1)
-        scan_res = fd.sl4f.gattc_lib.bleGetDiscoveredDevices()['result']
-        for device in scan_res:
-            name, did, connectable = device["name"], device["id"], device[
-                "connectable"]
-            if name == search_name or (partial_match and search_name in name):
-                log.info("Successfully found advertisement! name, id: {}, {}".
-                         format(name, did))
-                found_device = device
-    if self_manage_scan:
-        fd.sl4f.gattc_lib.bleStopBleScan()
-    if not found_device:
-        log.error("Failed to find device with name {}.".format(search_name))
-    return found_device
-
-
-def bredr_scan_for_device_by_name(fd,
-                                  log,
-                                  search_name,
-                                  timeout,
-                                  partial_match=False):
-    """Discover for and returns the first Classic device that matches search_name.
-
-    Args:
-        fd: The Fuchsia device to start Classic discovery on.
-        log: The log var passed in from the test.
-        search_name: The name to find.
-        timeout: How long to scan for.
-        partial_match: Only do a partial match for the search_name.
-          This will return the first result that had a partial match.
-
-    Returns:
-        The dictionary of device information.
-    """
-    fd.sl4f.bts_lib.requestDiscovery(True)
-
-    end_time = time.time() + timeout
-    found_device = None
-    while time.time() < end_time and not found_device:
-        scan_res = fd.sl4f.bts_lib.getKnownRemoteDevices()['result']
-        for device in scan_res:
-            name, did = scan_res[device]["name"], scan_res[device]["id"]
-            if name == search_name or (partial_match and search_name in name):
-                log.info("Successfully found peer! name, id: {}, {}".format(
-                    name, did))
-                found_device = did
-        time.sleep(1)
-    fd.sl4f.bts_lib.requestDiscovery(False)
-    if not found_device:
-        log.error("Failed to find device with name {}.".format(search_name))
-        return found_device
-    return found_device
-
-
-def unbond_all_known_devices(fd, log):
-    """Unbond all known devices from input Fuchsia Device.
-
-    Args:
-        fd: The Fuchsia device to unbond devices from.
-        log: The log var passed in from the test.
-    """
-    fd.sl4f.bts_lib.requestDiscovery(True)
-    device_list = fd.sl4f.bts_lib.getKnownRemoteDevices()['result']
-    fd.sl4f.bts_lib.requestDiscovery(False)
-    for device in device_list:
-        d = device_list[device]
-        if d['bonded'] or d['connected']:
-            log.info("Unbonding device: {}".format(d))
-            log.info(fd.sl4f.bts_lib.forgetDevice(d['id'])['result'])
-
-
-def verify_device_state_by_name(fd, log, search_name, state, services=None):
-    """Verify a connection state change happened an input device.
-
-    Args:
-        fd: The Fuchsia device to unbond devices from.
-        log: The log var passed in from the test.
-        search_name: The device name to find.
-        state: The expected state.
-        services: An optional list of services to expect based on the connected
-            device.
-    """
-    fd.sl4f.bts_lib.requestDiscovery(True)
-
-    seconds_allowed_for_state_change = 10
-    end_time = time.time() + seconds_allowed_for_state_change
-    found_state = None
-    while time.time() < end_time and not found_state:
-        device_list = fd.sl4f.bts_lib.getKnownRemoteDevices()['result']
-        for device in device_list:
-            d = device_list[device]
-            name = d['name']
-            if name == search_name:
-                print(d)
-                if state == "CONNECTED" and d['connected']:
-                    log.info("Found connected device {}".format(d))
-                    found_state = True
-                    break
-                if state == "BONDED" and d['bonded']:
-                    log.info("Found bonded device {}".format(d))
-                    found_state = True
-                    break
-        time.sleep(1)
-    #TODO: Verify services.
-    fd.sl4f.bts_lib.requestDiscovery(False)
-    return found_state
-
-
-def decode_list_to_link_key(raw_list):
-    """ Decodes the input int list to a string link key
-    Args:
-        raw_list: The list of int values to convert
-    Returns:
-        A string represetnation of the link key
-    """
-    str_list = ""
-    raw_list.reverse()
-    for item in raw_list:
-        check = str(hex(int(item)))[2:]
-        if len(check) == 1:
-            check = "0{}".format(check)
-        str_list += check
-    return str_list
-
-
-def get_link_keys(fd, save_path):
-    """Get Bluetooth link keys and LTKs for input Fuchsia device.
-
-    Args:
-        fd: The Fuchsia device object.
-        save_path: The custom save path.
-    Returns:
-        Dictionary of known LTKs and link keys
-    """
-    subprocess.run([
-        f"scp -F {fd.ssh_config} -6 [{fd.ip}]:{PERSISTENT_BLUETOOTH_STORAGE_LOCATION} {save_path}"
-    ],
-                   shell=True)
-    stash_secure_output = ""
-    with open(save_path, 'rb') as file:
-        stash_secure_output = file.read()
-    non_ascii_bytes_removed = re.sub(rb'[^\x00-\x7f]', rb'',
-                                     stash_secure_output).decode('utf-8')
-
-    bonding_data_split = non_ascii_bytes_removed.split("bonding-data:")
-    bonding_data_split.pop(0)
-    data_dict = {}
-    for data in bonding_data_split:
-        if "saved_networks" in data:
-            data = data.split("saved_networks")[0]
-        trailing_data_removed = re.sub(r'^.*?{', '{', data).strip()
-
-        more_trailing_data = trailing_data_removed.rsplit('}', 1)[0] + "}"
-        # Sometimes 'ost-data' will be apended at the end.
-        even_more_trailing_info = more_trailing_data.split('ost-data')[0]
-
-        # Remove the special chars at the end of the string that start with x1b
-        clean_json = more_trailing_data.split('\x1b')[0]
-
-        json_conversion = json.loads(clean_json)
-        identifier = json_conversion.get("identifier")
-        device_name = json_conversion.get("name")
-
-        device_address = decode_list_to_link_key(
-            json_conversion.get("address").get("value"))
-        device_address = ':'.join([
-            device_address[i:i + 2] for i in range(0, len(device_address), 2)
-        ])
-
-        data_dict[identifier] = {
-            "device_name": device_name,
-            "device_address": device_address
-        }
-
-        if json_conversion.get("bredr") is not None:
-            link_key = decode_list_to_link_key(
-                json_conversion.get("bredr").get("linkKey").get("value"))
-            data_dict[identifier]["bredr_link_key"] = link_key
-
-        if json_conversion.get("le") is not None:
-            ltk_key = decode_list_to_link_key(
-                json_conversion.get("le").get("localLtk").get("key").get(
-                    "value"))
-            data_dict[identifier]["le_ltk"] = ltk_key
-
-    return data_dict
diff --git a/src/antlion/test_utils/fuchsia/sdp_records.py b/src/antlion/test_utils/fuchsia/sdp_records.py
deleted file mode 100644
index 84f442e..0000000
--- a/src/antlion/test_utils/fuchsia/sdp_records.py
+++ /dev/null
@@ -1,491 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.bt.bt_constants import bt_attribute_values
-from antlion.test_utils.bt.bt_constants import sig_uuid_constants
-
-BASE_UUID = sig_uuid_constants['BASE_UUID']
-
-# A list of pre-defined SDP definitions
-sdp_pts_record_list = [
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['AudioSink'])],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['AVDTP'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVDTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['AdvancedAudioDistribution'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors': [{
-            'protocol':
-            int(sig_uuid_constants['L2CAP'], 16),
-            'params': [
-                {
-                    'data': int(sig_uuid_constants['AVDTP'], 16),
-                },
-                {
-                    'data': int(sig_uuid_constants['AVCTP'], 16),
-                },
-                {
-                    'data': int(sig_uuid_constants['GenericAudio'], 16),
-                },
-            ]
-        }],
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_SERVICE_AVAILABILITY'],
-            'element': {
-                'data': 0xff  # Indicate all available
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [
-            BASE_UUID.format(sig_uuid_constants['A/V_RemoteControlTarget']),
-            BASE_UUID.format(sig_uuid_constants['A/V_RemoteControl']),
-            BASE_UUID.format(sig_uuid_constants['A/V_RemoteControlController'])
-        ],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['AVCTP'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['PANU'])],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['NAP'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['SerialPort'])],
-        'protocol_descriptors': [
-            {
-                'protocol':
-                int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data':
-                    int(sig_uuid_constants['SerialPort'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['DialupNetworking'])],
-        'protocol_descriptors': [
-            {
-                'protocol':
-                int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data':
-                    int(sig_uuid_constants['DialupNetworking'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['OBEXObjectPush'])],
-        'protocol_descriptors': [
-            {
-                'protocol':
-                int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data':
-                    int(sig_uuid_constants['OBEXObjectPush'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['OBEXFileTransfer'])],
-        'protocol_descriptors': [
-            {
-                'protocol':
-                int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data':
-                    int(sig_uuid_constants['OBEXFileTransfer'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['Headset'])],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['Headset'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['HandsfreeAudioGateway'])],
-        'protocol_descriptors': [
-            {
-                'protocol':
-                int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data':
-                    int(sig_uuid_constants['HandsfreeAudioGateway'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['Handsfree'])],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['Handsfree'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    },
-    {
-        'service_class_uuids': [BASE_UUID.format(sig_uuid_constants['SIM_Access'])],
-        'protocol_descriptors': [
-            {
-                'protocol': int(sig_uuid_constants['L2CAP'], 16),
-                'params': [{
-                    'data': int(sig_uuid_constants['SIM_Access'], 16),
-                }]
-            },
-            {
-                'protocol': int(sig_uuid_constants['AVCTP'], 16),
-                'params': [{
-                    'data': 0x103  # to indicate 1.3
-                }]
-            },
-        ],
-        'profile_descriptors': [{
-            'profile_id':
-            int(sig_uuid_constants['A/V_RemoteControl'], 16),
-            'major_version':
-            1,
-            'minor_version':
-            2,
-        }],
-        'additional_protocol_descriptors':
-        None,
-        'information': [{
-            'language': "en",
-            'name': "A2DP",
-            'description': "Advanced Audio Distribution Profile",
-            'provider': "Fuchsia"
-        }],
-        'additional_attributes': [{
-            'id':
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-            'element': {
-                'data': 0x0011
-            }
-        }]
-    }
-]
diff --git a/src/antlion/test_utils/fuchsia/utils.py b/src/antlion/test_utils/fuchsia/utils.py
deleted file mode 100644
index 01f7da2..0000000
--- a/src/antlion/test_utils/fuchsia/utils.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHError
-
-
-def http_file_download_by_curl(fd,
-                               url,
-                               out_path='/tmp/',
-                               curl_loc='/bin/curl',
-                               remove_file_after_check=True,
-                               timeout=3600,
-                               limit_rate=None,
-                               additional_args=None,
-                               retry=3):
-    """Download http file by ssh curl.
-
-    Args:
-        fd: Fuchsia Device Object.
-        url: The url that file to be downloaded from.
-        out_path: Optional. Where to download file to.
-            out_path is /tmp by default.
-        curl_loc: Location of curl binary on fd.
-        remove_file_after_check: Whether to remove the downloaded file after
-            check.
-        timeout: timeout for file download to complete.
-        limit_rate: download rate in bps. None, if do not apply rate limit.
-        additional_args: Any additional args for curl.
-        retry: the retry request times provided in curl command.
-    """
-    file_directory, file_name = _generate_file_directory_and_file_name(
-        url, out_path)
-    file_path = os.path.join(file_directory, file_name)
-    curl_cmd = curl_loc
-    if limit_rate:
-        curl_cmd += f' --limit-rate {limit_rate}'
-    if retry:
-        curl_cmd += f' --retry {retry}'
-    if additional_args:
-        curl_cmd += f' {additional_args}'
-    curl_cmd += f' --url {url} > {file_path}'
-
-    fd.log.info(f'Download {url} to {file_path} by ssh command {curl_cmd}')
-    try:
-        fd.ssh.run(curl_cmd, timeout_sec=timeout)
-        if _check_file_existence(fd, file_path):
-            fd.log.info(f'{url} is downloaded to {file_path} successfully')
-            return True
-
-        fd.log.warning(f'Fail to download {url}')
-        return False
-    except FuchsiaSSHError as e:
-        fd.log.warning(f'Command "{curl_cmd}" failed with error {e}')
-        return False
-    except Exception as e:
-        fd.log.error(f'Download {url} failed with unexpected exception {e}')
-        return False
-    finally:
-        if remove_file_after_check:
-            fd.log.info(f'Remove the downloaded file {file_path}')
-            try:
-                fd.ssh.run(f'rm {file_path}')
-            except FuchsiaSSHError:
-                pass
-
-
-def _generate_file_directory_and_file_name(url, out_path):
-    """Splits the file from the url and specifies the appropriate location of
-       where to store the downloaded file.
-
-    Args:
-        url: A url to the file that is going to be downloaded.
-        out_path: The location of where to store the file that is downloaded.
-
-    Returns:
-        file_directory: The directory of where to store the downloaded file.
-        file_name: The name of the file that is being downloaded.
-    """
-    file_name = url.split('/')[-1]
-    if not out_path:
-        file_directory = '/tmp/'
-    elif not out_path.endswith('/'):
-        file_directory, file_name = os.path.split(out_path)
-    else:
-        file_directory = out_path
-    return file_directory, file_name
-
-
-def _check_file_existence(fd, file_path):
-    """Check file existence by file_path. If expected_file_size
-       is provided, then also check if the file meet the file size requirement.
-
-    Args:
-        fd: A fuchsia device
-        file_path: Where to store the file on the fuchsia device.
-    """
-    try:
-        result = fd.ssh.run(f'ls -al "{file_path}"')
-        fd.log.debug(f'File {file_path} exists.')
-        return True
-    except FuchsiaSSHError as e:
-        if 'No such file or directory' in e.result.stderr:
-            fd.log.debug(f'File {file_path} does not exist.')
-            return False
-        raise e
diff --git a/src/antlion/test_utils/fuchsia/wmm_test_cases.py b/src/antlion/test_utils/fuchsia/wmm_test_cases.py
deleted file mode 100644
index d5aa3c5..0000000
--- a/src/antlion/test_utils/fuchsia/wmm_test_cases.py
+++ /dev/null
@@ -1,1239 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Internal Traffic Differentiation
-test_internal_traffic_diff_VO_VI = {
-    'phase_1': {
-        'stream_VO':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.87,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_VI':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_VO')
-             ])
-    }
-}
-
-test_internal_traffic_diff_VO_BE = {
-    'phase_1': {
-        'stream_VO':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_VO')
-             ])
-    }
-}
-
-test_internal_traffic_diff_VO_BK = {
-    'phase_1': {
-        'stream_VO':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_VO')
-             ])
-    }
-}
-
-test_internal_traffic_diff_VI_BE = {
-    'phase_1': {
-        'stream_VI':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_VI')
-             ])
-    }
-}
-
-test_internal_traffic_diff_VI_BK = {
-    'phase_1': {
-        'stream_VI':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_VI')
-             ])
-    }
-}
-
-test_internal_traffic_diff_BE_BK = {
-    'phase_1': {
-        'stream_BE':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<', phase='phase_1', stream='stream_BE')
-             ])
-    }
-}
-# External Traffic Differentiation
-
-# Single station, STAUT transmits high priority
-test_external_traffic_diff_staut_VO_ap_VI = {
-    'phase_1': {
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.87,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_staut_to_ap')
-             ])
-    }
-}
-
-test_external_traffic_diff_staut_VO_ap_BE = {
-    'phase_1': {
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_staut_to_ap')
-             ])
-    }
-}
-
-test_external_traffic_diff_staut_VO_ap_BK = {
-    'phase_1': {
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_staut_to_ap')
-             ])
-    }
-}
-
-test_external_traffic_diff_staut_VI_ap_BE = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VI_staut_to_ap')
-             ])
-    }
-}
-
-test_external_traffic_diff_staut_VI_ap_BK = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VI_staut_to_ap')
-             ])
-    }
-}
-
-test_external_traffic_diff_staut_BE_ap_BK = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_BE_staut_to_ap')
-             ])
-    }
-}
-
-# Single station, STAUT transmits low priority
-test_external_traffic_diff_staut_VI_ap_VO = {
-    'phase_1': {
-        'stream_VO_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.87,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_ap_to_staut')
-             ])
-    }
-}
-
-test_external_traffic_diff_staut_BE_ap_VO = {
-    'phase_1': {
-        'stream_VO_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_ap_to_staut')
-             ])
-    }
-}
-
-test_external_traffic_diff_staut_BK_ap_VO = {
-    'phase_1': {
-        'stream_VO_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VO_ap_to_staut')
-             ])
-    }
-}
-
-test_external_traffic_diff_staut_BE_ap_VI = {
-    'phase_1': {
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VI_ap_to_staut')
-             ])
-    }
-}
-
-test_external_traffic_diff_staut_BK_ap_VI = {
-    'phase_1': {
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_VI_ap_to_staut')
-             ])
-    }
-}
-
-test_external_traffic_diff_staut_BK_ap_BE = {
-    'phase_1': {
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.03)
-             ]),
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=1.0,
-             validation=[
-                 dict(operator='<',
-                      phase='phase_1',
-                      stream='stream_BE_ap_to_staut')
-             ])
-    }
-}
-
-# Dual Internal/External Traffic Differetiation
-
-test_dual_traffic_diff_staut_VO_VI_ap_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_VI_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.5)
-    }
-}
-
-test_dual_traffic_diff_staut_VO_BE_ap_BE = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_BE_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.5)
-    }
-}
-
-test_dual_traffic_diff_staut_VO_BK_ap_BK = {
-    'phase_1': {
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_BK_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5)
-    }
-}
-
-test_dual_traffic_diff_staut_VI_BE_ap_BE = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_BE_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.5)
-    }
-}
-
-test_dual_traffic_diff_staut_VI_BK_ap_BK = {
-    'phase_1': {
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_BK_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5)
-    }
-}
-
-test_dual_traffic_diff_staut_BE_BK_ap_BK = {
-    'phase_1': {
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5,
-             validation=[
-                 dict(operator='==',
-                      phase='phase_1',
-                      stream='stream_BK_ap_to_staut',
-                      max_bw_rel_tolerance=0.15)
-             ]),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.85,
-             validation=[
-                 dict(operator='>=',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.5)
-    }
-}
-
-# ACM Bit Conformance Tests (Single station, as WFA test below uses two)
-test_acm_bit_on_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_VI',
-            max_bandwidth_percentage=0.6,
-            validation=[
-                # TODO(): This should technically be an "or"
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_1',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_2',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_BE_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6),
-        'stream_BE_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6)
-    }
-}
-
-# AC Parameter Modificiation Tests (Single station, as WFA test below uses two)
-test_ac_param_degrade_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_VI',
-            max_bandwidth_percentage=0.6,
-            validation=[
-                # TODO(): This should technically be an "or"
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_1',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_2',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_BE_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6),
-        'stream_BE_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6)
-    }
-}
-
-test_ac_param_degrade_VO = {
-    'phase_1': {
-        'stream_VO_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_VO',
-            max_bandwidth_percentage=0.6,
-            validation=[
-                # TODO(): This should technically be an "or"
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_1',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_staut_to_ap_2',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_BE_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6),
-        'stream_BE_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.6)
-    }
-}
-
-test_ac_param_improve_BE = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_BE',
-            max_bandwidth_percentage=0.6,
-            validation=[
-                # TODO(): This should technically be an "or"
-                dict(operator='>',
-                     phase='phase_1',
-                     stream='stream_VI_staut_to_ap_1',
-                     bandwidth_percentage=0.869,
-                     rel_tolerance=0.05),
-                dict(operator='>',
-                     phase='phase_1',
-                     stream='stream_VI_staut_to_ap_2',
-                     bandwidth_percentage=0.869,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_VI_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.6),
-        'stream_VI_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.6)
-    }
-}
-
-test_ac_param_improve_BK = {
-    'phase_1': {
-        'stream_BK_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_BK',
-            max_bandwidth_percentage=0.6,
-            validation=[
-                # TODO(): This should technically be an "or"
-                dict(operator='>',
-                     phase='phase_1',
-                     stream='stream_VI_staut_to_ap_1',
-                     bandwidth_percentage=0.869,
-                     rel_tolerance=0.05),
-                dict(operator='>',
-                     phase='phase_1',
-                     stream='stream_VI_staut_to_ap_2',
-                     bandwidth_percentage=0.869,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_VI_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.6),
-        'stream_VI_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.6)
-    }
-}
-# WFA Test Plan Cases
-
-# Traffic Differentiation in Single BSS (Single Station)
-test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE = {
-    'phase_1': {
-        'steam_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45)
-    },
-    'phase_2': {
-        'steam_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VI_ap_to_staut',
-                      bandwidth_percentage=0.85,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
-    }
-}
-
-test_wfa_traffic_diff_single_station_staut_VI_BE = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45)
-    },
-    'phase_2': {
-        'stream_BE_staut_to_ap_1':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VI_staut_to_ap',
-                      bandwidth_percentage=0.89,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_staut_to_ap_2':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65),
-    }
-}
-
-test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE = {
-    'phase_1': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45)
-    },
-    'phase_2': {
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VI_staut_to_ap',
-                      bandwidth_percentage=0.87,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
-    }
-}
-
-test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK = {
-    'phase_1': {
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45)
-    },
-    'phase_2': {
-        'stream_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_BE_staut_to_ap',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BK_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.65)
-    }
-}
-
-test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45),
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.45)
-    },
-    'phase_2': {
-        'stream_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45),
-        'stream_VO_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VO_staut_to_ap',
-                      bandwidth_percentage=0.81,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_VI_ap_to_staut':
-        dict(transmitter_str='access_point',
-             receiver_str='staut',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.65)
-    }
-}
-
-# Traffic Differentiation in Single BSS (Two Stations)
-test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE = {
-    'phase_1': {
-        'steam_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45)
-    },
-    'phase_2': {
-        'steam_BE_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_VI_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VI_secondary_to_ap',
-                      bandwidth_percentage=0.90,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
-    }
-}
-
-test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE = {
-    'phase_1': {
-        'steam_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45)
-    },
-    'phase_2': {
-        'steam_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='steam_VI_staut_to_ap',
-                      bandwidth_percentage=0.88,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BE_secondary_to_ap_1':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_secondary_to_ap_2':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
-    }
-}
-
-test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK = {
-    'phase_1': {
-        'steam_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45)
-    },
-    'phase_2': {
-        'steam_BK_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.45),
-        'stream_BE_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_BE_secondary_to_ap',
-                      bandwidth_percentage=0.90,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_BK_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BK',
-             max_bandwidth_percentage=0.65)
-    }
-}
-
-test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI = {
-    'phase_1': {
-        'steam_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45),
-        'stream_VO_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.45)
-    },
-    'phase_2': {
-        'steam_VI_staut_to_ap':
-        dict(transmitter_str='staut',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.45),
-        'stream_VO_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_VO',
-             max_bandwidth_percentage=0.45,
-             validation=[
-                 dict(operator='>=',
-                      phase='phase_1',
-                      stream='stream_VO_secondary_to_ap',
-                      bandwidth_percentage=0.90,
-                      rel_tolerance=0.01)
-             ]),
-        'stream_VI_secondary_to_ap':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_VI',
-             max_bandwidth_percentage=0.65)
-    }
-}
-
-test_wfa_acm_bit_on_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_VI',
-            max_bandwidth_percentage=0.65,
-            validation=[
-                # TODO(): This should technically be an "or"
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_secondary_to_ap_1',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_secondary_to_ap_2',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_BE_secondary_to_ap_1':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65),
-        'stream_BE_secondary_to_ap_2':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
-    }
-}
-
-test_wfa_ac_param_degrade_VI = {
-    'phase_1': {
-        'stream_VI_staut_to_ap':
-        dict(
-            transmitter_str='staut',
-            receiver_str='access_point',
-            access_category='AC_VI',
-            max_bandwidth_percentage=0.65,
-            validation=[
-                # TODO(): This should technically be an "or"
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_secondary_to_ap_1',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-                dict(operator='<',
-                     phase='phase_1',
-                     stream='stream_BE_secondary_to_ap_2',
-                     bandwidth_percentage=1.15,
-                     rel_tolerance=0.05),
-            ]),
-        'stream_BE_secondary_to_ap_1':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65),
-        'stream_BE_secondary_to_ap_2':
-        dict(transmitter_str='secondary_sta',
-             receiver_str='access_point',
-             access_category='AC_BE',
-             max_bandwidth_percentage=0.65)
-    }
-}
diff --git a/src/antlion/test_utils/net/NetstackBaseTest.py b/src/antlion/test_utils/net/NetstackBaseTest.py
deleted file mode 100755
index 9cd0a7f..0000000
--- a/src/antlion/test_utils/net/NetstackBaseTest.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.base_test import BaseTestClass
-from antlion import asserts
-
-
-class NetstackBaseTest(BaseTestClass):
-    def __init__(self, controllers):
-        BaseTestClass.__init__(self, controllers)
diff --git a/src/antlion/test_utils/net/arduino_test_utils.py b/src/antlion/test_utils/net/arduino_test_utils.py
deleted file mode 100644
index 45fc21a..0000000
--- a/src/antlion/test_utils/net/arduino_test_utils.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion import utils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-
-ARDUINO = "/root/arduino/arduino-1.8.5/arduino "
-CONNECT_WIFI = "/arduino/connect_wifi/connect_wifi.ino"
-DISCONNECT_WIFI = "/arduino/disconnect_wifi/disconnect_wifi.ino"
-SSID = wutils.WifiEnums.SSID_KEY
-PWD = wutils.WifiEnums.PWD_KEY
-
-
-def connect_wifi(wd, network=None):
-    """Connect wifi on arduino wifi dongle
-
-    Args:
-        wd - wifi dongle object
-        network - wifi network to connect to
-    """
-    wd.log.info("Flashing connect_wifi.ino onto dongle")
-    cmd = "locate %s" % CONNECT_WIFI
-    file_path = utils.exe_cmd(cmd).decode("utf-8", "ignore").split()[-1]
-    write_status = wd.write(ARDUINO, file_path, network)
-    asserts.assert_true(write_status, "Failed to flash connect wifi")
-    wd.log.info("Flashing complete")
-    wifi_status = wd.wifi_status()
-    asserts.assert_true(wifi_status, "Failed to connect to %s" % network)
-    ping_status = wd.ping_status()
-    asserts.assert_true(ping_status, "Failed to connect to internet")
-
-
-def disconnect_wifi(wd):
-    """Disconnect wifi on arduino wifi dongle
-
-    Args:
-        wd - wifi dongle object
-
-    Returns:
-        True - if wifi is disconnected
-        False - if not
-    """
-    wd.log.info("Flashing disconnect_wifi.ino onto dongle")
-    cmd = "locate %s" % DISCONNECT_WIFI
-    file_path = utils.exe_cmd(cmd).decode("utf-8", "ignore").rstrip()
-    write_status = wd.write(ARDUINO, file_path)
-    asserts.assert_true(write_status, "Failed to flash disconnect wifi")
-    wd.log.info("Flashing complete")
-    wifi_status = wd.wifi_status(False)
-    asserts.assert_true(not wifi_status, "Failed to disconnect wifi")
diff --git a/src/antlion/test_utils/net/connectivity_const.py b/src/antlion/test_utils/net/connectivity_const.py
deleted file mode 100644
index a54654f..0000000
--- a/src/antlion/test_utils/net/connectivity_const.py
+++ /dev/null
@@ -1,173 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import enum
-
-######################################################
-# ConnectivityManager.NetworkCallback events
-######################################################
-EVENT_NETWORK_CALLBACK = "NetworkCallback"
-
-# event types
-NETWORK_CB_PRE_CHECK = "PreCheck"
-NETWORK_CB_AVAILABLE = "Available"
-NETWORK_CB_LOSING = "Losing"
-NETWORK_CB_LOST = "Lost"
-NETWORK_CB_UNAVAILABLE = "Unavailable"
-NETWORK_CB_CAPABILITIES_CHANGED = "CapabilitiesChanged"
-NETWORK_CB_SUSPENDED = "Suspended"
-NETWORK_CB_RESUMED = "Resumed"
-NETWORK_CB_LINK_PROPERTIES_CHANGED = "LinkPropertiesChanged"
-NETWORK_CB_INVALID = "Invalid"
-
-# event data keys
-NETWORK_CB_KEY_ID = "id"
-NETWORK_CB_KEY_EVENT = "networkCallbackEvent"
-NETWORK_CB_KEY_MAX_MS_TO_LIVE = "maxMsToLive"
-NETWORK_CB_KEY_RSSI = "rssi"
-NETWORK_CB_KEY_INTERFACE_NAME = "interfaceName"
-NETWORK_CB_KEY_CREATE_TS = "creation_timestamp"
-NETWORK_CB_KEY_CURRENT_TS = "current_timestamp"
-NETWORK_CB_KEY_NETWORK_SPECIFIER = "network_specifier"
-NETWORK_CB_KEY_TRANSPORT_INFO = "transport_info"
-
-# Constants for VPN connection status
-VPN_STATE_DISCONNECTED = 0
-VPN_STATE_INITIALIZING = 1
-VPN_STATE_CONNECTING = 2
-VPN_STATE_CONNECTED = 3
-VPN_STATE_TIMEOUT = 4
-VPN_STATE_FAILED = 5
-# TODO gmoturu: determine the exact timeout value
-# This is a random value as of now
-VPN_TIMEOUT = 30
-
-# Connectiivty Manager constants
-TYPE_MOBILE = 0
-TYPE_WIFI = 1
-
-# Network request related constants.
-NETWORK_CAP_TRANSPORT_WIFI = TYPE_WIFI
-NETWORK_CAP_CAPABILITY_INTERNET = 12
-
-# Network request related keys.
-NETWORK_CAP_TRANSPORT_TYPE_KEY = "TransportType"
-NETWORK_CAP_CAPABILITY_KEY = "Capability"
-
-# Multipath preference constants
-MULTIPATH_PREFERENCE_NONE = 0
-MULTIPATH_PREFERENCE_HANDOVER = 1 << 0
-MULTIPATH_PREFERENCE_RELIABILITY = 1 << 1
-MULTIPATH_PREFERENCE_PERFORMANCE = 1 << 2
-
-# Private DNS constants
-DNS_GOOGLE_HOSTNAME = "dns.google"
-DNS_QUAD9_HOSTNAME = "dns.quad9.net"
-DNS_CLOUDFLARE_HOSTNAME = "1dot1dot1dot1.cloudflare-dns.com"
-DOH_CLOUDFLARE_HOSTNAME = "cloudflare-dns.com"
-PRIVATE_DNS_MODE_OFF = "off"
-PRIVATE_DNS_MODE_OPPORTUNISTIC = "opportunistic"
-PRIVATE_DNS_MODE_STRICT = "hostname"
-
-DNS_SUPPORT_TYPE = {
-    DNS_GOOGLE_HOSTNAME: ["Do53", "DoT", "DoH"],
-    DNS_CLOUDFLARE_HOSTNAME: ["Do53","DoT"],
-    DOH_CLOUDFLARE_HOSTNAME: ["DoH"]
-}
-
-DNS_GOOGLE_ADDR_V4 = ["8.8.4.4", "8.8.8.8"]
-DNS_GOOGLE_ADDR_V6 = ["2001:4860:4860::8888",
-                      "2001:4860:4860::8844"]
-DNS_CLOUDFLARE_ADDR_V4 = ["1.1.1.1", "1.0.0.1"]
-DOH_CLOUDFLARE_ADDR_V4 = ["104.16.248.249", "104.16.249.249"]
-DOH_CLOUDFLARE_ADDR_V6 = ["2606:4700::6810:f8f9",
-                          "2606:4700::6810:f9f9"]
-
-
-# IpSec constants
-SOCK_STREAM = 1
-SOCK_DGRAM = 2
-AF_INET = 2
-AF_INET6 = 10
-DIRECTION_IN = 0
-DIRECTION_OUT = 1
-MODE_TRANSPORT = 0
-MODE_TUNNEL = 1
-CRYPT_NULL = "ecb(cipher_null)"
-CRYPT_AES_CBC = "cbc(aes)"
-AUTH_HMAC_MD5 = "hmac(md5)"
-AUTH_HMAC_SHA1 = "hmac(sha1)"
-AUTH_HMAC_SHA256 = "hmac(sha256)"
-AUTH_HMAC_SHA384 = "hmac(sha384)"
-AUTH_HMAC_SHA512 = "hmac(sha512)"
-AUTH_CRYPT_AES_GCM = "rfc4106(gcm(aes))"
-
-
-# Constants for VpnProfile
-class VpnProfile(object):
-    """ This class contains all the possible
-        parameters required for VPN connection
-    """
-    NAME = "name"
-    TYPE = "type"
-    SERVER = "server"
-    USER = "username"
-    PWD = "password"
-    DNS = "dnsServers"
-    SEARCH_DOMAINS = "searchDomains"
-    ROUTES = "routes"
-    MPPE = "mppe"
-    L2TP_SECRET = "l2tpSecret"
-    IPSEC_ID = "ipsecIdentifier"
-    IPSEC_SECRET = "ipsecSecret"
-    IPSEC_USER_CERT = "ipsecUserCert"
-    IPSEC_CA_CERT = "ipsecCaCert"
-    IPSEC_SERVER_CERT = "ipsecServerCert"
-
-
-# Enums for VPN profile types
-class VpnProfileType(enum.Enum):
-    """ Integer constant for each type of VPN
-    """
-    PPTP = 0
-    L2TP_IPSEC_PSK = 1
-    L2TP_IPSEC_RSA = 2
-    IPSEC_XAUTH_PSK = 3
-    IPSEC_XAUTH_RSA = 4
-    IPSEC_HYBRID_RSA = 5
-    IKEV2_IPSEC_USER_PASS = 6
-    IKEV2_IPSEC_PSK = 7
-    IKEV2_IPSEC_RSA = 8
-
-
-# Constants for config file
-class VpnReqParams(object):
-    """ Config file parameters required for
-        VPN connection
-    """
-    vpn_server_addresses = "vpn_server_addresses"
-    vpn_verify_addresses = "vpn_verify_addresses"
-    vpn_username = "vpn_username"
-    vpn_password = "vpn_password"
-    psk_secret = "psk_secret"
-    client_pkcs_file_name = "client_pkcs_file_name"
-    cert_path_vpnserver = "cert_path_vpnserver"
-    cert_password = "cert_password"
-    pptp_mppe = "pptp_mppe"
-    ipsec_server_type = "ipsec_server_type"
-    wifi_network = "wifi_network"
-    vpn_identity = "vpn_identity"
-    vpn_server_hostname = "vpn_server_hostname"
diff --git a/src/antlion/test_utils/net/connectivity_test_utils.py b/src/antlion/test_utils/net/connectivity_test_utils.py
deleted file mode 100644
index 6841de9..0000000
--- a/src/antlion/test_utils/net/connectivity_test_utils.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion.test_utils.net import connectivity_const as cconst
-from queue import Empty
-
-def _listen_for_keepalive_event(ad, key, msg, ka_event):
-    """Listen for keepalive event and return status
-
-    Args:
-        ad: DUT object
-        key: keepalive key
-        msg: Error message
-        event: Keepalive event type
-    """
-    ad.droid.socketKeepaliveStartListeningForEvent(key, ka_event)
-    try:
-        event = ad.ed.pop_event("SocketKeepaliveCallback")
-        status = event["data"]["socketKeepaliveEvent"] == ka_event
-    except Empty:
-        asserts.fail(msg)
-    finally:
-        ad.droid.socketKeepaliveStopListeningForEvent(key, ka_event)
-    if ka_event != "Started":
-        ad.droid.removeSocketKeepaliveReceiverKey(key)
-    if status:
-        ad.log.info("'%s' keepalive event successful" % ka_event)
-    return status
-
-def start_natt_socket_keepalive(ad, udp_encap, src_ip, dst_ip, interval = 10):
-    """Start NATT SocketKeepalive on DUT
-
-    Args:
-        ad: DUT object
-        udp_encap: udp_encap socket key
-        src_ip: IP addr of the client
-        dst_ip: IP addr of the keepalive server
-        interval: keepalive time interval
-    """
-    ad.log.info("Starting Natt Socket Keepalive")
-    key = ad.droid.startNattSocketKeepalive(udp_encap, src_ip, dst_ip, interval)
-    msg = "Failed to receive confirmation of starting natt socket keeaplive"
-    status = _listen_for_keepalive_event(ad, key, msg, "Started")
-    return key if status else None
-
-def start_tcp_socket_keepalive(ad, socket, time_interval = 10):
-    """Start TCP socket keepalive on DUT
-
-    Args:
-        ad: DUT object
-        socket: TCP socket key
-        time_interval: Keepalive time interval
-    """
-    ad.log.info("Starting TCP Socket Keepalive")
-    key = ad.droid.startTcpSocketKeepalive(socket, time_interval)
-    msg = "Failed to receive confirmation of starting tcp socket keeaplive"
-    status = _listen_for_keepalive_event(ad, key, msg, "Started")
-    return key if status else None
-
-def socket_keepalive_error(ad, key):
-    """Verify Error callback
-
-    Args:
-        ad: DUT object
-        key: Keepalive key
-    """
-    ad.log.info("Verify Error callback on keepalive: %s" % key)
-    msg = "Failed to receive confirmation of Error callback"
-    return _listen_for_keepalive_event(ad, key, msg, "Error")
-
-def socket_keepalive_data_received(ad, key):
-    """Verify OnDataReceived callback
-
-    Args:
-        ad: DUT object
-        key: Keepalive key
-    """
-    ad.log.info("Verify OnDataReceived callback on keepalive: %s" % key)
-    msg = "Failed to receive confirmation of OnDataReceived callback"
-    return _listen_for_keepalive_event(ad, key, msg, "OnDataReceived")
-
-def stop_socket_keepalive(ad, key):
-    """Stop SocketKeepalive on DUT
-
-    Args:
-        ad: DUT object
-        key: Keepalive key
-    """
-    ad.log.info("Stopping Socket keepalive: %s" % key)
-    ad.droid.stopSocketKeepalive(key)
-    msg = "Failed to receive confirmation of stopping socket keepalive"
-    return _listen_for_keepalive_event(ad, key, msg, "Stopped")
-
-
-def set_private_dns(ad, dns_mode, hostname=None):
-    """ Set private DNS mode and DNS server hostname on DUT
-
-    :param ad: Device under test (DUT)
-    :param dns_mode: DNS mode, including OFF, OPPORTUNISTIC, STRICT
-    :param hostname: DNS server hostname
-    """
-    """ Set private DNS mode on dut """
-    if dns_mode == cconst.PRIVATE_DNS_MODE_OFF:
-        ad.droid.setPrivateDnsMode(False)
-    else:
-        ad.droid.setPrivateDnsMode(True, hostname)
-
-    mode = ad.droid.getPrivateDnsMode()
-    host = ad.droid.getPrivateDnsSpecifier()
-    ad.log.info("DNS mode is %s and DNS server is %s" % (mode, host))
diff --git a/src/antlion/test_utils/net/ipsec_test_utils.py b/src/antlion/test_utils/net/ipsec_test_utils.py
deleted file mode 100644
index 657aa7f..0000000
--- a/src/antlion/test_utils/net/ipsec_test_utils.py
+++ /dev/null
@@ -1,262 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import binascii
-import os
-import re
-
-from antlion.test_utils.net import connectivity_const as cconst
-from antlion import asserts
-
-PKTS = 5
-
-
-def make_key(len_bits):
-    asserts.assert_true(
-        len_bits % 8 == 0, "Unexpected key length. Should be a multiple "
-        "of 8, got %s" % len_bits)
-    return binascii.hexlify(os.urandom(int(len_bits / 8))).decode()
-
-
-def allocate_spis(ad, ip_a, ip_b, in_spi=None, out_spi=None):
-    """ Allocate in and out SPIs for android device
-
-    Args:
-      1. ad : android device object
-      2. ip_a : local IP address for In SPI
-      3. ip_b : remote IP address for Out SPI
-      4. in_spi : Generate In SPI with this value
-      5. out_spi : Generate Out SPI with this value
-
-    Returns:
-      List of In and Out SPI
-    """
-    in_spi_key = ad.droid.ipSecAllocateSecurityParameterIndex(ip_a, in_spi)
-    in_spi = ad.droid.ipSecGetSecurityParameterIndex(in_spi_key)
-    ad.log.info("In SPI: %s" % hex(in_spi))
-
-    out_spi_key = ad.droid.ipSecAllocateSecurityParameterIndex(ip_b, out_spi)
-    out_spi = ad.droid.ipSecGetSecurityParameterIndex(out_spi_key)
-    ad.log.info("Out SPI: %s" % hex(out_spi))
-
-    asserts.assert_true(in_spi and out_spi, "Failed to allocate SPIs")
-    return [in_spi_key, out_spi_key]
-
-
-def release_spis(ad, spis):
-    """ Destroy SPIs
-
-    Args:
-      1. ad : android device object
-      2. spis : list of SPI keys to destroy
-    """
-    for spi_key in spis:
-        ad.droid.ipSecReleaseSecurityParameterIndex(spi_key)
-        spi = ad.droid.ipSecGetSecurityParameterIndex(spi_key)
-        asserts.assert_true(not spi, "Failed to release SPI")
-
-
-def create_transport_mode_transforms(ad,
-                                     spis,
-                                     ip_a,
-                                     ip_b,
-                                     crypt_algo,
-                                     crypt_key,
-                                     auth_algo,
-                                     auth_key,
-                                     trunc_bit,
-                                     udp_encap_sock=None):
-    """ Create transport mode transforms on the device
-
-    Args:
-      1. ad : android device object
-      2. spis : spi keys of the SPIs created
-      3. ip_a : local IP addr
-      4. ip_b : remote IP addr
-      5. crypt_key : encryption key
-      6. auth_key : authentication key
-      7. udp_encap_sock : set udp encapsulation for ESP packets
-
-    Returns:
-      List of In and Out Transforms
-    """
-    in_transform = ad.droid.ipSecCreateTransportModeTransform(
-        crypt_algo, crypt_key, auth_algo, auth_key, trunc_bit, spis[0], ip_b,
-        udp_encap_sock)
-    ad.log.info("In Transform: %s" % in_transform)
-    out_transform = ad.droid.ipSecCreateTransportModeTransform(
-        crypt_algo, crypt_key, auth_algo, auth_key, trunc_bit, spis[1], ip_a,
-        udp_encap_sock)
-    ad.log.info("Out Transform: %s" % out_transform)
-    asserts.assert_true(in_transform and out_transform,
-                        "Failed to create transforms")
-    return [in_transform, out_transform]
-
-
-def destroy_transport_mode_transforms(ad, transforms):
-    """ Destroy transforms on the device
-
-    Args:
-      1. ad : android device object
-      2. transforms : list to transform keys to destroy
-    """
-    for transform in transforms:
-        ad.droid.ipSecDestroyTransportModeTransform(transform)
-        status = ad.droid.ipSecGetTransformStatus(transform)
-        ad.log.info("Transform status: %s" % status)
-        asserts.assert_true(not status, "Failed to destroy transform")
-
-
-def apply_transport_mode_transforms_file_descriptors(ad, fd, transforms):
-    """ Apply transpot mode transform to FileDescriptor object
-
-    Args:
-      1. ad - android device object
-      2. fd - FileDescriptor key
-      3. transforms - list of in and out transforms
-    """
-    in_transform = ad.droid.ipSecApplyTransportModeTransformFileDescriptor(
-        fd, cconst.DIRECTION_IN, transforms[0])
-    out_transform = ad.droid.ipSecApplyTransportModeTransformFileDescriptor(
-        fd, cconst.DIRECTION_OUT, transforms[1])
-    asserts.assert_true(in_transform and out_transform,
-                        "Failed to apply transform")
-    ip_xfrm_state = ad.adb.shell("ip -s xfrm state")
-    ad.log.info("XFRM STATE:\n%s\n" % ip_xfrm_state)
-    ip_xfrm_policy = ad.adb.shell("ip -s xfrm policy")
-    ad.log.info("XFRM POLICY:\n%s\n" % ip_xfrm_policy)
-
-
-def remove_transport_mode_transforms_file_descriptors(ad, fd):
-    """ Remove transport mode transform from FileDescriptor object
-
-    Args:
-      1. ad - android device object
-      2. socket - FileDescriptor key
-    """
-    status = ad.droid.ipSecRemoveTransportModeTransformsFileDescriptor(fd)
-    asserts.assert_true(status, "Failed to remove transform")
-
-
-def apply_transport_mode_transforms_datagram_socket(ad, socket, transforms):
-    """ Apply transport mode transform to DatagramSocket object
-
-    Args:
-      1. ad - android device object
-      2. socket - DatagramSocket object key
-      3. transforms - list of in and out transforms
-    """
-    in_tfrm_status = ad.droid.ipSecApplyTransportModeTransformDatagramSocket(
-        socket, cconst.DIRECTION_IN, transforms[0])
-    out_tfrm_status = ad.droid.ipSecApplyTransportModeTransformDatagramSocket(
-        socket, cconst.DIRECTION_OUT, transforms[1])
-    asserts.assert_true(in_tfrm_status and out_tfrm_status,
-                        "Failed to apply transform")
-
-    ip_xfrm_state = ad.adb.shell("ip -s xfrm state")
-    ad.log.info("XFRM STATE:\n%s\n" % ip_xfrm_state)
-
-
-def remove_transport_mode_transforms_datagram_socket(ad, socket):
-    """ Remove transport mode transform from DatagramSocket object
-
-    Args:
-      1. ad - android device object
-      2. socket - DatagramSocket object key
-    """
-    status = ad.droid.ipSecRemoveTransportModeTransformsDatagramSocket(socket)
-    asserts.assert_true(status, "Failed to remove transform")
-
-
-def apply_transport_mode_transforms_socket(ad, socket, transforms):
-    """ Apply transport mode transform to Socket object
-
-    Args:
-      1. ad - android device object
-      2. socket - Socket object key
-      3. transforms - list of in and out transforms
-    """
-    in_tfrm_status = ad.droid.ipSecApplyTransportModeTransformSocket(
-        socket, cconst.DIRECTION_IN, transforms[0])
-    out_tfrm_status = ad.droid.ipSecApplyTransportModeTransformSocket(
-        socket, cconst.DIRECTION_OUT, transforms[1])
-    asserts.assert_true(in_tfrm_status and out_tfrm_status,
-                        "Failed to apply transform")
-
-    ip_xfrm_state = ad.adb.shell("ip -s xfrm state")
-    ad.log.info("XFRM STATE:\n%s\n" % ip_xfrm_state)
-
-
-def remove_transport_mode_transforms_socket(ad, socket):
-    """ Remove transport mode transform from Socket object
-
-    Args:
-      1. ad - android device object
-      2. socket - Socket object key
-    """
-    status = ad.droid.ipSecRemoveTransportModeTransformsSocket(socket)
-    asserts.assert_true(status, "Failed to remove transform")
-
-
-def verify_esp_packets(ads):
-    """ Verify that encrypted ESP packets are sent
-
-    Args:
-      1. ads - Verify ESP packets on all devices
-    """
-    for ad in ads:
-        ip_xfrm_state = ad.adb.shell("ip -s xfrm state")
-        ad.log.info("XFRM STATE on %s:\n%s\n" % (ad.serial, ip_xfrm_state))
-        pattern = re.findall(r'\d+\(packets\)', ip_xfrm_state)
-        esp_pkts = False
-        for _ in pattern:
-            if int(_.split('(')[0]) >= PKTS:
-                esp_pkts = True
-                break
-        asserts.assert_true(esp_pkts, "Could not find ESP pkts")
-
-
-def generate_random_crypt_auth_combo():
-    """ Generate every possible combination of crypt and auth keys,
-        auth algo, trunc bits supported by IpSecManager
-    """
-    crypt_key_length = [128, 192, 256]
-    auth_method_key = {
-        cconst.AUTH_HMAC_MD5: 128,
-        cconst.AUTH_HMAC_SHA1: 160,
-        cconst.AUTH_HMAC_SHA256: 256,
-        cconst.AUTH_HMAC_SHA384: 384,
-        cconst.AUTH_HMAC_SHA512: 512
-    }
-    auth_method_trunc = {
-        cconst.AUTH_HMAC_MD5: list(range(96, 136, 8)),
-        cconst.AUTH_HMAC_SHA1: list(range(96, 168, 8)),
-        cconst.AUTH_HMAC_SHA256: list(range(96, 264, 8)),
-        cconst.AUTH_HMAC_SHA384: list(range(192, 392, 8)),
-        cconst.AUTH_HMAC_SHA512: list(range(256, 520, 8))
-    }
-    return_list = []
-    for c in crypt_key_length:
-        for k in auth_method_key.keys():
-            auth_key = auth_method_key[k]
-            lst = auth_method_trunc[k]
-            for t in lst:
-                combo = []
-                combo.append(c)
-                combo.append(k)
-                combo.append(auth_key)
-                combo.append(t)
-                return_list.append(combo)
-
-    return return_list
diff --git a/src/antlion/test_utils/net/net_test_utils.py b/src/antlion/test_utils/net/net_test_utils.py
deleted file mode 100644
index c4b6d0c..0000000
--- a/src/antlion/test_utils/net/net_test_utils.py
+++ /dev/null
@@ -1,548 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import os
-import re
-import time
-import urllib.request
-
-from antlion import asserts
-from antlion import signals
-from antlion import utils
-from antlion.controllers import adb
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.libs.proc import job
-from antlion.utils import start_standing_subprocess
-from antlion.utils import stop_standing_subprocess
-from antlion.test_utils.net import connectivity_const as cconst
-
-VPN_CONST = cconst.VpnProfile
-VPN_TYPE = cconst.VpnProfileType
-VPN_PARAMS = cconst.VpnReqParams
-TCPDUMP_PATH = "/data/local/tmp/"
-USB_CHARGE_MODE = "svc usb setFunctions"
-USB_TETHERING_MODE = "svc usb setFunctions rndis"
-ENABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 0"
-DISABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 1"
-DEVICE_IP_ADDRESS = "ip address"
-LOCALHOST = "192.168.1.1"
-
-# Time to wait for radio to up and running after reboot
-WAIT_TIME_AFTER_REBOOT = 10
-
-GCE_SSH = "gcloud compute ssh "
-GCE_SCP = "gcloud compute scp "
-
-
-def set_chrome_browser_permissions(ad):
-    """Set chrome browser start with no-first-run verification.
-
-    Give permission to read from and write to storage
-
-    Args:
-        ad: android device object
-    """
-    commands = ["pm grant com.android.chrome "
-                "android.permission.READ_EXTERNAL_STORAGE",
-                "pm grant com.android.chrome "
-                "android.permission.WRITE_EXTERNAL_STORAGE",
-                "rm /data/local/chrome-command-line",
-                "am set-debug-app --persistent com.android.chrome",
-                'echo "chrome --no-default-browser-check --no-first-run '
-                '--disable-fre" > /data/local/tmp/chrome-command-line']
-    for cmd in commands:
-        try:
-            ad.adb.shell(cmd)
-        except AdbError:
-            logging.warning("adb command %s failed on %s" % (cmd, ad.serial))
-
-
-def verify_ping_to_vpn_ip(ad, vpn_ping_addr):
-    """Verify if IP behind VPN server is pingable.
-
-    Ping should pass, if VPN is connected.
-    Ping should fail, if VPN is disconnected.
-
-    Args:
-        ad: android device object
-        vpn_ping_addr: target ping addr
-    """
-    ping_result = None
-    pkt_loss = "100% packet loss"
-    logging.info("Pinging: %s" % vpn_ping_addr)
-    try:
-        ping_result = ad.adb.shell("ping -c 3 -W 2 %s" % vpn_ping_addr)
-    except AdbError:
-        pass
-    return ping_result and pkt_loss not in ping_result
-
-
-def legacy_vpn_connection_test_logic(ad, vpn_profile, vpn_ping_addr):
-    """Test logic for each legacy VPN connection.
-
-    Steps:
-      1. Generate profile for the VPN type
-      2. Establish connection to the server
-      3. Verify that connection is established using LegacyVpnInfo
-      4. Verify the connection by pinging the IP behind VPN
-      5. Stop the VPN connection
-      6. Check the connection status
-      7. Verify that ping to IP behind VPN fails
-
-    Args:
-        ad: Android device object
-        vpn_profile: object contains attribute for create vpn profile
-        vpn_ping_addr: addr to verify vpn connection
-    """
-    # Wait for sometime so that VPN server flushes all interfaces and
-    # connections after graceful termination
-    time.sleep(10)
-
-    ad.adb.shell("ip xfrm state flush")
-    ad.log.info("Connecting to: %s", vpn_profile)
-    ad.droid.vpnStartLegacyVpn(vpn_profile)
-    time.sleep(cconst.VPN_TIMEOUT)
-
-    connected_vpn_info = ad.droid.vpnGetLegacyVpnInfo()
-    asserts.assert_equal(connected_vpn_info["state"],
-                         cconst.VPN_STATE_CONNECTED,
-                         "Unable to establish VPN connection for %s"
-                         % vpn_profile)
-
-    ping_result = verify_ping_to_vpn_ip(ad, vpn_ping_addr)
-    ip_xfrm_state = ad.adb.shell("ip xfrm state")
-    match_obj = re.search(r'hmac(.*)', "%s" % ip_xfrm_state)
-    if match_obj:
-        ip_xfrm_state = format(match_obj.group(0)).split()
-        ad.log.info("HMAC for ESP is %s " % ip_xfrm_state[0])
-
-    ad.droid.vpnStopLegacyVpn()
-    asserts.assert_true(ping_result,
-                        "Ping to the internal IP failed. "
-                        "Expected to pass as VPN is connected")
-
-    connected_vpn_info = ad.droid.vpnGetLegacyVpnInfo()
-    asserts.assert_true(not connected_vpn_info,
-                        "Unable to terminate VPN connection for %s"
-                        % vpn_profile)
-
-
-def download_load_certs(ad, vpn_params, vpn_type, vpn_server_addr,
-                        ipsec_server_type, log_path):
-    """Download the certificates from VPN server and push to sdcard of DUT.
-
-    Args:
-      ad: android device object
-      vpn_params: vpn params from config file
-      vpn_type: 1 of the 6 VPN types
-      vpn_server_addr: server addr to connect to
-      ipsec_server_type: ipsec version - strongswan or openswan
-      log_path: log path to download cert
-
-    Returns:
-      Client cert file name on DUT's sdcard
-    """
-    url = "http://%s%s%s" % (vpn_server_addr,
-                             vpn_params['cert_path_vpnserver'],
-                             vpn_params['client_pkcs_file_name'])
-    logging.info("URL is: %s" % url)
-    if vpn_server_addr == LOCALHOST:
-        ad.droid.httpDownloadFile(url, "/sdcard/")
-        return vpn_params['client_pkcs_file_name']
-
-    local_cert_name = "%s_%s_%s" % (vpn_type.name,
-                                    ipsec_server_type,
-                                    vpn_params['client_pkcs_file_name'])
-    local_file_path = os.path.join(log_path, local_cert_name)
-    try:
-        ret = urllib.request.urlopen(url)
-        with open(local_file_path, "wb") as f:
-            f.write(ret.read())
-    except Exception:
-        asserts.fail("Unable to download certificate from the server")
-
-    ad.adb.push("%s sdcard/" % local_file_path)
-    return local_cert_name
-
-
-def generate_legacy_vpn_profile(ad,
-                                vpn_params,
-                                vpn_type,
-                                vpn_server_addr,
-                                ipsec_server_type,
-                                log_path):
-    """Generate legacy VPN profile for a VPN.
-
-    Args:
-      ad: android device object
-      vpn_params: vpn params from config file
-      vpn_type: 1 of the 6 VPN types
-      vpn_server_addr: server addr to connect to
-      ipsec_server_type: ipsec version - strongswan or openswan
-      log_path: log path to download cert
-
-    Returns:
-      Vpn profile
-    """
-    vpn_profile = {VPN_CONST.USER: vpn_params['vpn_username'],
-                   VPN_CONST.PWD: vpn_params['vpn_password'],
-                   VPN_CONST.TYPE: vpn_type.value,
-                   VPN_CONST.SERVER: vpn_server_addr, }
-    vpn_profile[VPN_CONST.NAME] = "test_%s_%s" % (vpn_type.name,
-                                                  ipsec_server_type)
-    if vpn_type.name == "PPTP":
-        vpn_profile[VPN_CONST.NAME] = "test_%s" % vpn_type.name
-
-    psk_set = set(["L2TP_IPSEC_PSK", "IPSEC_XAUTH_PSK"])
-    rsa_set = set(["L2TP_IPSEC_RSA", "IPSEC_XAUTH_RSA", "IPSEC_HYBRID_RSA"])
-
-    if vpn_type.name in psk_set:
-        vpn_profile[VPN_CONST.IPSEC_SECRET] = vpn_params['psk_secret']
-    elif vpn_type.name in rsa_set:
-        cert_name = download_load_certs(ad,
-                                        vpn_params,
-                                        vpn_type,
-                                        vpn_server_addr,
-                                        ipsec_server_type,
-                                        log_path)
-        vpn_profile[VPN_CONST.IPSEC_USER_CERT] = cert_name.split('.')[0]
-        ad.droid.installCertificate(vpn_profile, cert_name,
-                                    vpn_params['cert_password'])
-    else:
-        vpn_profile[VPN_CONST.MPPE] = "mppe"
-
-    return vpn_profile
-
-
-def generate_ikev2_vpn_profile(ad, vpn_params, vpn_type, server_addr, log_path):
-    """Generate VPN profile for IKEv2 VPN.
-
-    Args:
-        ad: android device object.
-        vpn_params: vpn params from config file.
-        vpn_type: ikev2 vpn type.
-        server_addr: vpn server addr.
-        log_path: log path to download cert.
-
-    Returns:
-        Vpn profile.
-    """
-    vpn_profile = {
-        VPN_CONST.TYPE: vpn_type.value,
-        VPN_CONST.SERVER: server_addr,
-    }
-
-    if vpn_type.name == "IKEV2_IPSEC_USER_PASS":
-        vpn_profile[VPN_CONST.USER] = vpn_params["vpn_username"]
-        vpn_profile[VPN_CONST.PWD] = vpn_params["vpn_password"]
-        vpn_profile[VPN_CONST.IPSEC_ID] = vpn_params["vpn_identity"]
-        cert_name = download_load_certs(
-            ad, vpn_params, vpn_type, vpn_params["server_addr"],
-            "IKEV2_IPSEC_USER_PASS", log_path)
-        vpn_profile[VPN_CONST.IPSEC_CA_CERT] = cert_name.split('.')[0]
-        ad.droid.installCertificate(
-            vpn_profile, cert_name, vpn_params['cert_password'])
-    elif vpn_type.name == "IKEV2_IPSEC_PSK":
-        vpn_profile[VPN_CONST.IPSEC_ID] = vpn_params["vpn_identity"]
-        vpn_profile[VPN_CONST.IPSEC_SECRET] = vpn_params["psk_secret"]
-    else:
-        vpn_profile[VPN_CONST.IPSEC_ID] = "%s@%s" % (
-            vpn_params["vpn_identity"], server_addr)
-        logging.info("ID: %s@%s" % (vpn_params["vpn_identity"], server_addr))
-        cert_name = download_load_certs(
-            ad, vpn_params, vpn_type, vpn_params["server_addr"],
-            "IKEV2_IPSEC_RSA", log_path)
-        vpn_profile[VPN_CONST.IPSEC_USER_CERT] = cert_name.split('.')[0]
-        vpn_profile[VPN_CONST.IPSEC_CA_CERT] = cert_name.split('.')[0]
-        ad.droid.installCertificate(
-            vpn_profile, cert_name, vpn_params['cert_password'])
-
-    return vpn_profile
-
-
-def start_tcpdump(ad, test_name, interface="any"):
-    """Start tcpdump on all interfaces.
-
-    Args:
-        ad: android device object.
-        test_name: tcpdump file name will have this
-    """
-    ad.log.info("Starting tcpdump on all interfaces")
-    ad.adb.shell("killall -9 tcpdump", ignore_status=True)
-    ad.adb.shell("mkdir %s" % TCPDUMP_PATH, ignore_status=True)
-    ad.adb.shell("rm -rf %s/*" % TCPDUMP_PATH, ignore_status=True)
-
-    file_name = "%s/tcpdump_%s_%s.pcap" % (TCPDUMP_PATH, ad.serial, test_name)
-    ad.log.info("tcpdump file is %s", file_name)
-    cmd = "adb -s {} shell tcpdump -i {} -s0 -w {}".format(ad.serial,
-                                                           interface, file_name)
-    try:
-        return start_standing_subprocess(cmd, 5)
-    except Exception:
-        ad.log.exception('Could not start standing process %s' % repr(cmd))
-
-    return None
-
-
-def stop_tcpdump(ad,
-                 proc,
-                 test_name,
-                 pull_dump=True,
-                 adb_pull_timeout=adb.DEFAULT_ADB_PULL_TIMEOUT):
-    """Stops tcpdump on any iface.
-
-       Pulls the tcpdump file in the tcpdump dir if necessary.
-
-    Args:
-        ad: android device object.
-        proc: need to know which pid to stop
-        test_name: test name to save the tcpdump file
-        pull_dump: pull tcpdump file or not
-        adb_pull_timeout: timeout for adb_pull
-
-    Returns:
-      log_path of the tcpdump file
-    """
-    ad.log.info("Stopping and pulling tcpdump if any")
-    if proc is None:
-        return None
-    try:
-        stop_standing_subprocess(proc)
-    except Exception as e:
-        ad.log.warning(e)
-    if pull_dump:
-        log_path = os.path.join(ad.device_log_path, "TCPDUMP_%s" % ad.serial)
-        os.makedirs(log_path, exist_ok=True)
-        ad.adb.pull("%s/. %s" % (TCPDUMP_PATH, log_path),
-                    timeout=adb_pull_timeout)
-        ad.adb.shell("rm -rf %s/*" % TCPDUMP_PATH, ignore_status=True)
-        file_name = "tcpdump_%s_%s.pcap" % (ad.serial, test_name)
-        return "%s/%s" % (log_path, file_name)
-    return None
-
-
-def start_tcpdump_gce_server(ad, test_name, dest_port, gce):
-    """Start tcpdump on gce server.
-
-    Args:
-        ad: android device object
-        test_name: test case name
-        dest_port: port to collect tcpdump
-        gce: dictionary of gce instance
-
-    Returns:
-       process id and pcap file path from gce server
-    """
-    ad.log.info("Starting tcpdump on gce server")
-
-    # pcap file name
-    fname = "/tmp/%s_%s_%s_%s" % \
-        (test_name, ad.model, ad.serial,
-         time.strftime('%Y-%m-%d_%H-%M-%S', time.localtime(time.time())))
-
-    # start tcpdump
-    tcpdump_cmd = "sudo bash -c \'tcpdump -i %s -w %s.pcap port %s > \
-        %s.txt 2>&1 & echo $!\'" % (gce["interface"], fname, dest_port, fname)
-    gcloud_ssh_cmd = "%s --project=%s --zone=%s %s@%s --command " % \
-        (GCE_SSH, gce["project"], gce["zone"], gce["username"], gce["hostname"])
-    gce_ssh_cmd = '%s "%s"' % (gcloud_ssh_cmd, tcpdump_cmd)
-    utils.exe_cmd(gce_ssh_cmd)
-
-    # get process id
-    ps_cmd = '%s "ps aux | grep tcpdump | grep %s"' % (gcloud_ssh_cmd, fname)
-    tcpdump_pid = utils.exe_cmd(ps_cmd).decode("utf-8", "ignore").split()
-    if not tcpdump_pid:
-        raise signals.TestFailure("Failed to start tcpdump on gce server")
-    return tcpdump_pid[1], fname
-
-
-def stop_tcpdump_gce_server(ad, tcpdump_pid, fname, gce):
-    """Stop and pull tcpdump file from gce server.
-
-    Args:
-        ad: android device object
-        tcpdump_pid: process id for tcpdump file
-        fname: tcpdump file path
-        gce: dictionary of gce instance
-
-    Returns:
-       pcap file from gce server
-    """
-    ad.log.info("Stop and pull pcap file from gce server")
-
-    # stop tcpdump
-    tcpdump_cmd = "sudo kill %s" % tcpdump_pid
-    gcloud_ssh_cmd = "%s --project=%s --zone=%s %s@%s --command " % \
-        (GCE_SSH, gce["project"], gce["zone"], gce["username"], gce["hostname"])
-    gce_ssh_cmd = '%s "%s"' % (gcloud_ssh_cmd, tcpdump_cmd)
-    utils.exe_cmd(gce_ssh_cmd)
-
-    # verify tcpdump is stopped
-    ps_cmd = '%s "ps aux | grep tcpdump"' % gcloud_ssh_cmd
-    res = utils.exe_cmd(ps_cmd).decode("utf-8", "ignore")
-    if tcpdump_pid in res.split():
-        raise signals.TestFailure("Failed to stop tcpdump on gce server")
-    if not fname:
-        return None
-
-    # pull pcap file
-    gcloud_scp_cmd = "%s --project=%s --zone=%s %s@%s:" % \
-        (GCE_SCP, gce["project"], gce["zone"], gce["username"], gce["hostname"])
-    pull_file = '%s%s.pcap %s/' % (gcloud_scp_cmd, fname, ad.device_log_path)
-    utils.exe_cmd(pull_file)
-    if not os.path.exists(
-        "%s/%s.pcap" % (ad.device_log_path, fname.split('/')[-1])):
-        raise signals.TestFailure("Failed to pull tcpdump from gce server")
-
-    # delete pcaps
-    utils.exe_cmd('%s "sudo rm %s.*"' % (gcloud_ssh_cmd, fname))
-
-    # return pcap file
-    pcap_file = "%s/%s.pcap" % (ad.device_log_path, fname.split('/')[-1])
-    return pcap_file
-
-
-def is_ipaddress_ipv6(ip_address):
-    """Verify if the given string is a valid IPv6 address.
-
-    Args:
-        ip_address: string containing the IP address
-
-    Returns:
-        True: if valid ipv6 address
-        False: if not
-    """
-    try:
-        socket.inet_pton(socket.AF_INET6, ip_address)
-        return True
-    except socket.error:
-        return False
-
-
-def set_cap_net_raw_capability():
-    """Set the CAP_NET_RAW capability
-
-    To send the Scapy packets, we need to get the CAP_NET_RAW capability first.
-    """
-    cap_net_raw = "sudo setcap cap_net_raw=eip $(readlink -f $(which act.py))"
-    utils.exe_cmd(cap_net_raw)
-    cap_python = "sudo setcap cap_net_raw=eip $(readlink -f $(which python))"
-    utils.exe_cmd(cap_python)
-
-
-def stop_usb_tethering(ad):
-    """Stop USB tethering.
-
-    Args:
-        ad: android device object
-    """
-    ad.log.info("Stopping USB Tethering")
-    ad.stop_services()
-    ad.adb.shell(USB_CHARGE_MODE)
-    ad.adb.wait_for_device()
-    ad.start_services()
-
-
-def wait_for_new_iface(old_ifaces):
-    """Wait for the new interface to come up.
-
-    Args:
-        old_ifaces: list of old interfaces
-    """
-    old_set = set(old_ifaces)
-    # Try 10 times to find a new interface with a 1s sleep every time
-    # (equivalent to a 9s timeout)
-    for _ in range(0, 10):
-        new_ifaces = set(get_if_list()) - old_set
-        asserts.assert_true(len(new_ifaces) < 2,
-                            "Too many new interfaces after turning on "
-                            "tethering")
-        if len(new_ifaces) == 1:
-            # enable the new iface before return
-            new_iface = new_ifaces.pop()
-            enable_iface(new_iface)
-            return new_iface
-        time.sleep(1)
-    asserts.fail("Timeout waiting for tethering interface on host")
-
-
-def get_if_list():
-    """Returns a list containing all network interfaces.
-
-    The newest version of Scapy.get_if_list() returns the cached interfaces,
-    which might be out-dated, and unable to perceive the interface changes.
-    Use this method when need to monitoring the network interfaces changes.
-    Reference: https://github.com/secdev/scapy/pull/2707
-
-    Returns:
-        A list of the latest network interfaces. For example:
-        ['cvd-ebr', ..., 'eno1', 'enx4afa19a8dde1', 'lo', 'wlxd03745d68d88']
-    """
-    from scapy.config import conf
-    from scapy.compat import plain_str
-
-    # Get ifconfig output
-    result = job.run([conf.prog.ifconfig])
-    if result.exit_status:
-        raise asserts.fail(
-            "Failed to execute ifconfig: {}".format(plain_str(result.stderr)))
-
-    interfaces = [
-        line[:line.find(':')] for line in plain_str(result.stdout).splitlines()
-        if ": flags" in line.lower()
-    ]
-    return interfaces
-
-
-def enable_hardware_offload(ad):
-    """Enable hardware offload using adb shell command.
-
-    Args:
-        ad: Android device object
-    """
-    ad.log.info("Enabling hardware offload.")
-    ad.adb.shell(ENABLE_HARDWARE_OFFLOAD, ignore_status=True)
-    ad.reboot()
-    time.sleep(WAIT_TIME_AFTER_REBOOT)
-
-
-def disable_hardware_offload(ad):
-    """Disable hardware offload using adb shell command.
-
-    Args:
-        ad: Android device object
-    """
-    ad.log.info("Disabling hardware offload.")
-    ad.adb.shell(DISABLE_HARDWARE_OFFLOAD, ignore_status=True)
-    ad.reboot()
-    time.sleep(WAIT_TIME_AFTER_REBOOT)
-
-
-def enable_iface(iface):
-    """Enable network interfaces.
-
-    Some network interface might disabled as default, need to enable before
-    using it.
-
-    Args:
-        iface: network interface that need to enable
-    """
-    from scapy.compat import plain_str
-
-    result = job.run("sudo ifconfig %s up" % (iface), ignore_status=True)
-    if result.exit_status:
-        raise asserts.fail(
-            "Failed to execute ifconfig: {}".format(plain_str(result.stderr)))
diff --git a/src/antlion/test_utils/net/nsd_const.py b/src/antlion/test_utils/net/nsd_const.py
deleted file mode 100644
index 6b53f23..0000000
--- a/src/antlion/test_utils/net/nsd_const.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-######################################################
-# NsdManager.RegistrationListener events
-######################################################
-REG_LISTENER_EVENT = "NsdRegistrationListener"
-
-# event type - using REG_LISTENER_CALLBACK
-REG_LISTENER_EVENT_ON_REG_FAILED = "OnRegistrationFailed"
-REG_LISTENER_EVENT_ON_SERVICE_REGISTERED = "OnServiceRegistered"
-REG_LISTENER_EVENT_ON_SERVICE_UNREG = "OnServiceUnregistered"
-REG_LISTENER_EVENT_ON_UNREG_FAILED = "OnUnregistrationFailed"
-
-# event data keys
-REG_LISTENER_DATA_ID = "id"
-REG_LISTENER_CALLBACK = "callback"
-REG_LISTENER_ERROR_CODE = "error_code"
-
-######################################################
-# NsdManager.DiscoveryListener events
-######################################################
-DISCOVERY_LISTENER_EVENT = "NsdDiscoveryListener"
-
-# event type - using DISCOVERY_LISTENER_DATA_CALLBACK
-DISCOVERY_LISTENER_EVENT_ON_DISCOVERY_STARTED = "OnDiscoveryStarted"
-DISCOVERY_LISTENER_EVENT_ON_DISCOVERY_STOPPED = "OnDiscoveryStopped"
-DISCOVERY_LISTENER_EVENT_ON_SERVICE_FOUND = "OnServiceFound"
-DISCOVERY_LISTENER_EVENT_ON_SERVICE_LOST = "OnServiceLost"
-DISCOVERY_LISTENER_EVENT_ON_START_DISCOVERY_FAILED = "OnStartDiscoveryFailed"
-DISCOVERY_LISTENER_EVENT_ON_STOP_DISCOVERY_FAILED = "OnStopDiscoveryFailed"
-
-# event data keys
-DISCOVERY_LISTENER_DATA_ID = "id"
-DISCOVERY_LISTENER_DATA_CALLBACK = "callback"
-DISCOVERY_LISTENER_DATA_SERVICE_TYPE = "service_type"
-DISCOVERY_LISTENER_DATA_ERROR_CODE = "error_code"
-
-######################################################
-# NsdManager.ResolveListener events
-######################################################
-RESOLVE_LISTENER_EVENT = "NsdResolveListener"
-
-# event type using RESOLVE_LISTENER_DATA_CALLBACK
-RESOLVE_LISTENER_EVENT_ON_RESOLVE_FAIL = "OnResolveFail"
-RESOLVE_LISTENER_EVENT_ON_SERVICE_RESOLVED = "OnServiceResolved"
-
-# event data keys
-RESOLVE_LISTENER_DATA_ID = "id"
-RESOLVE_LISTENER_DATA_CALLBACK = "callback"
-RESOLVE_LISTENER_DATA_ERROR_CODE = "error_code"
-
-######################################################
-# NsdServiceInfo elements
-######################################################
-NSD_SERVICE_INFO_HOST = "serviceInfoHost"
-NSD_SERVICE_INFO_PORT = "serviceInfoPort"
-NSD_SERVICE_INFO_SERVICE_NAME = "serviceInfoServiceName"
-NSD_SERVICE_INFO_SERVICE_TYPE = "serviceInfoServiceType"
diff --git a/src/antlion/test_utils/net/socket_test_utils.py b/src/antlion/test_utils/net/socket_test_utils.py
deleted file mode 100644
index a2d695c..0000000
--- a/src/antlion/test_utils/net/socket_test_utils.py
+++ /dev/null
@@ -1,293 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import queue
-import threading
-import time
-
-from antlion.test_utils.net import connectivity_const as cconst
-from antlion import asserts
-
-MSG = "Test message "
-PKTS = 5
-""" Methods for android.system.Os based sockets """
-
-
-def open_android_socket(ad, domain, sock_type, ip, port):
-    """ Open TCP or UDP using android.system.Os class
-
-    Args:
-      1. ad - android device object
-      2. domain - IPv4 or IPv6 type
-      3. sock_type - UDP or TCP socket
-      4. ip - IP addr on the device
-      5. port - open socket on port
-
-    Returns:
-      File descriptor key
-    """
-    fd_key = ad.droid.openSocket(domain, sock_type, ip, port)
-    ad.log.info("File descriptor: %s" % fd_key)
-    asserts.assert_true(fd_key, "Failed to open socket")
-    return fd_key
-
-
-def close_android_socket(ad, fd_key):
-    """ Close socket
-
-    Args:
-      1. ad - android device object
-      2. fd_key - file descriptor key
-    """
-    status = ad.droid.closeSocket(fd_key)
-    asserts.assert_true(status, "Failed to close socket")
-
-
-def listen_accept_android_socket(client, server, client_fd, server_fd,
-                                 server_ip, server_port):
-    """ Listen, accept TCP sockets
-
-    Args:
-      1. client : ad object for client device
-      2. server : ad object for server device
-      3. client_fd : client's socket handle
-      4. server_fd : server's socket handle
-      5. server_ip : send data to this IP
-      6. server_port : send data to this port
-    """
-    server.droid.listenSocket(server_fd)
-    client.droid.connectSocket(client_fd, server_ip, server_port)
-    sock = server.droid.acceptSocket(server_fd)
-    asserts.assert_true(sock, "Failed to accept socket")
-    return sock
-
-
-def send_recv_data_android_sockets(client, server, client_fd, server_fd,
-                                   server_ip, server_port):
-    """ Send TCP or UDP data over android os sockets from client to server.
-        Verify that server received the data.
-
-    Args:
-      1. client : ad object for client device
-      2. server : ad object for server device
-      3. client_fd : client's socket handle
-      4. server_fd : server's socket handle
-      5. server_ip : send data to this IP
-      6. server_port : send data to this port
-    """
-    send_list = []
-    recv_list = []
-
-    for _ in range(1, PKTS + 1):
-        msg = MSG + " %s" % _
-        send_list.append(msg)
-        client.log.info("Sending message: %s" % msg)
-        client.droid.sendDataOverSocket(server_ip, server_port, msg, client_fd)
-        recv_msg = server.droid.recvDataOverSocket(server_fd)
-        server.log.info("Received message: %s" % recv_msg)
-        recv_list.append(recv_msg)
-
-    recv_list = [x.rstrip('\x00') if x else x for x in recv_list]
-    asserts.assert_true(send_list and recv_list and send_list == recv_list,
-                        "Send and recv information is incorrect")
-
-
-""" Methods for java.net.DatagramSocket based sockets """
-
-
-def open_datagram_socket(ad, ip, port):
-    """ Open datagram socket
-
-    Args:
-      1. ad : android device object
-      2. ip : IP addr on the device
-      3. port : socket port
-
-    Returns:
-      Hash key of the datagram socket
-    """
-    socket_key = ad.droid.openDatagramSocket(ip, port)
-    ad.log.info("Datagram socket: %s" % socket_key)
-    asserts.assert_true(socket_key, "Failed to open datagram socket")
-    return socket_key
-
-
-def close_datagram_socket(ad, socket_key):
-    """ Close datagram socket
-
-    Args:
-      1. socket_key : hash key of datagram socket
-    """
-    status = ad.droid.closeDatagramSocket(socket_key)
-    asserts.assert_true(status, "Failed to close datagram socket")
-
-
-def send_recv_data_datagram_sockets(client, server, client_sock, server_sock,
-                                    server_ip, server_port):
-    """ Send data over datagram socket from dut_a to dut_b.
-        Verify that dut_b received the data.
-
-    Args:
-      1. client : ad object for client device
-      2. server : ad object for server device
-      3. client_sock : client's socket handle
-      4. server_sock : server's socket handle
-      5. server_ip : send data to this IP
-      6. server_port : send data to this port
-    """
-    send_list = []
-    recv_list = []
-
-    for _ in range(1, PKTS + 1):
-        msg = MSG + " %s" % _
-        send_list.append(msg)
-        client.log.info("Sending message: %s" % msg)
-        client.droid.sendDataOverDatagramSocket(client_sock, msg, server_ip,
-                                                server_port)
-        recv_msg = server.droid.recvDataOverDatagramSocket(server_sock)
-        server.log.info("Received message: %s" % recv_msg)
-        recv_list.append(recv_msg)
-
-    recv_list = [x.rstrip('\x00') if x else x for x in recv_list]
-    asserts.assert_true(send_list and recv_list and send_list == recv_list,
-                        "Send and recv information is incorrect")
-
-
-""" Utils methods for java.net.Socket based sockets """
-
-
-def _accept_socket(server, server_ip, server_port, server_sock, q):
-    sock = server.droid.acceptTcpSocket(server_sock)
-    server.log.info("Server socket: %s" % sock)
-    q.put(sock)
-
-
-def _client_socket(client, server_ip, server_port, client_ip, client_port, q):
-    time.sleep(0.5)
-    sock = client.droid.openTcpSocket(server_ip, server_port, client_ip,
-                                      client_port)
-    client.log.info("Client socket: %s" % sock)
-    q.put(sock)
-
-
-def open_connect_socket(client, server, client_ip, server_ip, client_port,
-                        server_port, server_sock):
-    """ Open tcp socket and connect to server
-
-    Args:
-      1. client : ad object for client device
-      2. server : ad object for server device
-      3. client_ip : client's socket handle
-      4. server_ip : send data to this IP
-      5. client_port : port on client socket
-      6. server_port : port on server socket
-      7. server_sock : server socket
-
-    Returns:
-      client and server socket from successful connect
-    """
-    sq = queue.Queue()
-    cq = queue.Queue()
-    s = threading.Thread(target=_accept_socket,
-                         args=(server, server_ip, server_port, server_sock,
-                               sq))
-    c = threading.Thread(target=_client_socket,
-                         args=(client, server_ip, server_port, client_ip,
-                               client_port, cq))
-    s.start()
-    c.start()
-    c.join()
-    s.join()
-
-    client_sock = cq.get()
-    server_sock = sq.get()
-    asserts.assert_true(client_sock and server_sock, "Failed to open sockets")
-
-    return client_sock, server_sock
-
-
-def open_server_socket(server, server_ip, server_port):
-    """ Open tcp server socket
-
-    Args:
-      1. server : ad object for server device
-      2. server_ip : send data to this IP
-      3. server_port : send data to this port
-    """
-    sock = server.droid.openTcpServerSocket(server_ip, server_port)
-    server.log.info("Server Socket: %s" % sock)
-    asserts.assert_true(sock, "Failed to open server socket")
-    return sock
-
-
-def close_socket(ad, socket):
-    """ Close socket
-
-    Args:
-      1. ad - android device object
-      2. socket - socket key
-    """
-    status = ad.droid.closeTcpSocket(socket)
-    asserts.assert_true(status, "Failed to socket")
-
-
-def close_server_socket(ad, socket):
-    """ Close server socket
-
-    Args:
-      1. ad - android device object
-      2. socket - server socket key
-    """
-    status = ad.droid.closeTcpServerSocket(socket)
-    asserts.assert_true(status, "Failed to socket")
-
-
-def shutdown_socket(ad, socket):
-    """ Shutdown socket
-
-    Args:
-      1. ad - androidandroid device object
-      2. socket - socket key
-    """
-    fd = ad.droid.getFileDescriptorOfSocket(socket)
-    asserts.assert_true(fd, "Failed to get FileDescriptor key")
-    status = ad.droid.shutdownFileDescriptor(fd)
-    asserts.assert_true(status, "Failed to shutdown socket")
-
-
-def send_recv_data_sockets(client, server, client_sock, server_sock):
-    """ Send data over TCP socket from client to server.
-        Verify that server received the data
-
-    Args:
-      1. client : ad object for client device
-      2. server : ad object for server device
-      3. client_sock : client's socket handle
-      4. server_sock : server's socket handle
-    """
-    send_list = []
-    recv_list = []
-
-    for _ in range(1, PKTS + 1):
-        msg = MSG + " %s" % _
-        send_list.append(msg)
-        client.log.info("Sending message: %s" % msg)
-        client.droid.sendDataOverTcpSocket(client_sock, msg)
-        recv_msg = server.droid.recvDataOverTcpSocket(server_sock)
-        server.log.info("Received message: %s" % recv_msg)
-        recv_list.append(recv_msg)
-
-    recv_list = [x.rstrip('\x00') if x else x for x in recv_list]
-    asserts.assert_true(send_list and recv_list and send_list == recv_list,
-                        "Send and recv information is incorrect")
diff --git a/src/antlion/test_utils/net/ui_utils.py b/src/antlion/test_utils/net/ui_utils.py
deleted file mode 100644
index 03aa6c5..0000000
--- a/src/antlion/test_utils/net/ui_utils.py
+++ /dev/null
@@ -1,277 +0,0 @@
-"""Utils for adb-based UI operations."""
-
-import collections
-import logging
-import os
-import re
-import time
-
-from xml.dom import minidom
-from antlion.controllers.android_lib.errors import AndroidDeviceError
-
-
-class Point(collections.namedtuple('Point', ['x', 'y'])):
-
-  def __repr__(self):
-    return '{x},{y}'.format(x=self.x, y=self.y)
-
-
-class Bounds(collections.namedtuple('Bounds', ['start', 'end'])):
-
-  def __repr__(self):
-    return '[{start}][{end}]'.format(start=str(self.start), end=str(self.end))
-
-  def calculate_middle_point(self):
-    return Point((self.start.x + self.end.x) // 2,
-                 (self.start.y + self.end.y) // 2)
-
-
-def get_key_value_pair_strings(kv_pairs):
-  return ' '.join(['%s="%s"' % (k, v) for k, v in kv_pairs.items()])
-
-
-def parse_bound(bounds_string):
-  """Parse UI bound string.
-
-  Args:
-    bounds_string: string, In the format of the UI element bound.
-                   e.g '[0,0][1080,2160]'
-
-  Returns:
-    Bounds, The bound of UI element.
-  """
-  bounds_pattern = re.compile(r'\[(\d+),(\d+)\]\[(\d+),(\d+)\]')
-  points = bounds_pattern.match(bounds_string).groups()
-  points = list(map(int, points))
-  return Bounds(Point(*points[:2]), Point(*points[-2:]))
-
-
-def _find_point_in_bounds(bounds_string):
-  """Finds a point that resides within the given bounds.
-
-  Args:
-    bounds_string: string, In the format of the UI element bound.
-
-  Returns:
-    A tuple of integers, representing X and Y coordinates of a point within
-    the given boundary.
-  """
-  return parse_bound(bounds_string).calculate_middle_point()
-
-
-def get_screen_dump_xml(device):
-  """Gets an XML dump of the current device screen.
-
-  This only works when there is no instrumentation process running. A running
-  instrumentation process will disrupt calls for `adb shell uiautomator dump`.
-
-  Args:
-    device: AndroidDevice object.
-
-  Returns:
-    XML Document of the screen dump.
-  """
-  os.makedirs(device.log_path, exist_ok=True)
-  device.adb.shell('uiautomator dump')
-  device.adb.pull('/sdcard/window_dump.xml %s' % device.log_path)
-  return minidom.parse('%s/window_dump.xml' % device.log_path)
-
-
-def match_node(node, **matcher):
-  """Determine if a mode matches with the given matcher.
-
-  Args:
-    node: Is a XML node to be checked against matcher.
-    **matcher: Is a dict representing mobly AdbUiDevice matchers.
-
-  Returns:
-    True if all matchers match the given node.
-  """
-  match_list = []
-  for k, v in matcher.items():
-    if k == 'class_name':
-      key = k.replace('class_name', 'class')
-    elif k == 'text_contains':
-      key = k.replace('text_contains', 'text')
-    else:
-      key = k.replace('_', '-')
-    try:
-      if k == 'text_contains':
-        match_list.append(v in node.attributes[key].value)
-      else:
-        match_list.append(node.attributes[key].value == v)
-    except KeyError:
-      match_list.append(False)
-  return all(match_list)
-
-
-def _find_node(screen_dump_xml, **kwargs):
-  """Finds an XML node from an XML DOM.
-
-  Args:
-    screen_dump_xml: XML doc, parsed from adb ui automator dump.
-    **kwargs: key/value pairs to match in an XML node's attributes. Value of
-      each key has to be string type. Below lists keys which can be used:
-        index
-        text
-        text_contains (matching a part of text attribute)
-        resource_id
-        class_name (representing "class" attribute)
-        package
-        content_desc
-        checkable
-        checked
-        clickable
-        enabled
-        focusable
-        focused
-        scrollable
-        long_clickable
-        password
-        selected
-        A special key/value: matching_node key is used to identify If more than one nodes have the same key/value,
-            the matching_node stands for which matching node should be fetched.
-
-  Returns:
-    XML node of the UI element or None if not found.
-  """
-  nodes = screen_dump_xml.getElementsByTagName('node')
-  matching_node = kwargs.pop('matching_node', 1)
-  count = 1
-  for node in nodes:
-    if match_node(node, **kwargs):
-      if count == matching_node:
-        logging.debug('Found a node matching conditions: %s',
-                      get_key_value_pair_strings(kwargs))
-        return node
-      count += 1
-  return None
-
-
-def wait_and_get_xml_node(device, timeout, child=None, sibling=None, **kwargs):
-  """Waits for a node to appear and return it.
-
-  Args:
-    device: AndroidDevice object.
-    timeout: float, The number of seconds to wait for before giving up.
-    child: dict, a dict contains child XML node's attributes. It is extra set of
-      conditions to match an XML node that is under the XML node which is found
-      by **kwargs.
-    sibling: dict, a dict contains sibling XML node's attributes. It is extra
-      set of conditions to match an XML node that is under parent of the XML
-      node which is found by **kwargs.
-    **kwargs: Key/value pairs to match in an XML node's attributes.
-
-  Returns:
-    The XML node of the UI element.
-
-  Raises:
-    AndroidDeviceError: if the UI element does not appear on screen within
-    timeout or extra sets of conditions of child and sibling are used in a call.
-  """
-  if child and sibling:
-    raise AndroidDeviceError(
-        device, 'Only use one extra set of conditions: child or sibling.')
-  start_time = time.time()
-  threshold = start_time + timeout
-  while time.time() < threshold:
-    time.sleep(1)
-    screen_dump_xml = get_screen_dump_xml(device)
-    node = _find_node(screen_dump_xml, **kwargs)
-    if node and child:
-      node = _find_node(node, **child)
-    if node and sibling:
-      node = _find_node(node.parentNode, **sibling)
-    if node:
-      return node
-  msg = ('Timed out after %ds waiting for UI node matching conditions: %s.'
-         % (timeout, get_key_value_pair_strings(kwargs)))
-  if child:
-    msg = ('%s extra conditions: %s'
-           % (msg, get_key_value_pair_strings(child)))
-  if sibling:
-    msg = ('%s extra conditions: %s'
-           % (msg, get_key_value_pair_strings(sibling)))
-  raise AndroidDeviceError(device, msg)
-
-
-def has_element(device, **kwargs):
-  """Checks a UI element whether appears or not in the current screen.
-
-  Args:
-    device: AndroidDevice object.
-    **kwargs: Key/value pairs to match in an XML node's attributes.
-
-  Returns:
-    True if the UI element appears in the current screen else False.
-  """
-  timeout_sec = kwargs.pop('timeout', 30)
-  try:
-    wait_and_get_xml_node(device, timeout_sec, **kwargs)
-    return True
-  except AndroidDeviceError:
-    return False
-
-
-def get_element_attributes(device, **kwargs):
-  """Gets a UI element's all attributes.
-
-  Args:
-    device: AndroidDevice object.
-    **kwargs: Key/value pairs to match in an XML node's attributes.
-
-  Returns:
-    XML Node Attributes.
-  """
-  timeout_sec = kwargs.pop('timeout', 30)
-  node = wait_and_get_xml_node(device, timeout_sec, **kwargs)
-  return node.attributes
-
-
-def wait_and_click(device, duration_ms=None, **kwargs):
-  """Wait for a UI element to appear and click on it.
-
-  This function locates a UI element on the screen by matching attributes of
-  nodes in XML DOM, calculates a point's coordinates within the boundary of the
-  element, and clicks on the point marked by the coordinates.
-
-  Args:
-    device: AndroidDevice object.
-    duration_ms: int, The number of milliseconds to long-click.
-    **kwargs: A set of `key=value` parameters that identifies a UI element.
-  """
-  timeout_sec = kwargs.pop('timeout', 30)
-  button_node = wait_and_get_xml_node(device, timeout_sec, **kwargs)
-  x, y = _find_point_in_bounds(button_node.attributes['bounds'].value)
-  args = []
-  if duration_ms is None:
-    args = 'input tap %s %s' % (str(x), str(y))
-  else:
-    # Long click.
-    args = 'input swipe %s %s %s %s %s' % \
-        (str(x), str(y), str(x), str(y), str(duration_ms))
-  device.adb.shell(args)
-
-def wait_and_input_text(device, input_text, duration_ms=None, **kwargs):
-  """Wait for a UI element text field that can accept text entry.
-
-  This function located a UI element using wait_and_click. Once the element is
-  clicked, the text is input into the text field.
-
-  Args:
-    device: AndroidDevice, Mobly's Android controller object.
-    input_text: Text string to be entered in to the text field.
-    duration_ms: duration in milliseconds.
-    **kwargs: A set of `key=value` parameters that identifies a UI element.
-  """
-  wait_and_click(device, duration_ms, **kwargs)
-  # Replace special characters.
-  # The command "input text <string>" requires special treatment for
-  # characters ' ' and '&'.  They need to be escaped. for example:
-  #    "hello world!!&" needs to transform to "hello\ world!!\&"
-  special_chars = ' &'
-  for c in special_chars:
-    input_text = input_text.replace(c, '\\%s' % c)
-  input_text = "'" + input_text + "'"
-  args = 'input text %s' % input_text
-  device.adb.shell(args)
diff --git a/src/antlion/test_utils/users/__init__.py b/src/antlion/test_utils/users/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/users/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/users/users.py b/src/antlion/test_utils/users/users.py
deleted file mode 100644
index 07df00d..0000000
--- a/src/antlion/test_utils/users/users.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# Defines utilities that can be used to create android user account
-
-import re
-import time
-import logging as log
-
-
-
-def get_all_users(android_device):
-    all_users = {}
-    out = android_device.adb.shell("pm list users")
-
-    for user in re.findall("UserInfo{(.*\d*\w):", out):
-        all = user.split(":")
-        all_users[all[1]] = all_users.get(all[1], all[0])
-    return all_users
-
-
-def create_new_user(android_device, user_name):
-    out = android_device.adb.shell("pm create-user {}".format(user_name))
-    return re.search("Success(.* (.*\d))", out).group(2)
-
-
-def switch_user(android_device, user_id):
-    prev_user = get_current_user(android_device)
-    android_device.adb.shell("am switch-user {}".format(user_id))
-    if not _wait_for_user_to_take_place(android_device, prev_user):
-        log.error("Failed to successfully switch user {}".format(user_id))
-        return False
-    return True
-
-
-def remove_user(android_device, user_id):
-    return "Success" in android_device.adb.shell("pm remove-user {}".format(user_id))
-
-
-def get_current_user(android_device):
-    out = android_device.adb.shell("dumpsys activity")
-    result = re.search("mCurrentUserId:(\d+)", out)
-    return result.group(1)
-
-
-def _wait_for_user_to_take_place(android_device, user_id, timeout=10):
-    start_time = time.time()
-    while (start_time + timeout) > time.time():
-        time.sleep(1)
-        if user_id != get_current_user(android_device):
-            return True
-    return False
diff --git a/src/antlion/test_utils/wifi/RttPostFlightTest.py b/src/antlion/test_utils/wifi/RttPostFlightTest.py
deleted file mode 100644
index 16d85e2..0000000
--- a/src/antlion/test_utils/wifi/RttPostFlightTest.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-import antlion.test_utils.wifi.rpm_controller_utils as rutils
-import antlion.test_utils.wifi.wifi_test_utils as wutils
-from antlion import asserts
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-SSID = "DO_NOT_CONNECT"
-TIMEOUT = 60
-WAIT_TIME = 10
-
-class RttPostFlightTest(WifiBaseTest):
-    """Turns off 802.11mc AP after RTT tests."""
-
-    def setup_class(self):
-        super().setup_class()
-        self.dut = self.android_devices[0]
-        required_params = ["rpm_ip", "rpm_port"]
-        self.unpack_userparams(req_param_names=required_params)
-        self.rpm_telnet = rutils.create_telnet_session(self.rpm_ip)
-
-    ### Tests ###
-
-    def test_turn_off_80211mc_ap(self):
-        self.rpm_telnet.turn_off(self.rpm_port)
-        curr_time = time.time()
-        while time.time() < curr_time + TIMEOUT:
-            time.sleep(WAIT_TIME)
-            if not wutils.start_wifi_connection_scan_and_check_for_network(
-                self.dut, SSID):
-                return True
-        self.log.error("Failed to turn off AP")
-        return False
diff --git a/src/antlion/test_utils/wifi/RttPreFlightTest.py b/src/antlion/test_utils/wifi/RttPreFlightTest.py
deleted file mode 100644
index 0ab6d99..0000000
--- a/src/antlion/test_utils/wifi/RttPreFlightTest.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-import antlion.test_utils.wifi.rpm_controller_utils as rutils
-import antlion.test_utils.wifi.wifi_test_utils as wutils
-from antlion import asserts
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-SSID = "DO_NOT_CONNECT"
-TIMEOUT = 60
-WAIT_TIME = 10
-
-class RttPreFlightTest(WifiBaseTest):
-    """Turns on/off 802.11mc AP before and after RTT tests."""
-
-    def setup_class(self):
-        super().setup_class()
-        self.dut = self.android_devices[0]
-        required_params = ["rpm_ip", "rpm_port"]
-        self.unpack_userparams(req_param_names=required_params)
-        self.rpm_telnet = rutils.create_telnet_session(self.rpm_ip)
-
-    ### Tests ###
-
-    def test_turn_on_80211mc_ap(self):
-        self.rpm_telnet.turn_on(self.rpm_port)
-        curr_time = time.time()
-        while time.time() < curr_time + TIMEOUT:
-            time.sleep(WAIT_TIME)
-            if wutils.start_wifi_connection_scan_and_check_for_network(
-                self.dut, SSID):
-                return True
-        self.log.error("Failed to turn on AP")
-        return False
diff --git a/src/antlion/test_utils/wifi/WifiBaseTest.py b/src/antlion/test_utils/wifi/WifiBaseTest.py
deleted file mode 100644
index ac770f9..0000000
--- a/src/antlion/test_utils/wifi/WifiBaseTest.py
+++ /dev/null
@@ -1,994 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-    Base Class for Defining Common WiFi Test Functionality
-"""
-
-import copy
-import os
-import time
-
-from antlion import asserts
-from antlion import context
-from antlion import signals
-from antlion import utils
-from antlion.base_test import BaseTestClass
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib import hostapd_bss_settings
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.keys import Config
-from antlion.test_utils.net import net_test_utils as nutils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-
-from mobly.base_test import STAGE_NAME_TEARDOWN_CLASS
-
-WifiEnums = wutils.WifiEnums
-AP_1 = 0
-AP_2 = 1
-MAX_AP_COUNT = 2
-
-
-class WifiBaseTest(BaseTestClass):
-
-    def __init__(self, configs):
-        super().__init__(configs)
-        self.enable_packet_log = False
-        self.packet_log_2g = hostapd_constants.AP_DEFAULT_CHANNEL_2G
-        self.packet_log_5g = hostapd_constants.AP_DEFAULT_CHANNEL_5G
-        self.tcpdump_proc = []
-        self.packet_log_pid = {}
-
-    def setup_class(self):
-        if hasattr(self, 'attenuators') and self.attenuators:
-            for attenuator in self.attenuators:
-                attenuator.set_atten(0)
-        opt_param = ["pixel_models", "cnss_diag_file", "country_code_file"]
-        self.unpack_userparams(opt_param_names=opt_param)
-        if hasattr(self, "cnss_diag_file"):
-            if isinstance(self.cnss_diag_file, list):
-                self.cnss_diag_file = self.cnss_diag_file[0]
-            if not os.path.isfile(self.cnss_diag_file):
-                self.cnss_diag_file = os.path.join(
-                    self.user_params[Config.key_config_path.value],
-                    self.cnss_diag_file)
-        if self.enable_packet_log and hasattr(self, "packet_capture"):
-            self.packet_logger = self.packet_capture[0]
-            self.packet_logger.configure_monitor_mode("2G", self.packet_log_2g)
-            self.packet_logger.configure_monitor_mode("5G", self.packet_log_5g)
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                wutils.wifi_test_device_init(ad)
-                if hasattr(self, "country_code_file"):
-                    if isinstance(self.country_code_file, list):
-                        self.country_code_file = self.country_code_file[0]
-                    if not os.path.isfile(self.country_code_file):
-                        self.country_code_file = os.path.join(
-                            self.user_params[Config.key_config_path.value],
-                            self.country_code_file)
-                    self.country_code = utils.load_config(
-                        self.country_code_file)["country"]
-                else:
-                    self.country_code = WifiEnums.CountryCode.US
-                wutils.set_wifi_country_code(ad, self.country_code)
-
-    def setup_test(self):
-        if (hasattr(self, "android_devices")
-                and hasattr(self, "cnss_diag_file")
-                and hasattr(self, "pixel_models")):
-            wutils.start_cnss_diags(self.android_devices, self.cnss_diag_file,
-                                    self.pixel_models)
-        self.tcpdump_proc = []
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                proc = nutils.start_tcpdump(ad, self.test_name)
-                self.tcpdump_proc.append((ad, proc))
-        if hasattr(self, "packet_logger"):
-            self.packet_log_pid = wutils.start_pcap(self.packet_logger, 'dual',
-                                                    self.test_name)
-
-    def teardown_test(self):
-        if (hasattr(self, "android_devices")
-                and hasattr(self, "cnss_diag_file")
-                and hasattr(self, "pixel_models")):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-            for proc in self.tcpdump_proc:
-                nutils.stop_tcpdump(proc[0],
-                                    proc[1],
-                                    self.test_name,
-                                    pull_dump=False)
-            self.tcpdump_proc = []
-        if hasattr(self, "packet_logger") and self.packet_log_pid:
-            wutils.stop_pcap(self.packet_logger,
-                             self.packet_log_pid,
-                             test_status=True)
-            self.packet_log_pid = {}
-
-    def teardown_class(self):
-        begin_time = utils.get_current_epoch_time()
-        super().teardown_class()
-        for device in getattr(self, "fuchsia_devices", []):
-            device.take_bug_report(STAGE_NAME_TEARDOWN_CLASS, begin_time)
-
-    def on_fail(self, test_name, begin_time):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.take_bug_report(test_name, begin_time)
-                ad.cat_adb_log(test_name, begin_time)
-                wutils.get_ssrdumps(ad)
-            if (hasattr(self, "cnss_diag_file")
-                    and hasattr(self, "pixel_models")):
-                wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-                for ad in self.android_devices:
-                    wutils.get_cnss_diag_log(ad)
-            for proc in self.tcpdump_proc:
-                nutils.stop_tcpdump(proc[0], proc[1], self.test_name)
-            self.tcpdump_proc = []
-        if hasattr(self, "packet_logger") and self.packet_log_pid:
-            wutils.stop_pcap(self.packet_logger,
-                             self.packet_log_pid,
-                             test_status=False)
-            self.packet_log_pid = {}
-
-        # Gets a wlan_device log and calls the generic device fail on DUT.
-        for device in getattr(self, "fuchsia_devices", []):
-            self.on_device_fail(device, test_name, begin_time)
-
-    def on_device_fail(self, device, test_name, begin_time):
-        """Gets a generic device DUT bug report.
-
-        This method takes a bug report if the device has the
-        'take_bug_report_on_fail' config value, and if the flag is true. This
-        method also power cycles if 'hard_reboot_on_fail' is True.
-
-        Args:
-            device: Generic device to gather logs from.
-            test_name: Name of the test that triggered this function.
-            begin_time: Logline format timestamp taken when the test started.
-        """
-        if (not hasattr(device, "take_bug_report_on_fail")
-                or device.take_bug_report_on_fail):
-            device.take_bug_report(test_name, begin_time)
-
-        if hasattr(device,
-                   "hard_reboot_on_fail") and device.hard_reboot_on_fail:
-            device.reboot(reboot_type='hard', testbed_pdus=self.pdu_devices)
-
-    def download_ap_logs(self):
-        """Downloads the DHCP and hostapad logs from the access_point.
-
-        Using the current TestClassContext and TestCaseContext this method pulls
-        the DHCP and hostapd logs and outputs them to the correct path.
-        """
-        current_path = context.get_current_context().get_full_output_path()
-
-        dhcp_log = self.access_point.get_dhcp_logs()
-        if dhcp_log:
-            dhcp_log_path = os.path.join(current_path, "dhcp_log.txt")
-            with open(dhcp_log_path, 'w') as f:
-                f.write(dhcp_log)
-
-        hostapd_logs = self.access_point.get_hostapd_logs()
-        for interface in hostapd_logs:
-            hostapd_log_path = os.path.join(current_path,
-                                            f'hostapd_log_{interface}.txt')
-            with open(hostapd_log_path, 'w') as f:
-                f.write(hostapd_logs[interface])
-
-        radvd_log = self.access_point.get_radvd_logs()
-        if radvd_log:
-            radvd_log_path = os.path.join(current_path, "radvd_log.txt")
-            with open(radvd_log_path, 'w') as f:
-                f.write(radvd_log)
-
-    def get_psk_network(
-            self,
-            mirror_ap,
-            reference_networks,
-            hidden=False,
-            same_ssid=False,
-            security_mode=hostapd_constants.WPA2_STRING,
-            ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-            ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-            passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-            passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G):
-        """Generates SSID and passphrase for a WPA2 network using random
-           generator.
-
-           Args:
-               mirror_ap: Boolean, determines if both APs use the same hostapd
-                          config or different configs.
-               reference_networks: List of PSK networks.
-               same_ssid: Boolean, determines if both bands on AP use the same
-                          SSID.
-               ssid_length_2gecond AP Int, number of characters to use for 2G SSID.
-               ssid_length_5g: Int, number of characters to use for 5G SSID.
-               passphrase_length_2g: Int, length of password for 2G network.
-               passphrase_length_5g: Int, length of password for 5G network.
-
-           Returns: A dict of 2G and 5G network lists for hostapd configuration.
-
-        """
-        network_dict_2g = {}
-        network_dict_5g = {}
-        ref_5g_security = security_mode
-        ref_2g_security = security_mode
-
-        if same_ssid:
-            ref_2g_ssid = 'xg_%s' % utils.rand_ascii_str(ssid_length_2g)
-            ref_5g_ssid = ref_2g_ssid
-
-            ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
-            ref_5g_passphrase = ref_2g_passphrase
-
-        else:
-            ref_2g_ssid = '2g_%s' % utils.rand_ascii_str(ssid_length_2g)
-            ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
-
-            ref_5g_ssid = '5g_%s' % utils.rand_ascii_str(ssid_length_5g)
-            ref_5g_passphrase = utils.rand_ascii_str(passphrase_length_5g)
-
-        network_dict_2g = {
-            "SSID": ref_2g_ssid,
-            "security": ref_2g_security,
-            "password": ref_2g_passphrase,
-            "hiddenSSID": hidden
-        }
-
-        network_dict_5g = {
-            "SSID": ref_5g_ssid,
-            "security": ref_5g_security,
-            "password": ref_5g_passphrase,
-            "hiddenSSID": hidden
-        }
-
-        ap = 0
-        for ap in range(MAX_AP_COUNT):
-            reference_networks.append({
-                "2g": copy.copy(network_dict_2g),
-                "5g": copy.copy(network_dict_5g)
-            })
-            if not mirror_ap:
-                break
-        return {"2g": network_dict_2g, "5g": network_dict_5g}
-
-    def get_open_network(self,
-                         mirror_ap,
-                         open_network,
-                         hidden=False,
-                         same_ssid=False,
-                         ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-                         ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-                         security_mode='none'):
-        """Generates SSIDs for a open network using a random generator.
-
-        Args:
-            mirror_ap: Boolean, determines if both APs use the same hostapd
-                       config or different configs.
-            open_network: List of open networks.
-            same_ssid: Boolean, determines if both bands on AP use the same
-                       SSID.
-            ssid_length_2g: Int, number of characters to use for 2G SSID.
-            ssid_length_5g: Int, number of characters to use for 5G SSID.
-            security_mode: 'none' for open and 'OWE' for WPA3 OWE.
-
-        Returns: A dict of 2G and 5G network lists for hostapd configuration.
-
-        """
-        network_dict_2g = {}
-        network_dict_5g = {}
-
-        if same_ssid:
-            open_2g_ssid = 'xg_%s' % utils.rand_ascii_str(ssid_length_2g)
-            open_5g_ssid = open_2g_ssid
-
-        else:
-            open_2g_ssid = '2g_%s' % utils.rand_ascii_str(ssid_length_2g)
-            open_5g_ssid = '5g_%s' % utils.rand_ascii_str(ssid_length_5g)
-
-        network_dict_2g = {
-            "SSID": open_2g_ssid,
-            "security": security_mode,
-            "hiddenSSID": hidden
-        }
-
-        network_dict_5g = {
-            "SSID": open_5g_ssid,
-            "security": security_mode,
-            "hiddenSSID": hidden
-        }
-
-        ap = 0
-        for ap in range(MAX_AP_COUNT):
-            open_network.append({
-                "2g": copy.copy(network_dict_2g),
-                "5g": copy.copy(network_dict_5g)
-            })
-            if not mirror_ap:
-                break
-        return {"2g": network_dict_2g, "5g": network_dict_5g}
-
-    def get_wep_network(
-            self,
-            mirror_ap,
-            networks,
-            hidden=False,
-            same_ssid=False,
-            ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-            ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-            passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-            passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G):
-        """Generates SSID and passphrase for a WEP network using random
-           generator.
-
-           Args:
-               mirror_ap: Boolean, determines if both APs use the same hostapd
-                          config or different configs.
-               networks: List of WEP networks.
-               same_ssid: Boolean, determines if both bands on AP use the same
-                          SSID.
-               ssid_length_2gecond AP Int, number of characters to use for 2G SSID.
-               ssid_length_5g: Int, number of characters to use for 5G SSID.
-               passphrase_length_2g: Int, length of password for 2G network.
-               passphrase_length_5g: Int, length of password for 5G network.
-
-           Returns: A dict of 2G and 5G network lists for hostapd configuration.
-
-        """
-        network_dict_2g = {}
-        network_dict_5g = {}
-        ref_5g_security = hostapd_constants.WEP_STRING
-        ref_2g_security = hostapd_constants.WEP_STRING
-
-        if same_ssid:
-            ref_2g_ssid = 'xg_%s' % utils.rand_ascii_str(ssid_length_2g)
-            ref_5g_ssid = ref_2g_ssid
-
-            ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
-            ref_5g_passphrase = ref_2g_passphrase
-
-        else:
-            ref_2g_ssid = '2g_%s' % utils.rand_ascii_str(ssid_length_2g)
-            ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
-
-            ref_5g_ssid = '5g_%s' % utils.rand_ascii_str(ssid_length_5g)
-            ref_5g_passphrase = utils.rand_hex_str(passphrase_length_5g)
-
-        network_dict_2g = {
-            "SSID": ref_2g_ssid,
-            "security": ref_2g_security,
-            "wepKeys": [ref_2g_passphrase] * 4,
-            "hiddenSSID": hidden
-        }
-
-        network_dict_5g = {
-            "SSID": ref_5g_ssid,
-            "security": ref_5g_security,
-            "wepKeys": [ref_2g_passphrase] * 4,
-            "hiddenSSID": hidden
-        }
-
-        ap = 0
-        for ap in range(MAX_AP_COUNT):
-            networks.append({
-                "2g": copy.copy(network_dict_2g),
-                "5g": copy.copy(network_dict_5g)
-            })
-            if not mirror_ap:
-                break
-        return {"2g": network_dict_2g, "5g": network_dict_5g}
-
-    def update_bssid(self, ap_instance, ap, network, band):
-        """Get bssid and update network dictionary.
-
-        Args:
-            ap_instance: Accesspoint index that was configured.
-            ap: Accesspoint object corresponding to ap_instance.
-            network: Network dictionary.
-            band: Wifi networks' band.
-
-        """
-        bssid = ap.get_bssid_from_ssid(network["SSID"], band)
-
-        if network["security"] == hostapd_constants.WPA2_STRING:
-            # TODO:(bamahadev) Change all occurances of reference_networks
-            # to wpa_networks.
-            self.reference_networks[ap_instance][band]["bssid"] = bssid
-        if network["security"] == hostapd_constants.WPA_STRING:
-            self.wpa_networks[ap_instance][band]["bssid"] = bssid
-        if network["security"] == hostapd_constants.WEP_STRING:
-            self.wep_networks[ap_instance][band]["bssid"] = bssid
-        if network["security"] == hostapd_constants.ENT_STRING:
-            if "bssid" not in self.ent_networks[ap_instance][band]:
-                self.ent_networks[ap_instance][band]["bssid"] = bssid
-            else:
-                self.ent_networks_pwd[ap_instance][band]["bssid"] = bssid
-        if network["security"] == 'none':
-            self.open_network[ap_instance][band]["bssid"] = bssid
-
-    def populate_bssid(self, ap_instance, ap, networks_5g, networks_2g):
-        """Get bssid for a given SSID and add it to the network dictionary.
-
-        Args:
-            ap_instance: Accesspoint index that was configured.
-            ap: Accesspoint object corresponding to ap_instance.
-            networks_5g: List of 5g networks configured on the APs.
-            networks_2g: List of 2g networks configured on the APs.
-
-        """
-
-        if not (networks_5g or networks_2g):
-            return
-
-        for network in networks_5g:
-            if 'channel' in network:
-                continue
-            self.update_bssid(ap_instance, ap, network,
-                              hostapd_constants.BAND_5G)
-
-        for network in networks_2g:
-            if 'channel' in network:
-                continue
-            self.update_bssid(ap_instance, ap, network,
-                              hostapd_constants.BAND_2G)
-
-    def configure_openwrt_ap_and_start(
-            self,
-            channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            channel_5g_ap2=None,
-            channel_2g_ap2=None,
-            ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-            passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-            ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-            passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
-            mirror_ap=False,
-            hidden=False,
-            same_ssid=False,
-            open_network=False,
-            wpa1_network=False,
-            wpa_network=False,
-            wep_network=False,
-            ent_network=False,
-            ent_network_pwd=False,
-            owe_network=False,
-            sae_network=False,
-            saemixed_network=False,
-            radius_conf_2g=None,
-            radius_conf_5g=None,
-            radius_conf_pwd=None,
-            ap_count=1,
-            ieee80211w=None):
-        """Create, configure and start OpenWrt AP.
-
-        Args:
-            channel_5g: 5G channel to configure.
-            channel_2g: 2G channel to configure.
-            channel_5g_ap2: 5G channel to configure on AP2.
-            channel_2g_ap2: 2G channel to configure on AP2.
-            ssid_length_2g: Int, number of characters to use for 2G SSID.
-            passphrase_length_2g: Int, length of password for 2G network.
-            ssid_length_5g: Int, number of characters to use for 5G SSID.
-            passphrase_length_5g: Int, length of password for 5G network.
-            same_ssid: Boolean, determines if both bands on AP use the same SSID.
-            open_network: Boolean, to check if open network should be configured.
-            wpa_network: Boolean, to check if wpa network should be configured.
-            wep_network: Boolean, to check if wep network should be configured.
-            ent_network: Boolean, to check if ent network should be configured.
-            ent_network_pwd: Boolean, to check if ent pwd network should be configured.
-            owe_network: Boolean, to check if owe network should be configured.
-            sae_network: Boolean, to check if sae network should be configured.
-            saemixed_network: Boolean, to check if saemixed network should be configured.
-            radius_conf_2g: dictionary with enterprise radius server details.
-            radius_conf_5g: dictionary with enterprise radius server details.
-            radius_conf_pwd: dictionary with enterprise radiuse server details.
-            ap_count: APs to configure.
-            ieee80211w:PMF to configure
-        """
-        if mirror_ap and ap_count == 1:
-            raise ValueError("ap_count cannot be 1 if mirror_ap is True.")
-        if (channel_5g_ap2 or channel_2g_ap2) and ap_count == 1:
-            raise ValueError(
-                "ap_count cannot be 1 if channels of AP2 are provided.")
-        # we are creating a channel list for 2G and 5G bands. The list is of
-        # size 2 and this is based on the assumption that each testbed will have
-        # at most 2 APs.
-        if not channel_5g_ap2:
-            channel_5g_ap2 = channel_5g
-        if not channel_2g_ap2:
-            channel_2g_ap2 = channel_2g
-        channels_2g = [channel_2g, channel_2g_ap2]
-        channels_5g = [channel_5g, channel_5g_ap2]
-
-        self.reference_networks = []
-        self.wpa1_networks = []
-        self.wpa_networks = []
-        self.wep_networks = []
-        self.ent_networks = []
-        self.ent_networks_pwd = []
-        self.open_network = []
-        self.owe_networks = []
-        self.sae_networks = []
-        self.saemixed_networks = []
-        self.bssid_map = []
-        for i in range(ap_count):
-            network_list = []
-            if wpa1_network:
-                wpa1_dict = self.get_psk_network(mirror_ap, self.wpa1_networks,
-                                                 hidden, same_ssid,
-                                                 ssid_length_2g,
-                                                 ssid_length_5g,
-                                                 passphrase_length_2g,
-                                                 passphrase_length_5g)
-                wpa1_dict[hostapd_constants.BAND_2G]["security"] = "psk"
-                wpa1_dict[hostapd_constants.BAND_5G]["security"] = "psk"
-                wpa1_dict[hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w
-                wpa1_dict[hostapd_constants.BAND_5G]["ieee80211w"] = ieee80211w
-                self.wpa1_networks.append(wpa1_dict)
-                network_list.append(wpa1_dict)
-            if wpa_network:
-                wpa_dict = self.get_psk_network(mirror_ap,
-                                                self.reference_networks,
-                                                hidden, same_ssid,
-                                                ssid_length_2g, ssid_length_5g,
-                                                passphrase_length_2g,
-                                                passphrase_length_5g)
-                wpa_dict[hostapd_constants.BAND_2G]["security"] = "psk2"
-                wpa_dict[hostapd_constants.BAND_5G]["security"] = "psk2"
-                wpa_dict[hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w
-                wpa_dict[hostapd_constants.BAND_5G]["ieee80211w"] = ieee80211w
-                self.wpa_networks.append(wpa_dict)
-                network_list.append(wpa_dict)
-            if wep_network:
-                wep_dict = self.get_wep_network(mirror_ap, self.wep_networks,
-                                                hidden, same_ssid,
-                                                ssid_length_2g, ssid_length_5g)
-                network_list.append(wep_dict)
-            if ent_network:
-                ent_dict = self.get_open_network(mirror_ap, self.ent_networks,
-                                                 hidden, same_ssid,
-                                                 ssid_length_2g,
-                                                 ssid_length_5g)
-                ent_dict["2g"]["security"] = "wpa2"
-                ent_dict["2g"].update(radius_conf_2g)
-                ent_dict["5g"]["security"] = "wpa2"
-                ent_dict["5g"].update(radius_conf_5g)
-                network_list.append(ent_dict)
-            if ent_network_pwd:
-                ent_pwd_dict = self.get_open_network(mirror_ap,
-                                                     self.ent_networks_pwd,
-                                                     hidden, same_ssid,
-                                                     ssid_length_2g,
-                                                     ssid_length_5g)
-                ent_pwd_dict["2g"]["security"] = "wpa2"
-                ent_pwd_dict["2g"].update(radius_conf_pwd)
-                ent_pwd_dict["5g"]["security"] = "wpa2"
-                ent_pwd_dict["5g"].update(radius_conf_pwd)
-                network_list.append(ent_pwd_dict)
-            if open_network:
-                open_dict = self.get_open_network(mirror_ap, self.open_network,
-                                                  hidden, same_ssid,
-                                                  ssid_length_2g,
-                                                  ssid_length_5g)
-                network_list.append(open_dict)
-            if owe_network:
-                owe_dict = self.get_open_network(mirror_ap, self.owe_networks,
-                                                 hidden, same_ssid,
-                                                 ssid_length_2g,
-                                                 ssid_length_5g, "OWE")
-                owe_dict[hostapd_constants.BAND_2G]["security"] = "owe"
-                owe_dict[hostapd_constants.BAND_5G]["security"] = "owe"
-                network_list.append(owe_dict)
-            if sae_network:
-                sae_dict = self.get_psk_network(mirror_ap, self.sae_networks,
-                                                hidden, same_ssid,
-                                                hostapd_constants.SAE_KEY_MGMT,
-                                                ssid_length_2g, ssid_length_5g,
-                                                passphrase_length_2g,
-                                                passphrase_length_5g)
-                sae_dict[hostapd_constants.BAND_2G]["security"] = "sae"
-                sae_dict[hostapd_constants.BAND_5G]["security"] = "sae"
-                network_list.append(sae_dict)
-            if saemixed_network:
-                saemixed_dict = self.get_psk_network(
-                    mirror_ap, self.saemixed_networks, hidden, same_ssid,
-                    hostapd_constants.SAE_KEY_MGMT, ssid_length_2g,
-                    ssid_length_5g, passphrase_length_2g, passphrase_length_5g)
-                saemixed_dict[
-                    hostapd_constants.BAND_2G]["security"] = "sae-mixed"
-                saemixed_dict[
-                    hostapd_constants.BAND_5G]["security"] = "sae-mixed"
-                saemixed_dict[
-                    hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w
-                saemixed_dict[
-                    hostapd_constants.BAND_5G]["ieee80211w"] = ieee80211w
-                network_list.append(saemixed_dict)
-            self.access_points[i].configure_ap(network_list, channels_2g[i],
-                                               channels_5g[i])
-            self.access_points[i].start_ap()
-            self.bssid_map.append(
-                self.access_points[i].get_bssids_for_wifi_networks())
-            if mirror_ap:
-                self.access_points[i + 1].configure_ap(network_list,
-                                                       channels_2g[i + 1],
-                                                       channels_5g[i + 1])
-                self.access_points[i + 1].start_ap()
-                self.bssid_map.append(
-                    self.access_points[i + 1].get_bssids_for_wifi_networks())
-                break
-
-    def legacy_configure_ap_and_start(
-            self,
-            channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            max_2g_networks=hostapd_constants.AP_DEFAULT_MAX_SSIDS_2G,
-            max_5g_networks=hostapd_constants.AP_DEFAULT_MAX_SSIDS_5G,
-            ap_ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-            ap_passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-            ap_ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-            ap_passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
-            hidden=False,
-            same_ssid=False,
-            mirror_ap=True,
-            wpa_network=False,
-            wep_network=False,
-            ent_network=False,
-            radius_conf_2g=None,
-            radius_conf_5g=None,
-            ent_network_pwd=False,
-            radius_conf_pwd=None,
-            ap_count=1):
-
-        config_count = 1
-        count = 0
-
-        # For example, the NetworkSelector tests use 2 APs and require that
-        # both APs are not mirrored.
-        if not mirror_ap and ap_count == 1:
-            raise ValueError("ap_count cannot be 1 if mirror_ap is False.")
-
-        if not mirror_ap:
-            config_count = ap_count
-
-        self.user_params["reference_networks"] = []
-        self.user_params["open_network"] = []
-        if wpa_network:
-            self.user_params["wpa_networks"] = []
-        if wep_network:
-            self.user_params["wep_networks"] = []
-        if ent_network:
-            self.user_params["ent_networks"] = []
-        if ent_network_pwd:
-            self.user_params["ent_networks_pwd"] = []
-
-        # kill hostapd & dhcpd if the cleanup was not successful
-        for i in range(len(self.access_points)):
-            self.log.debug("Check ap state and cleanup")
-            self._cleanup_hostapd_and_dhcpd(i)
-
-        for count in range(config_count):
-
-            network_list_2g = []
-            network_list_5g = []
-
-            orig_network_list_2g = []
-            orig_network_list_5g = []
-
-            network_list_2g.append({"channel": channel_2g})
-            network_list_5g.append({"channel": channel_5g})
-
-            networks_dict = self.get_psk_network(
-                mirror_ap,
-                self.user_params["reference_networks"],
-                hidden=hidden,
-                same_ssid=same_ssid)
-            self.reference_networks = self.user_params["reference_networks"]
-
-            network_list_2g.append(networks_dict["2g"])
-            network_list_5g.append(networks_dict["5g"])
-
-            # When same_ssid is set, only configure one set of WPA networks.
-            # We cannot have more than one set because duplicate interface names
-            # are not allowed.
-            # TODO(bmahadev): Provide option to select the type of network,
-            # instead of defaulting to WPA.
-            if not same_ssid:
-                networks_dict = self.get_open_network(
-                    mirror_ap,
-                    self.user_params["open_network"],
-                    hidden=hidden,
-                    same_ssid=same_ssid)
-                self.open_network = self.user_params["open_network"]
-
-                network_list_2g.append(networks_dict["2g"])
-                network_list_5g.append(networks_dict["5g"])
-
-                if wpa_network:
-                    networks_dict = self.get_psk_network(
-                        mirror_ap,
-                        self.user_params["wpa_networks"],
-                        hidden=hidden,
-                        same_ssid=same_ssid,
-                        security_mode=hostapd_constants.WPA_STRING)
-                    self.wpa_networks = self.user_params["wpa_networks"]
-
-                    network_list_2g.append(networks_dict["2g"])
-                    network_list_5g.append(networks_dict["5g"])
-
-                if wep_network:
-                    networks_dict = self.get_wep_network(
-                        mirror_ap,
-                        self.user_params["wep_networks"],
-                        hidden=hidden,
-                        same_ssid=same_ssid)
-                    self.wep_networks = self.user_params["wep_networks"]
-
-                    network_list_2g.append(networks_dict["2g"])
-                    network_list_5g.append(networks_dict["5g"])
-
-                if ent_network:
-                    networks_dict = self.get_open_network(
-                        mirror_ap,
-                        self.user_params["ent_networks"],
-                        hidden=hidden,
-                        same_ssid=same_ssid)
-                    networks_dict["2g"][
-                        "security"] = hostapd_constants.ENT_STRING
-                    networks_dict["2g"].update(radius_conf_2g)
-                    networks_dict["5g"][
-                        "security"] = hostapd_constants.ENT_STRING
-                    networks_dict["5g"].update(radius_conf_5g)
-                    self.ent_networks = self.user_params["ent_networks"]
-
-                    network_list_2g.append(networks_dict["2g"])
-                    network_list_5g.append(networks_dict["5g"])
-
-                if ent_network_pwd:
-                    networks_dict = self.get_open_network(
-                        mirror_ap,
-                        self.user_params["ent_networks_pwd"],
-                        hidden=hidden,
-                        same_ssid=same_ssid)
-                    networks_dict["2g"][
-                        "security"] = hostapd_constants.ENT_STRING
-                    networks_dict["2g"].update(radius_conf_pwd)
-                    networks_dict["5g"][
-                        "security"] = hostapd_constants.ENT_STRING
-                    networks_dict["5g"].update(radius_conf_pwd)
-                    self.ent_networks_pwd = self.user_params[
-                        "ent_networks_pwd"]
-
-                    network_list_2g.append(networks_dict["2g"])
-                    network_list_5g.append(networks_dict["5g"])
-
-            orig_network_list_5g = copy.copy(network_list_5g)
-            orig_network_list_2g = copy.copy(network_list_2g)
-
-            if len(network_list_5g) > 1:
-                self.config_5g = self._generate_legacy_ap_config(
-                    network_list_5g)
-            if len(network_list_2g) > 1:
-                self.config_2g = self._generate_legacy_ap_config(
-                    network_list_2g)
-
-            self.access_points[count].start_ap(self.config_2g)
-            self.access_points[count].start_ap(self.config_5g)
-            self.populate_bssid(count, self.access_points[count],
-                                orig_network_list_5g, orig_network_list_2g)
-
-        # Repeat configuration on the second router.
-        if mirror_ap and ap_count == 2:
-            self.access_points[AP_2].start_ap(self.config_2g)
-            self.access_points[AP_2].start_ap(self.config_5g)
-            self.populate_bssid(AP_2, self.access_points[AP_2],
-                                orig_network_list_5g, orig_network_list_2g)
-
-    def _kill_processes(self, ap, daemon):
-        """ Kill hostapd and dhcpd daemons
-
-        Args:
-            ap: AP to cleanup
-            daemon: process to kill
-
-        Returns: True/False if killing process is successful
-        """
-        self.log.info("Killing %s" % daemon)
-        pids = ap.ssh.run('pidof %s' % daemon, ignore_status=True)
-        if pids.stdout:
-            ap.ssh.run('kill %s' % pids.stdout, ignore_status=True)
-        time.sleep(3)
-        pids = ap.ssh.run('pidof %s' % daemon, ignore_status=True)
-        if pids.stdout:
-            return False
-        return True
-
-    def _cleanup_hostapd_and_dhcpd(self, count):
-        """ Check if AP was cleaned up properly
-
-        Kill hostapd and dhcpd processes if cleanup was not successful in the
-        last run
-
-        Args:
-            count: AP to check
-
-        Returns:
-            New AccessPoint object if AP required cleanup
-
-        Raises:
-            Error: if the AccessPoint timed out to setup
-        """
-        ap = self.access_points[count]
-        phy_ifaces = ap.interfaces.get_physical_interface()
-        kill_hostapd = False
-        for iface in phy_ifaces:
-            if '2g_' in iface or '5g_' in iface or 'xg_' in iface:
-                kill_hostapd = True
-                break
-
-        if not kill_hostapd:
-            return
-
-        self.log.debug("Cleanup AP")
-        if not self._kill_processes(ap, 'hostapd') or \
-            not self._kill_processes(ap, 'dhcpd'):
-            raise ("Failed to cleanup AP")
-
-        ap.__init__(self.user_params['AccessPoint'][count])
-
-    def _generate_legacy_ap_config(self, network_list):
-        bss_settings = []
-        wlan_2g = self.access_points[AP_1].wlan_2g
-        wlan_5g = self.access_points[AP_1].wlan_5g
-        ap_settings = network_list.pop(0)
-        # TODO:(bmahadev) This is a bug. We should not have to pop the first
-        # network in the list and treat it as a separate case. Instead,
-        # create_ap_preset() should be able to take NULL ssid and security and
-        # build config based on the bss_Settings alone.
-        hostapd_config_settings = network_list.pop(0)
-        for network in network_list:
-            if "password" in network:
-                bss_settings.append(
-                    hostapd_bss_settings.BssSettings(
-                        name=network["SSID"],
-                        ssid=network["SSID"],
-                        hidden=network["hiddenSSID"],
-                        security=hostapd_security.Security(
-                            security_mode=network["security"],
-                            password=network["password"])))
-            elif "wepKeys" in network:
-                bss_settings.append(
-                    hostapd_bss_settings.BssSettings(
-                        name=network["SSID"],
-                        ssid=network["SSID"],
-                        hidden=network["hiddenSSID"],
-                        security=hostapd_security.Security(
-                            security_mode=network["security"],
-                            password=network["wepKeys"][0])))
-            elif network["security"] == hostapd_constants.ENT_STRING:
-                bss_settings.append(
-                    hostapd_bss_settings.BssSettings(
-                        name=network["SSID"],
-                        ssid=network["SSID"],
-                        hidden=network["hiddenSSID"],
-                        security=hostapd_security.Security(
-                            security_mode=network["security"],
-                            radius_server_ip=network["radius_server_ip"],
-                            radius_server_port=network["radius_server_port"],
-                            radius_server_secret=network[
-                                "radius_server_secret"])))
-            else:
-                bss_settings.append(
-                    hostapd_bss_settings.BssSettings(
-                        name=network["SSID"],
-                        ssid=network["SSID"],
-                        hidden=network["hiddenSSID"]))
-        if "password" in hostapd_config_settings:
-            config = hostapd_ap_preset.create_ap_preset(
-                iface_wlan_2g=wlan_2g,
-                iface_wlan_5g=wlan_5g,
-                channel=ap_settings["channel"],
-                ssid=hostapd_config_settings["SSID"],
-                hidden=hostapd_config_settings["hiddenSSID"],
-                security=hostapd_security.Security(
-                    security_mode=hostapd_config_settings["security"],
-                    password=hostapd_config_settings["password"]),
-                bss_settings=bss_settings)
-        elif "wepKeys" in hostapd_config_settings:
-            config = hostapd_ap_preset.create_ap_preset(
-                iface_wlan_2g=wlan_2g,
-                iface_wlan_5g=wlan_5g,
-                channel=ap_settings["channel"],
-                ssid=hostapd_config_settings["SSID"],
-                hidden=hostapd_config_settings["hiddenSSID"],
-                security=hostapd_security.Security(
-                    security_mode=hostapd_config_settings["security"],
-                    password=hostapd_config_settings["wepKeys"][0]),
-                bss_settings=bss_settings)
-        else:
-            config = hostapd_ap_preset.create_ap_preset(
-                iface_wlan_2g=wlan_2g,
-                iface_wlan_5g=wlan_5g,
-                channel=ap_settings["channel"],
-                ssid=hostapd_config_settings["SSID"],
-                hidden=hostapd_config_settings["hiddenSSID"],
-                bss_settings=bss_settings)
-        return config
-
-    def configure_packet_capture(
-            self,
-            channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G):
-        """Configure packet capture for 2G and 5G bands.
-
-        Args:
-            channel_5g: Channel to set the monitor mode to for 5G band.
-            channel_2g: Channel to set the monitor mode to for 2G band.
-        """
-        self.packet_capture = self.packet_capture[0]
-        result = self.packet_capture.configure_monitor_mode(
-            hostapd_constants.BAND_2G, channel_2g)
-        if not result:
-            raise ValueError("Failed to configure channel for 2G band")
-
-        result = self.packet_capture.configure_monitor_mode(
-            hostapd_constants.BAND_5G, channel_5g)
-        if not result:
-            raise ValueError("Failed to configure channel for 5G band.")
-
-    @staticmethod
-    def wifi_test_wrap(fn):
-
-        def _safe_wrap_test_case(self, *args, **kwargs):
-            test_id = "%s:%s:%s" % (self.__class__.__name__, self.test_name,
-                                    self.log_begin_time.replace(' ', '-'))
-            self.test_id = test_id
-            self.result_detail = ""
-            tries = int(self.user_params.get("wifi_auto_rerun", 3))
-            for ad in self.android_devices:
-                ad.log_path = self.log_path
-            for i in range(tries + 1):
-                result = True
-                if i > 0:
-                    log_string = "[Test Case] RETRY:%s %s" % (i,
-                                                              self.test_name)
-                    self.log.info(log_string)
-                    self._teardown_test(self.test_name)
-                    self._setup_test(self.test_name)
-                try:
-                    result = fn(self, *args, **kwargs)
-                except signals.TestFailure as e:
-                    self.log.warn("Error msg: %s" % e)
-                    if self.result_detail:
-                        signal.details = self.result_detail
-                    result = False
-                except signals.TestSignal:
-                    if self.result_detail:
-                        signal.details = self.result_detail
-                    raise
-                except Exception as e:
-                    self.log.exception(e)
-                    asserts.fail(self.result_detail)
-                if result is False:
-                    if i < tries:
-                        continue
-                else:
-                    break
-            if result is not False:
-                asserts.explicit_pass(self.result_detail)
-            else:
-                asserts.fail(self.result_detail)
-
-        return _safe_wrap_test_case
diff --git a/src/antlion/test_utils/wifi/aware/AwareBaseTest.py b/src/antlion/test_utils/wifi/aware/AwareBaseTest.py
deleted file mode 100644
index 45b222d..0000000
--- a/src/antlion/test_utils/wifi/aware/AwareBaseTest.py
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-from antlion import asserts
-from antlion import utils
-from antlion.base_test import BaseTestClass
-from antlion.keys import Config
-from antlion.test_utils.net import net_test_utils as nutils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.wifi.aware import aware_const as aconsts
-from antlion.test_utils.wifi.aware import aware_test_utils as autils
-
-
-class AwareBaseTest(BaseTestClass):
-    # message ID counter to make sure all uses are unique
-    msg_id = 0
-
-    # offset (in seconds) to separate the start-up of multiple devices.
-    # De-synchronizes the start-up time so that they don't start and stop scanning
-    # at the same time - which can lead to very long clustering times.
-    device_startup_offset = 2
-
-    def setup_class(self):
-        opt_param = ["pixel_models", "cnss_diag_file", "ranging_role_concurrency_flexible_models"]
-        self.unpack_userparams(opt_param_names=opt_param)
-        if hasattr(self, "cnss_diag_file"):
-            if isinstance(self.cnss_diag_file, list):
-                self.cnss_diag_file = self.cnss_diag_file[0]
-            if not os.path.isfile(self.cnss_diag_file):
-                self.cnss_diag_file = os.path.join(
-                    self.user_params[Config.key_config_path.value],
-                    self.cnss_diag_file)
-
-    def setup_test(self):
-        required_params = ("aware_default_power_mode",
-                           "dbs_supported_models",)
-        self.unpack_userparams(required_params)
-
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.start_cnss_diags(
-                self.android_devices, self.cnss_diag_file, self.pixel_models)
-        self.tcpdump_proc = []
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                proc = nutils.start_tcpdump(ad, self.test_name)
-                self.tcpdump_proc.append((ad, proc))
-
-        for ad in self.android_devices:
-            ad.droid.wifiEnableVerboseLogging(1)
-            asserts.skip_if(
-                not ad.droid.doesDeviceSupportWifiAwareFeature(),
-                "Device under test does not support Wi-Fi Aware - skipping test"
-            )
-            aware_avail = ad.droid.wifiIsAwareAvailable()
-            ad.droid.wifiP2pClose()
-            wutils.wifi_toggle_state(ad, True)
-            utils.set_location_service(ad, True)
-            if not aware_avail:
-                self.log.info('Aware not available. Waiting ...')
-                autils.wait_for_event(ad,
-                                      aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
-            ad.aware_capabilities = autils.get_aware_capabilities(ad)
-            self.reset_device_parameters(ad)
-            self.reset_device_statistics(ad)
-            self.set_power_mode_parameters(ad)
-            wutils.set_wifi_country_code(ad, wutils.WifiEnums.CountryCode.US)
-            autils.configure_ndp_allow_any_override(ad, True)
-            # set randomization interval to 0 (disable) to reduce likelihood of
-            # interference in tests
-            autils.configure_mac_random_interval(ad, 0)
-            ad.ed.clear_all_events()
-
-    def teardown_test(self):
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(
-                    proc[0], proc[1], self.test_name, pull_dump=False)
-        self.tcpdump_proc = []
-        for ad in self.android_devices:
-            if not ad.droid.doesDeviceSupportWifiAwareFeature():
-                return
-            ad.droid.wifiP2pClose()
-            ad.droid.wifiAwareDestroyAll()
-            self.reset_device_parameters(ad)
-            autils.validate_forbidden_callbacks(ad)
-
-    def reset_device_parameters(self, ad):
-        """Reset device configurations which may have been set by tests. Should be
-    done before tests start (in case previous one was killed without tearing
-    down) and after they end (to leave device in usable state).
-
-    Args:
-      ad: device to be reset
-    """
-        ad.adb.shell("cmd wifiaware reset")
-
-    def reset_device_statistics(self, ad):
-        """Reset device statistics.
-
-    Args:
-        ad: device to be reset
-    """
-        ad.adb.shell("cmd wifiaware native_cb get_cb_count --reset")
-
-    def set_power_mode_parameters(self, ad):
-        """Set the power configuration DW parameters for the device based on any
-    configuration overrides (if provided)"""
-        if self.aware_default_power_mode == "INTERACTIVE":
-            autils.config_settings_high_power(ad)
-        elif self.aware_default_power_mode == "NON_INTERACTIVE":
-            autils.config_settings_low_power(ad)
-        else:
-            asserts.assert_false(
-                "The 'aware_default_power_mode' configuration must be INTERACTIVE or "
-                "NON_INTERACTIVE")
-
-    def get_next_msg_id(self):
-        """Increment the message ID and returns the new value. Guarantees that
-    each call to the method returns a unique value.
-
-    Returns: a new message id value.
-    """
-        self.msg_id = self.msg_id + 1
-        return self.msg_id
-
-    def on_fail(self, test_name, begin_time):
-        for ad in self.android_devices:
-            ad.take_bug_report(test_name, begin_time)
-            ad.cat_adb_log(test_name, begin_time)
-            wutils.get_ssrdumps(ad)
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-            for ad in self.android_devices:
-                wutils.get_cnss_diag_log(ad)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(proc[0], proc[1], self.test_name)
-        self.tcpdump_proc = []
diff --git a/src/antlion/test_utils/wifi/aware/__init__.py b/src/antlion/test_utils/wifi/aware/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/wifi/aware/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/wifi/aware/aware_const.py b/src/antlion/test_utils/wifi/aware/aware_const.py
deleted file mode 100644
index a720196..0000000
--- a/src/antlion/test_utils/wifi/aware/aware_const.py
+++ /dev/null
@@ -1,189 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-######################################################
-# Aware power settings values for interactive (high power) and
-# non-interactive (low power) modes
-######################################################
-
-POWER_DW_24_INTERACTIVE = 1
-POWER_DW_5_INTERACTIVE = 1
-POWER_DISC_BEACON_INTERVAL_INTERACTIVE = 0
-POWER_NUM_SS_IN_DISC_INTERACTIVE = 0
-POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE = 0
-
-POWER_DW_24_NON_INTERACTIVE = 4
-POWER_DW_5_NON_INTERACTIVE = 0
-POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE = 0
-POWER_NUM_SS_IN_DISC_NON_INTERACTIVE = 0
-POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE = 0
-
-######################################################
-# Broadcast events
-######################################################
-BROADCAST_WIFI_AWARE_AVAILABLE = "WifiAwareAvailable"
-BROADCAST_WIFI_AWARE_NOT_AVAILABLE = "WifiAwareNotAvailable"
-
-######################################################
-# ConfigRequest keys
-######################################################
-
-CONFIG_KEY_5G_BAND = "Support5gBand"
-CONFIG_KEY_MASTER_PREF = "MasterPreference"
-CONFIG_KEY_CLUSTER_LOW = "ClusterLow"
-CONFIG_KEY_CLUSTER_HIGH = "ClusterHigh"
-CONFIG_KEY_ENABLE_IDEN_CB = "EnableIdentityChangeCallback"
-
-######################################################
-# Publish & Subscribe Config keys
-######################################################
-
-DISCOVERY_KEY_SERVICE_NAME = "ServiceName"
-DISCOVERY_KEY_SSI = "ServiceSpecificInfo"
-DISCOVERY_KEY_MATCH_FILTER = "MatchFilter"
-DISCOVERY_KEY_MATCH_FILTER_LIST = "MatchFilterList"
-DISCOVERY_KEY_DISCOVERY_TYPE = "DiscoveryType"
-DISCOVERY_KEY_TTL = "TtlSec"
-DISCOVERY_KEY_TERM_CB_ENABLED = "TerminateNotificationEnabled"
-DISCOVERY_KEY_RANGING_ENABLED = "RangingEnabled"
-DISCOVERY_KEY_MIN_DISTANCE_MM = "MinDistanceMm"
-DISCOVERY_KEY_MAX_DISTANCE_MM = "MaxDistanceMm"
-
-PUBLISH_TYPE_UNSOLICITED = 0
-PUBLISH_TYPE_SOLICITED = 1
-
-SUBSCRIBE_TYPE_PASSIVE = 0
-SUBSCRIBE_TYPE_ACTIVE = 1
-
-######################################################
-# WifiAwareAttachCallback events
-######################################################
-EVENT_CB_ON_ATTACHED = "WifiAwareOnAttached"
-EVENT_CB_ON_ATTACH_FAILED = "WifiAwareOnAttachFailed"
-
-######################################################
-# WifiAwareIdentityChangedListener events
-######################################################
-EVENT_CB_ON_IDENTITY_CHANGED = "WifiAwareOnIdentityChanged"
-
-# WifiAwareAttachCallback & WifiAwareIdentityChangedListener events keys
-EVENT_CB_KEY_REASON = "reason"
-EVENT_CB_KEY_MAC = "mac"
-EVENT_CB_KEY_LATENCY_MS = "latencyMs"
-EVENT_CB_KEY_TIMESTAMP_MS = "timestampMs"
-
-######################################################
-# WifiAwareDiscoverySessionCallback events
-######################################################
-SESSION_CB_ON_PUBLISH_STARTED = "WifiAwareSessionOnPublishStarted"
-SESSION_CB_ON_SUBSCRIBE_STARTED = "WifiAwareSessionOnSubscribeStarted"
-SESSION_CB_ON_SESSION_CONFIG_UPDATED = "WifiAwareSessionOnSessionConfigUpdated"
-SESSION_CB_ON_SESSION_CONFIG_FAILED = "WifiAwareSessionOnSessionConfigFailed"
-SESSION_CB_ON_SESSION_TERMINATED = "WifiAwareSessionOnSessionTerminated"
-SESSION_CB_ON_SERVICE_DISCOVERED = "WifiAwareSessionOnServiceDiscovered"
-SESSION_CB_ON_MESSAGE_SENT = "WifiAwareSessionOnMessageSent"
-SESSION_CB_ON_MESSAGE_SEND_FAILED = "WifiAwareSessionOnMessageSendFailed"
-SESSION_CB_ON_MESSAGE_RECEIVED = "WifiAwareSessionOnMessageReceived"
-SESSION_CB_ON_SERVICE_LOST = "WifiAwareSessionOnServiceLost"
-
-# WifiAwareDiscoverySessionCallback events keys
-SESSION_CB_KEY_CB_ID = "callbackId"
-SESSION_CB_KEY_SESSION_ID = "discoverySessionId"
-SESSION_CB_KEY_REASON = "reason"
-SESSION_CB_KEY_PEER_ID = "peerId"
-SESSION_CB_KEY_SERVICE_SPECIFIC_INFO = "serviceSpecificInfo"
-SESSION_CB_KEY_MATCH_FILTER = "matchFilter"
-SESSION_CB_KEY_MATCH_FILTER_LIST = "matchFilterList"
-SESSION_CB_KEY_MESSAGE = "message"
-SESSION_CB_KEY_MESSAGE_ID = "messageId"
-SESSION_CB_KEY_MESSAGE_AS_STRING = "messageAsString"
-SESSION_CB_KEY_LATENCY_MS = "latencyMs"
-SESSION_CB_KEY_TIMESTAMP_MS = "timestampMs"
-SESSION_CB_KEY_DISTANCE_MM = "distanceMm"
-SESSION_CB_KEY_LOST_REASON = "lostReason"
-
-# WifiAwareDiscoverySessionCallback onServiceLost reason code
-REASON_PEER_NOT_VISIBLE = 1
-
-######################################################
-# WifiAwareRangingListener events (RttManager.RttListener)
-######################################################
-RTT_LISTENER_CB_ON_SUCCESS = "WifiAwareRangingListenerOnSuccess"
-RTT_LISTENER_CB_ON_FAILURE = "WifiAwareRangingListenerOnFailure"
-RTT_LISTENER_CB_ON_ABORT = "WifiAwareRangingListenerOnAborted"
-
-# WifiAwareRangingListener events (RttManager.RttListener) keys
-RTT_LISTENER_CB_KEY_CB_ID = "callbackId"
-RTT_LISTENER_CB_KEY_SESSION_ID = "sessionId"
-RTT_LISTENER_CB_KEY_RESULTS = "Results"
-RTT_LISTENER_CB_KEY_REASON = "reason"
-RTT_LISTENER_CB_KEY_DESCRIPTION = "description"
-
-######################################################
-# Capabilities keys
-######################################################
-
-CAP_MAX_CONCURRENT_AWARE_CLUSTERS = "maxConcurrentAwareClusters"
-CAP_MAX_PUBLISHES = "maxPublishes"
-CAP_MAX_SUBSCRIBES = "maxSubscribes"
-CAP_MAX_SERVICE_NAME_LEN = "maxServiceNameLen"
-CAP_MAX_MATCH_FILTER_LEN = "maxMatchFilterLen"
-CAP_MAX_TOTAL_MATCH_FILTER_LEN = "maxTotalMatchFilterLen"
-CAP_MAX_SERVICE_SPECIFIC_INFO_LEN = "maxServiceSpecificInfoLen"
-CAP_MAX_EXTENDED_SERVICE_SPECIFIC_INFO_LEN = "maxExtendedServiceSpecificInfoLen"
-CAP_MAX_NDI_INTERFACES = "maxNdiInterfaces"
-CAP_MAX_NDP_SESSIONS = "maxNdpSessions"
-CAP_MAX_APP_INFO_LEN = "maxAppInfoLen"
-CAP_MAX_QUEUED_TRANSMIT_MESSAGES = "maxQueuedTransmitMessages"
-CAP_MAX_SUBSCRIBE_INTERFACE_ADDRESSES = "maxSubscribeInterfaceAddresses"
-CAP_SUPPORTED_CIPHER_SUITES = "supportedCipherSuites"
-
-######################################################
-# WifiAwareNetworkCapabilities keys
-######################################################
-
-NET_CAP_IPV6 = "aware_ipv6"
-NET_CAP_PORT = "aware_port"
-NET_CAP_TRANSPORT_PROTOCOL = "aware_transport_protocol"
-
-######################################################
-
-# Aware NDI (NAN data-interface) name prefix
-AWARE_NDI_PREFIX = "aware_data"
-
-# Aware discovery channels
-AWARE_DISCOVERY_CHANNEL_24_BAND = 6
-AWARE_DISCOVERY_CHANNEL_5_BAND = 149
-
-# Aware Data-Path Constants
-DATA_PATH_INITIATOR = 0
-DATA_PATH_RESPONDER = 1
-
-# Maximum send retry
-MAX_TX_RETRIES = 5
-
-# Callback keys (for 'adb shell cmd wifiaware native_cb get_cb_count')
-CB_EV_CLUSTER = "0"
-CB_EV_DISABLED = "1"
-CB_EV_PUBLISH_TERMINATED = "2"
-CB_EV_SUBSCRIBE_TERMINATED = "3"
-CB_EV_MATCH = "4"
-CB_EV_MATCH_EXPIRED = "5"
-CB_EV_FOLLOWUP_RECEIVED = "6"
-CB_EV_TRANSMIT_FOLLOWUP = "7"
-CB_EV_DATA_PATH_REQUEST = "8"
-CB_EV_DATA_PATH_CONFIRM = "9"
-CB_EV_DATA_PATH_TERMINATED = "10"
diff --git a/src/antlion/test_utils/wifi/aware/aware_test_utils.py b/src/antlion/test_utils/wifi/aware/aware_test_utils.py
deleted file mode 100644
index 6c54791..0000000
--- a/src/antlion/test_utils/wifi/aware/aware_test_utils.py
+++ /dev/null
@@ -1,1055 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import base64
-import json
-import queue
-import re
-import statistics
-import time
-from antlion import asserts
-
-from antlion.test_utils.net import connectivity_const as cconsts
-from antlion.test_utils.net import socket_test_utils as sutils
-from antlion.test_utils.wifi.aware import aware_const as aconsts
-
-# arbitrary timeout for events
-EVENT_TIMEOUT = 10
-
-# semi-arbitrary timeout for network formation events. Based on framework
-# timeout for NDP (NAN data-path) negotiation to be completed.
-EVENT_NDP_TIMEOUT = 20
-
-# number of second to 'reasonably' wait to make sure that devices synchronize
-# with each other - useful for OOB test cases, where the OOB discovery would
-# take some time
-WAIT_FOR_CLUSTER = 5
-
-
-def decorate_event(event_name, id):
-    return '%s_%d' % (event_name, id)
-
-
-def wait_for_event(ad, event_name, timeout=EVENT_TIMEOUT):
-    """Wait for the specified event or timeout.
-
-  Args:
-    ad: The android device
-    event_name: The event to wait on
-    timeout: Number of seconds to wait
-  Returns:
-    The event (if available)
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.pop_event(event_name, timeout)
-        ad.log.info('%s%s: %s', prefix, event_name, event['data'])
-        return event
-    except queue.Empty:
-        ad.log.info('%sTimed out while waiting for %s', prefix, event_name)
-        asserts.fail(event_name)
-
-def _filter_callbacks(event, expected_kv):
-    """
-    Helper method to use in |fail_on_event_with_keys| and
-    |wait_for_event_with_keys|
-    """
-    for expected_k, expected_v in expected_kv:
-        actual_v = event['data'][expected_k]
-        if isinstance(expected_v, dict) and isinstance(actual_v, dict):
-            # |expected_v| not a subset of |actual_v|
-            if not(expected_v.items() <= actual_v.items()):
-                return False
-        else:
-            if actual_v != expected_v:
-                return False
-    return True
-
-
-def wait_for_event_with_keys(ad,
-                             event_name,
-                             timeout=EVENT_TIMEOUT,
-                             *keyvalues):
-    """Wait for the specified event contain the key/value pairs or timeout
-
-  Args:
-    ad: The android device
-    event_name: The event to wait on
-    timeout: Number of seconds to wait
-    keyvalues: Expected (key, value) pairs. If the value for a key is a dict,
-               then this will perform subset matching for that key.
-  Returns:
-    The event (if available)
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.wait_for_event(event_name, _filter_callbacks, timeout,
-                                     keyvalues)
-        ad.log.info('%s%s: %s', prefix, event_name, event['data'])
-        return event
-    except queue.Empty:
-        ad.log.info('%sTimed out while waiting for %s (%s)', prefix,
-                    event_name, keyvalues)
-        asserts.fail(event_name)
-
-
-def fail_on_event(ad, event_name, timeout=EVENT_TIMEOUT):
-    """Wait for a timeout period and looks for the specified event - fails if it
-  is observed.
-
-  Args:
-    ad: The android device
-    event_name: The event to wait for (and fail on its appearance)
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.pop_event(event_name, timeout)
-        ad.log.info('%sReceived unwanted %s: %s', prefix, event_name,
-                    event['data'])
-        asserts.fail(event_name, extras=event)
-    except queue.Empty:
-        ad.log.info('%s%s not seen (as expected)', prefix, event_name)
-        return
-
-
-def fail_on_event_with_keys(ad, event_name, timeout=EVENT_TIMEOUT, *keyvalues):
-    """Wait for a timeout period and looks for the specified event which contains
-  the key/value pairs - fails if it is observed.
-
-  Args:
-    ad: The android device
-    event_name: The event to wait on
-    timeout: Number of seconds to wait
-    keyvalues: Expected (key, value) pairs. If the value for a key is a dict,
-               then this will perform subset matching for that key.
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.wait_for_event(event_name, _filter_callbacks, timeout,
-                                     keyvalues)
-        ad.log.info('%sReceived unwanted %s: %s', prefix, event_name,
-                    event['data'])
-        asserts.fail(event_name, extras=event)
-    except queue.Empty:
-        ad.log.info('%s%s (%s) not seen (as expected)', prefix, event_name,
-                    keyvalues)
-        return
-
-
-def verify_no_more_events(ad, timeout=EVENT_TIMEOUT):
-    """Verify that there are no more events in the queue.
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    should_fail = False
-    try:
-        while True:
-            event = ad.ed.pop_events('.*', timeout, freq=0)
-            ad.log.info('%sQueue contains %s', prefix, event)
-            should_fail = True
-    except queue.Empty:
-        if should_fail:
-            asserts.fail('%sEvent queue not empty' % prefix)
-        ad.log.info('%sNo events in the queue (as expected)', prefix)
-        return
-
-
-def encode_list(list_of_objects):
-    """Converts the list of strings or bytearrays to a list of b64 encoded
-  bytearrays.
-
-  A None object is treated as a zero-length bytearray.
-
-  Args:
-    list_of_objects: A list of strings or bytearray objects
-  Returns: A list of the same objects, converted to bytes and b64 encoded.
-  """
-    encoded_list = []
-    for obj in list_of_objects:
-        if obj is None:
-            obj = bytes()
-        if isinstance(obj, str):
-            encoded_list.append(
-                base64.b64encode(bytes(obj, 'utf-8')).decode('utf-8'))
-        else:
-            encoded_list.append(base64.b64encode(obj).decode('utf-8'))
-    return encoded_list
-
-
-def decode_list(list_of_b64_strings):
-    """Converts the list of b64 encoded strings to a list of bytearray.
-
-  Args:
-    list_of_b64_strings: list of strings, each of which is b64 encoded array
-  Returns: a list of bytearrays.
-  """
-    decoded_list = []
-    for str in list_of_b64_strings:
-        decoded_list.append(base64.b64decode(str))
-    return decoded_list
-
-
-def construct_max_match_filter(max_size):
-    """Constructs a maximum size match filter that fits into the 'max_size' bytes.
-
-  Match filters are a set of LVs (Length, Value pairs) where L is 1 byte. The
-  maximum size match filter will contain max_size/2 LVs with all Vs (except
-  possibly the last one) of 1 byte, the last V may be 2 bytes for odd max_size.
-
-  Args:
-    max_size: Maximum size of the match filter.
-  Returns: an array of bytearrays.
-  """
-    mf_list = []
-    num_lvs = max_size // 2
-    for i in range(num_lvs - 1):
-        mf_list.append(bytes([i]))
-    if (max_size % 2 == 0):
-        mf_list.append(bytes([255]))
-    else:
-        mf_list.append(bytes([254, 255]))
-    return mf_list
-
-
-def assert_equal_strings(first, second, msg=None, extras=None):
-    """Assert equality of the string operands - where None is treated as equal to
-  an empty string (''), otherwise fail the test.
-
-  Error message is "first != second" by default. Additional explanation can
-  be supplied in the message.
-
-  Args:
-      first, seconds: The strings that are evaluated for equality.
-      msg: A string that adds additional info about the failure.
-      extras: An optional field for extra information to be included in
-              test result.
-  """
-    if first == None:
-        first = ''
-    if second == None:
-        second = ''
-    asserts.assert_equal(first, second, msg, extras)
-
-
-def get_aware_capabilities(ad):
-    """Get the Wi-Fi Aware capabilities from the specified device. The
-  capabilities are a dictionary keyed by aware_const.CAP_* keys.
-
-  Args:
-    ad: the Android device
-  Returns: the capability dictionary.
-  """
-    return json.loads(ad.adb.shell('cmd wifiaware state_mgr get_capabilities'))
-
-
-def get_wifi_mac_address(ad):
-    """Get the Wi-Fi interface MAC address as a upper-case string of hex digits
-  without any separators (e.g. ':').
-
-  Args:
-    ad: Device on which to run.
-  """
-    return ad.droid.wifiGetConnectionInfo()['mac_address'].upper().replace(
-        ':', '')
-
-
-def validate_forbidden_callbacks(ad, limited_cb=None):
-    """Validate that the specified callbacks have not been called more then permitted.
-
-  In addition to the input configuration also validates that forbidden callbacks
-  have never been called.
-
-  Args:
-    ad: Device on which to run.
-    limited_cb: Dictionary of CB_EV_* ids and maximum permitted calls (0
-                meaning never).
-  """
-    cb_data = json.loads(ad.adb.shell('cmd wifiaware native_cb get_cb_count'))
-
-    if limited_cb is None:
-        limited_cb = {}
-
-    fail = False
-    for cb_event in limited_cb.keys():
-        if cb_event in cb_data:
-            if cb_data[cb_event] > limited_cb[cb_event]:
-                fail = True
-                ad.log.info(
-                    'Callback %s observed %d times: more then permitted %d times',
-                    cb_event, cb_data[cb_event], limited_cb[cb_event])
-
-    asserts.assert_false(fail, 'Forbidden callbacks observed', extras=cb_data)
-
-
-def extract_stats(ad, data, results, key_prefix, log_prefix):
-    """Extract statistics from the data, store in the results dictionary, and
-  output to the info log.
-
-  Args:
-    ad: Android device (for logging)
-    data: A list containing the data to be analyzed.
-    results: A dictionary into which to place the statistics.
-    key_prefix: A string prefix to use for the dict keys storing the
-                extracted stats.
-    log_prefix: A string prefix to use for the info log.
-    include_data: If True includes the raw data in the dictionary,
-                  otherwise just the stats.
-  """
-    num_samples = len(data)
-    results['%snum_samples' % key_prefix] = num_samples
-
-    if not data:
-        return
-
-    data_min = min(data)
-    data_max = max(data)
-    data_mean = statistics.mean(data)
-    data_cdf = extract_cdf(data)
-    data_cdf_decile = extract_cdf_decile(data_cdf)
-
-    results['%smin' % key_prefix] = data_min
-    results['%smax' % key_prefix] = data_max
-    results['%smean' % key_prefix] = data_mean
-    results['%scdf' % key_prefix] = data_cdf
-    results['%scdf_decile' % key_prefix] = data_cdf_decile
-    results['%sraw_data' % key_prefix] = data
-
-    if num_samples > 1:
-        data_stdev = statistics.stdev(data)
-        results['%sstdev' % key_prefix] = data_stdev
-        ad.log.info(
-            '%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f, stdev=%.2f, cdf_decile=%s',
-            log_prefix, num_samples, data_min, data_max, data_mean, data_stdev,
-            data_cdf_decile)
-    else:
-        ad.log.info(
-            '%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f, cdf_decile=%s',
-            log_prefix, num_samples, data_min, data_max, data_mean,
-            data_cdf_decile)
-
-
-def extract_cdf_decile(cdf):
-    """Extracts the 10%, 20%, ..., 90% points from the CDF and returns their
-  value (a list of 9 values).
-
-  Since CDF may not (will not) have exact x% value picks the value >= x%.
-
-  Args:
-    cdf: a list of 2 lists, the X and Y of the CDF.
-  """
-    decades = []
-    next_decade = 10
-    for x, y in zip(cdf[0], cdf[1]):
-        while 100 * y >= next_decade:
-            decades.append(x)
-            next_decade = next_decade + 10
-        if next_decade == 100:
-            break
-    return decades
-
-
-def extract_cdf(data):
-    """Calculates the Cumulative Distribution Function (CDF) of the data.
-
-  Args:
-      data: A list containing data (does not have to be sorted).
-
-  Returns: a list of 2 lists: the X and Y axis of the CDF.
-  """
-    x = []
-    cdf = []
-    if not data:
-        return (x, cdf)
-
-    all_values = sorted(data)
-    for val in all_values:
-        if not x:
-            x.append(val)
-            cdf.append(1)
-        else:
-            if x[-1] == val:
-                cdf[-1] += 1
-            else:
-                x.append(val)
-                cdf.append(cdf[-1] + 1)
-
-    scale = 1.0 / len(all_values)
-    for i in range(len(cdf)):
-        cdf[i] = cdf[i] * scale
-
-    return (x, cdf)
-
-
-def get_mac_addr(device, interface):
-    """Get the MAC address of the specified interface. Uses ifconfig and parses
-  its output. Normalizes string to remove ':' and upper case.
-
-  Args:
-    device: Device on which to query the interface MAC address.
-    interface: Name of the interface for which to obtain the MAC address.
-  """
-    out = device.adb.shell("ifconfig %s" % interface)
-    res = re.match(".* HWaddr (\S+).*", out, re.S)
-    asserts.assert_true(res,
-                        'Unable to obtain MAC address for interface %s' %
-                        interface,
-                        extras=out)
-    return res.group(1).upper().replace(':', '')
-
-
-def get_ipv6_addr(device, interface):
-    """Get the IPv6 address of the specified interface. Uses ifconfig and parses
-  its output. Returns a None if the interface does not have an IPv6 address
-  (indicating it is not UP).
-
-  Args:
-    device: Device on which to query the interface IPv6 address.
-    interface: Name of the interface for which to obtain the IPv6 address.
-  """
-    out = device.adb.shell("ifconfig %s" % interface)
-    res = re.match(".*inet6 addr: (\S+)/.*", out, re.S)
-    if not res:
-        return None
-    return res.group(1)
-
-
-def verify_socket_connect(dut_s, dut_c, ipv6_s, ipv6_c, port):
-    """Verify the socket connection between server (dut_s) and client (dut_c)
-    using the given IPv6 addresses.
-
-    Opens a ServerSocket on the server and tries to connect to it
-    from the client.
-
-    Args:
-        dut_s, dut_c: the server and client devices under test (DUTs)
-        ipv6_s, ipv6_c: the scoped link-local addresses of the server and client.
-        port: the port to use
-    Return: True on success, False otherwise
-    """
-    server_sock = None
-    sock_c = None
-    sock_s = None
-    try:
-        server_sock = sutils.open_server_socket(dut_s, ipv6_s, port)
-        port_to_use = port
-        if port == 0:
-            port_to_use = dut_s.droid.getTcpServerSocketPort(server_sock)
-        sock_c, sock_s = sutils.open_connect_socket(dut_c, dut_s, ipv6_c,
-                                                    ipv6_s, 0, port_to_use,
-                                                    server_sock)
-    except:
-        return False
-    finally:
-        if sock_c is not None:
-            sutils.close_socket(dut_c, sock_c)
-        if sock_s is not None:
-            sutils.close_socket(dut_s, sock_s)
-        if server_sock is not None:
-            sutils.close_server_socket(dut_s, server_sock)
-    return True
-
-
-def run_ping6(dut, target_ip, duration=60):
-    """Run ping test and return the latency result
-
-    Args:
-        dut: the dut which run the ping cmd
-        target_ip: target IP Address for ping
-        duration: the duration time of the ping
-
-    return: dict contains "min/avg/max/mdev" result
-    """
-    cmd = "ping6 -w %d %s" % (duration, target_ip)
-    ping_result = dut.adb.shell(cmd, timeout=duration + 1)
-    res = re.match(".*mdev = (\S+) .*", ping_result, re.S)
-    asserts.assert_true(res, "Cannot reach the IP address %s", target_ip)
-    title = ["min", "avg", "max", "mdev"]
-    result = res.group(1).split("/")
-    latency_result = {}
-    for i in range(len(title)):
-        latency_result[title[i]] = result[i]
-    return latency_result
-
-
-def reset_device_parameters(ad):
-    """Reset device configurations.
-
-    Args:
-      ad: device to be reset
-    """
-    ad.adb.shell("cmd wifiaware reset")
-
-
-def reset_device_statistics(ad):
-    """Reset device statistics.
-
-    Args:
-        ad: device to be reset
-    """
-    ad.adb.shell("cmd wifiaware native_cb get_cb_count --reset")
-
-
-def set_power_mode_parameters(ad, power_mode):
-    """Set device power mode.
-
-    Set the power configuration DW parameters for the device based on any
-    configuration overrides (if provided)
-
-    Args:
-        ad: android device
-        power_mode: Desired power mode (INTERACTIVE or NON_INTERACTIVE)
-    """
-    if power_mode == "INTERACTIVE":
-        config_settings_high_power(ad)
-    elif power_mode == "NON_INTERACTIVE":
-        config_settings_low_power(ad)
-    else:
-        asserts.assert_false(
-            "The 'aware_default_power_mode' configuration must be INTERACTIVE or "
-            "NON_INTERACTIVE")
-
-
-#########################################################
-# Aware primitives
-#########################################################
-
-
-def request_network(dut, ns):
-    """Request a Wi-Fi Aware network.
-
-  Args:
-    dut: Device
-    ns: Network specifier
-  Returns: the request key
-  """
-    network_req = {"TransportType": 5, "NetworkSpecifier": ns}
-    return dut.droid.connectivityRequestWifiAwareNetwork(network_req)
-
-
-def get_network_specifier(dut, id, dev_type, peer_mac, sec):
-    """Create a network specifier for the device based on the security
-  configuration.
-
-  Args:
-    dut: device
-    id: session ID
-    dev_type: device type - Initiator or Responder
-    peer_mac: the discovery MAC address of the peer
-    sec: security configuration
-  """
-    if sec is None:
-        return dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            id, dev_type, peer_mac)
-    if isinstance(sec, str):
-        return dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            id, dev_type, peer_mac, sec)
-    return dut.droid.wifiAwareCreateNetworkSpecifierOob(
-        id, dev_type, peer_mac, None, sec)
-
-
-def configure_power_setting(device, mode, name, value):
-    """Use the command-line API to configure the power setting
-
-  Args:
-    device: Device on which to perform configuration
-    mode: The power mode being set, should be "default", "inactive", or "idle"
-    name: One of the power settings from 'wifiaware set-power'.
-    value: An integer.
-  """
-    device.adb.shell("cmd wifiaware native_api set-power %s %s %d" %
-                     (mode, name, value))
-
-
-def configure_mac_random_interval(device, interval_sec):
-    """Use the command-line API to configure the MAC address randomization
-  interval.
-
-  Args:
-    device: Device on which to perform configuration
-    interval_sec: The MAC randomization interval in seconds. A value of 0
-                  disables all randomization.
-  """
-    device.adb.shell(
-        "cmd wifiaware native_api set mac_random_interval_sec %d" %
-        interval_sec)
-
-
-def configure_ndp_allow_any_override(device, override_api_check):
-    """Use the command-line API to configure whether an NDP Responder may be
-  configured to accept an NDP request from ANY peer.
-
-  By default the target API level of the requesting app determines whether such
-  configuration is permitted. This allows overriding the API check and allowing
-  it.
-
-  Args:
-    device: Device on which to perform configuration.
-    override_api_check: True to allow a Responder to ANY configuration, False to
-                        perform the API level check.
-  """
-    device.adb.shell("cmd wifiaware state_mgr allow_ndp_any %s" %
-                     ("true" if override_api_check else "false"))
-
-
-def config_settings_high_power(device):
-    """Configure device's power settings values to high power mode -
-  whether device is in interactive or non-interactive modes"""
-    configure_power_setting(device, "default", "dw_24ghz",
-                            aconsts.POWER_DW_24_INTERACTIVE)
-    configure_power_setting(device, "default", "dw_5ghz",
-                            aconsts.POWER_DW_5_INTERACTIVE)
-    configure_power_setting(device, "default", "disc_beacon_interval_ms",
-                            aconsts.POWER_DISC_BEACON_INTERVAL_INTERACTIVE)
-    configure_power_setting(device, "default", "num_ss_in_discovery",
-                            aconsts.POWER_NUM_SS_IN_DISC_INTERACTIVE)
-    configure_power_setting(device, "default", "enable_dw_early_term",
-                            aconsts.POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE)
-
-    configure_power_setting(device, "inactive", "dw_24ghz",
-                            aconsts.POWER_DW_24_INTERACTIVE)
-    configure_power_setting(device, "inactive", "dw_5ghz",
-                            aconsts.POWER_DW_5_INTERACTIVE)
-    configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
-                            aconsts.POWER_DISC_BEACON_INTERVAL_INTERACTIVE)
-    configure_power_setting(device, "inactive", "num_ss_in_discovery",
-                            aconsts.POWER_NUM_SS_IN_DISC_INTERACTIVE)
-    configure_power_setting(device, "inactive", "enable_dw_early_term",
-                            aconsts.POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE)
-
-
-def config_settings_low_power(device):
-    """Configure device's power settings values to low power mode - whether
-  device is in interactive or non-interactive modes"""
-    configure_power_setting(device, "default", "dw_24ghz",
-                            aconsts.POWER_DW_24_NON_INTERACTIVE)
-    configure_power_setting(device, "default", "dw_5ghz",
-                            aconsts.POWER_DW_5_NON_INTERACTIVE)
-    configure_power_setting(device, "default", "disc_beacon_interval_ms",
-                            aconsts.POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE)
-    configure_power_setting(device, "default", "num_ss_in_discovery",
-                            aconsts.POWER_NUM_SS_IN_DISC_NON_INTERACTIVE)
-    configure_power_setting(device, "default", "enable_dw_early_term",
-                            aconsts.POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE)
-
-    configure_power_setting(device, "inactive", "dw_24ghz",
-                            aconsts.POWER_DW_24_NON_INTERACTIVE)
-    configure_power_setting(device, "inactive", "dw_5ghz",
-                            aconsts.POWER_DW_5_NON_INTERACTIVE)
-    configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
-                            aconsts.POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE)
-    configure_power_setting(device, "inactive", "num_ss_in_discovery",
-                            aconsts.POWER_NUM_SS_IN_DISC_NON_INTERACTIVE)
-    configure_power_setting(device, "inactive", "enable_dw_early_term",
-                            aconsts.POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE)
-
-
-def config_power_settings(device,
-                          dw_24ghz,
-                          dw_5ghz,
-                          disc_beacon_interval=None,
-                          num_ss_in_disc=None,
-                          enable_dw_early_term=None):
-    """Configure device's discovery window (DW) values to the specified values -
-  whether the device is in interactive or non-interactive mode.
-
-  Args:
-    dw_24ghz: DW interval in the 2.4GHz band.
-    dw_5ghz: DW interval in the 5GHz band.
-    disc_beacon_interval: The discovery beacon interval (in ms). If None then
-                          not set.
-    num_ss_in_disc: Number of spatial streams to use for discovery. If None then
-                    not set.
-    enable_dw_early_term: If True then enable early termination of the DW. If
-                          None then not set.
-  """
-    configure_power_setting(device, "default", "dw_24ghz", dw_24ghz)
-    configure_power_setting(device, "default", "dw_5ghz", dw_5ghz)
-    configure_power_setting(device, "inactive", "dw_24ghz", dw_24ghz)
-    configure_power_setting(device, "inactive", "dw_5ghz", dw_5ghz)
-
-    if disc_beacon_interval is not None:
-        configure_power_setting(device, "default", "disc_beacon_interval_ms",
-                                disc_beacon_interval)
-        configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
-                                disc_beacon_interval)
-
-    if num_ss_in_disc is not None:
-        configure_power_setting(device, "default", "num_ss_in_discovery",
-                                num_ss_in_disc)
-        configure_power_setting(device, "inactive", "num_ss_in_discovery",
-                                num_ss_in_disc)
-
-    if enable_dw_early_term is not None:
-        configure_power_setting(device, "default", "enable_dw_early_term",
-                                enable_dw_early_term)
-        configure_power_setting(device, "inactive", "enable_dw_early_term",
-                                enable_dw_early_term)
-
-
-def create_discovery_config(service_name,
-                            d_type,
-                            ssi=None,
-                            match_filter=None,
-                            match_filter_list=None,
-                            ttl=0,
-                            term_cb_enable=True):
-    """Create a publish discovery configuration based on input parameters.
-
-  Args:
-    service_name: Service name - required
-    d_type: Discovery type (publish or subscribe constants)
-    ssi: Supplemental information - defaults to None
-    match_filter, match_filter_list: The match_filter, only one mechanism can
-                                     be used to specify. Defaults to None.
-    ttl: Time-to-live - defaults to 0 (i.e. non-self terminating)
-    term_cb_enable: True (default) to enable callback on termination, False
-                    means that no callback is called when session terminates.
-  Returns:
-    publish discovery configuration object.
-  """
-    config = {}
-    config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = service_name
-    config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = d_type
-    if ssi is not None:
-        config[aconsts.DISCOVERY_KEY_SSI] = ssi
-    if match_filter is not None:
-        config[aconsts.DISCOVERY_KEY_MATCH_FILTER] = match_filter
-    if match_filter_list is not None:
-        config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = match_filter_list
-    config[aconsts.DISCOVERY_KEY_TTL] = ttl
-    config[aconsts.DISCOVERY_KEY_TERM_CB_ENABLED] = term_cb_enable
-    return config
-
-
-def add_ranging_to_pub(p_config, enable_ranging):
-    """Add ranging enabled configuration to a publish configuration (only relevant
-  for publish configuration).
-
-  Args:
-    p_config: The Publish discovery configuration.
-    enable_ranging: True to enable ranging, False to disable.
-  Returns:
-    The modified publish configuration.
-  """
-    p_config[aconsts.DISCOVERY_KEY_RANGING_ENABLED] = enable_ranging
-    return p_config
-
-
-def add_ranging_to_sub(s_config, min_distance_mm, max_distance_mm):
-    """Add ranging distance configuration to a subscribe configuration (only
-  relevant to a subscribe configuration).
-
-  Args:
-    s_config: The Subscribe discovery configuration.
-    min_distance_mm, max_distance_mm: The min and max distance specification.
-                                      Used if not None.
-  Returns:
-    The modified subscribe configuration.
-  """
-    if min_distance_mm is not None:
-        s_config[aconsts.DISCOVERY_KEY_MIN_DISTANCE_MM] = min_distance_mm
-    if max_distance_mm is not None:
-        s_config[aconsts.DISCOVERY_KEY_MAX_DISTANCE_MM] = max_distance_mm
-    return s_config
-
-
-def attach_with_identity(dut):
-    """Start an Aware session (attach) and wait for confirmation and identity
-  information (mac address).
-
-  Args:
-    dut: Device under test
-  Returns:
-    id: Aware session ID.
-    mac: Discovery MAC address of this device.
-  """
-    id = dut.droid.wifiAwareAttach(True)
-    wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
-    event = wait_for_event(dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    mac = event["data"]["mac"]
-
-    return id, mac
-
-
-def create_discovery_pair(p_dut,
-                          s_dut,
-                          p_config,
-                          s_config,
-                          device_startup_offset,
-                          msg_id=None):
-    """Creates a discovery session (publish and subscribe), and waits for
-  service discovery - at that point the sessions are connected and ready for
-  further messaging of data-path setup.
-
-  Args:
-    p_dut: Device to use as publisher.
-    s_dut: Device to use as subscriber.
-    p_config: Publish configuration.
-    s_config: Subscribe configuration.
-    device_startup_offset: Number of seconds to offset the enabling of NAN on
-                           the two devices.
-    msg_id: Controls whether a message is sent from Subscriber to Publisher
-            (so that publisher has the sub's peer ID). If None then not sent,
-            otherwise should be an int for the message id.
-  Returns: variable size list of:
-    p_id: Publisher attach session id
-    s_id: Subscriber attach session id
-    p_disc_id: Publisher discovery session id
-    s_disc_id: Subscriber discovery session id
-    peer_id_on_sub: Peer ID of the Publisher as seen on the Subscriber
-    peer_id_on_pub: Peer ID of the Subscriber as seen on the Publisher. Only
-                    included if |msg_id| is not None.
-  """
-    p_dut.pretty_name = 'Publisher'
-    s_dut.pretty_name = 'Subscriber'
-
-    # Publisher+Subscriber: attach and wait for confirmation
-    p_id = p_dut.droid.wifiAwareAttach()
-    wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(device_startup_offset)
-    s_id = s_dut.droid.wifiAwareAttach()
-    wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
-
-    # Publisher: start publish and wait for confirmation
-    p_disc_id = p_dut.droid.wifiAwarePublish(p_id, p_config)
-    wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
-
-    # Subscriber: start subscribe and wait for confirmation
-    s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, s_config)
-    wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
-
-    # Subscriber: wait for service discovery
-    discovery_event = wait_for_event(s_dut,
-                                     aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
-    peer_id_on_sub = discovery_event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
-
-    # Optionally send a message from Subscriber to Publisher
-    if msg_id is not None:
-        ping_msg = 'PING'
-
-        # Subscriber: send message to peer (Publisher)
-        s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, msg_id,
-                                         ping_msg, aconsts.MAX_TX_RETRIES)
-        sub_tx_msg_event = wait_for_event(s_dut,
-                                          aconsts.SESSION_CB_ON_MESSAGE_SENT)
-        asserts.assert_equal(
-            msg_id,
-            sub_tx_msg_event['data'][aconsts.SESSION_CB_KEY_MESSAGE_ID],
-            'Subscriber -> Publisher message ID corrupted')
-
-        # Publisher: wait for received message
-        pub_rx_msg_event = wait_for_event(
-            p_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
-        peer_id_on_pub = pub_rx_msg_event['data'][
-            aconsts.SESSION_CB_KEY_PEER_ID]
-        asserts.assert_equal(
-            ping_msg,
-            pub_rx_msg_event['data'][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
-            'Subscriber -> Publisher message corrupted')
-        return p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub, peer_id_on_pub
-
-    return p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub
-
-
-def create_ib_ndp(p_dut, s_dut, p_config, s_config, device_startup_offset):
-    """Create an NDP (using in-band discovery)
-
-  Args:
-    p_dut: Device to use as publisher.
-    s_dut: Device to use as subscriber.
-    p_config: Publish configuration.
-    s_config: Subscribe configuration.
-    device_startup_offset: Number of seconds to offset the enabling of NAN on
-                           the two devices.
-  """
-    (p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
-     peer_id_on_pub) = create_discovery_pair(p_dut,
-                                             s_dut,
-                                             p_config,
-                                             s_config,
-                                             device_startup_offset,
-                                             msg_id=9999)
-
-    # Publisher: request network
-    p_req_key = request_network(
-        p_dut,
-        p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, peer_id_on_pub,
-                                                    None))
-
-    # Subscriber: request network
-    s_req_key = request_network(
-        s_dut,
-        s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id, peer_id_on_sub,
-                                                    None))
-
-    # Publisher & Subscriber: wait for network formation
-    p_net_event_nc = wait_for_event_with_keys(
-        p_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_CAPABILITIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-    s_net_event_nc = wait_for_event_with_keys(
-        s_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_CAPABILITIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, s_req_key))
-
-    # validate no leak of information
-    asserts.assert_false(
-        cconsts.NETWORK_CB_KEY_NETWORK_SPECIFIER in p_net_event_nc["data"],
-        "Network specifier leak!")
-    asserts.assert_false(
-        cconsts.NETWORK_CB_KEY_NETWORK_SPECIFIER in s_net_event_nc["data"],
-        "Network specifier leak!")
-
-    # note that Pub <-> Sub since IPv6 are of peer's!
-    p_ipv6 = s_net_event_nc["data"][aconsts.NET_CAP_IPV6]
-    s_ipv6 = p_net_event_nc["data"][aconsts.NET_CAP_IPV6]
-
-    p_net_event_lp = wait_for_event_with_keys(
-        p_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-    s_net_event_lp = wait_for_event_with_keys(
-        s_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, s_req_key))
-
-    p_aware_if = p_net_event_lp["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    s_aware_if = s_net_event_lp["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-
-    return p_req_key, s_req_key, p_aware_if, s_aware_if, p_ipv6, s_ipv6
-
-
-def create_oob_ndp_on_sessions(init_dut, resp_dut, init_id, init_mac, resp_id,
-                               resp_mac):
-    """Create an NDP on top of existing Aware sessions (using OOB discovery)
-
-  Args:
-    init_dut: Initiator device
-    resp_dut: Responder device
-    init_id: Initiator attach session id
-    init_mac: Initiator discovery MAC address
-    resp_id: Responder attach session id
-    resp_mac: Responder discovery MAC address
-  Returns:
-    init_req_key: Initiator network request
-    resp_req_key: Responder network request
-    init_aware_if: Initiator Aware data interface
-    resp_aware_if: Responder Aware data interface
-    init_ipv6: Initiator IPv6 address
-    resp_ipv6: Responder IPv6 address
-  """
-    # Responder: request network
-    resp_req_key = request_network(
-        resp_dut,
-        resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, None))
-
-    # Initiator: request network
-    init_req_key = request_network(
-        init_dut,
-        init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, None))
-
-    # Initiator & Responder: wait for network formation
-    init_net_event_nc = wait_for_event_with_keys(
-        init_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_CAPABILITIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, init_req_key))
-    resp_net_event_nc = wait_for_event_with_keys(
-        resp_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_CAPABILITIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
-
-    # validate no leak of information
-    asserts.assert_false(
-        cconsts.NETWORK_CB_KEY_NETWORK_SPECIFIER in init_net_event_nc["data"],
-        "Network specifier leak!")
-    asserts.assert_false(
-        cconsts.NETWORK_CB_KEY_NETWORK_SPECIFIER in resp_net_event_nc["data"],
-        "Network specifier leak!")
-
-    # note that Init <-> Resp since IPv6 are of peer's!
-    resp_ipv6 = init_net_event_nc["data"][aconsts.NET_CAP_IPV6]
-    init_ipv6 = resp_net_event_nc["data"][aconsts.NET_CAP_IPV6]
-
-    init_net_event_lp = wait_for_event_with_keys(
-        init_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, init_req_key))
-    resp_net_event_lp = wait_for_event_with_keys(
-        resp_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
-
-    init_aware_if = init_net_event_lp['data'][
-        cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    resp_aware_if = resp_net_event_lp['data'][
-        cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-
-    return (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
-            init_ipv6, resp_ipv6)
-
-
-def create_oob_ndp(init_dut, resp_dut):
-    """Create an NDP (using OOB discovery)
-
-  Args:
-    init_dut: Initiator device
-    resp_dut: Responder device
-  """
-    init_dut.pretty_name = 'Initiator'
-    resp_dut.pretty_name = 'Responder'
-
-    # Initiator+Responder: attach and wait for confirmation & identity
-    init_id = init_dut.droid.wifiAwareAttach(True)
-    wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    init_ident_event = wait_for_event(init_dut,
-                                      aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    init_mac = init_ident_event['data']['mac']
-    resp_id = resp_dut.droid.wifiAwareAttach(True)
-    wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    resp_ident_event = wait_for_event(resp_dut,
-                                      aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    resp_mac = resp_ident_event['data']['mac']
-
-    # wait for for devices to synchronize with each other - there are no other
-    # mechanisms to make sure this happens for OOB discovery (except retrying
-    # to execute the data-path request)
-    time.sleep(WAIT_FOR_CLUSTER)
-
-    (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
-     resp_ipv6) = create_oob_ndp_on_sessions(init_dut, resp_dut, init_id,
-                                             init_mac, resp_id, resp_mac)
-
-    return (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
-            init_ipv6, resp_ipv6)
diff --git a/src/antlion/test_utils/wifi/ota_chamber.py b/src/antlion/test_utils/wifi/ota_chamber.py
deleted file mode 100644
index 215c349..0000000
--- a/src/antlion/test_utils/wifi/ota_chamber.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import contextlib
-import io
-import serial
-import time
-from antlion import logger
-from antlion import utils
-
-SHORT_SLEEP = 1
-CHAMBER_SLEEP = 30
-
-
-def create(configs):
-    """Factory method for OTA chambers.
-
-    Args:
-        configs: list of dicts with chamber settings. settings must contain the
-        following: type (string denoting type of chamber)
-    """
-    objs = []
-    for config in configs:
-        try:
-            chamber_class = globals()[config['model']]
-        except KeyError:
-            raise KeyError('Invalid chamber configuration.')
-        objs.append(chamber_class(config))
-    return objs
-
-
-def detroy(objs):
-    return
-
-
-class OtaChamber(object):
-    """Base class implementation for OTA chamber.
-
-    Base class provides functions whose implementation is shared by all
-    chambers.
-    """
-    def reset_chamber(self):
-        """Resets the chamber to its zero/home state."""
-        raise NotImplementedError
-
-    def set_orientation(self, orientation):
-        """Set orientation for turn table in OTA chamber.
-
-        Args:
-            angle: desired turn table orientation in degrees
-        """
-        raise NotImplementedError
-
-    def set_stirrer_pos(self, stirrer_id, position):
-        """Starts turntables and stirrers in OTA chamber."""
-        raise NotImplementedError
-
-    def start_continuous_stirrers(self):
-        """Starts turntables and stirrers in OTA chamber."""
-        raise NotImplementedError
-
-    def stop_continuous_stirrers(self):
-        """Stops turntables and stirrers in OTA chamber."""
-        raise NotImplementedError
-
-    def step_stirrers(self, steps):
-        """Move stepped stirrers in OTA chamber to next step."""
-        raise NotImplementedError
-
-
-class MockChamber(OtaChamber):
-    """Class that implements mock chamber for test development and debug."""
-    def __init__(self, config):
-        self.config = config.copy()
-        self.device_id = self.config['device_id']
-        self.log = logger.create_tagged_trace_logger('OtaChamber|{}'.format(
-            self.device_id))
-        self.current_mode = None
-        self.SUPPORTED_BANDS = ['2.4GHz', 'UNII-1', 'UNII-2', 'UNII-3', '6GHz']
-
-    def set_orientation(self, orientation):
-        self.log.info('Setting orientation to {} degrees.'.format(orientation))
-
-    def reset_chamber(self):
-        self.log.info('Resetting chamber to home state')
-
-    def set_stirrer_pos(self, stirrer_id, position):
-        """Starts turntables and stirrers in OTA chamber."""
-        self.log.info('Setting stirrer {} to {}.'.format(stirrer_id, position))
-
-    def start_continuous_stirrers(self):
-        """Starts turntables and stirrers in OTA chamber."""
-        self.log.info('Starting continuous stirrer motion')
-
-    def stop_continuous_stirrers(self):
-        """Stops turntables and stirrers in OTA chamber."""
-        self.log.info('Stopping continuous stirrer motion')
-
-    def configure_stepped_stirrers(self, steps):
-        """Programs parameters for stepped stirrers in OTA chamber."""
-        self.log.info('Configuring stepped stirrers')
-
-    def step_stirrers(self, steps):
-        """Move stepped stirrers in OTA chamber to next step."""
-        self.log.info('Moving stirrers to the next step')
-
-
-class OctoboxChamber(OtaChamber):
-    """Class that implements Octobox chamber."""
-    def __init__(self, config):
-        self.config = config.copy()
-        self.device_id = self.config['device_id']
-        self.log = logger.create_tagged_trace_logger('OtaChamber|{}'.format(
-            self.device_id))
-        self.TURNTABLE_FILE_PATH = '/usr/local/bin/fnPerformaxCmd'
-        utils.exe_cmd('sudo {} -d {} -i 0'.format(self.TURNTABLE_FILE_PATH,
-                                                  self.device_id))
-        self.current_mode = None
-        self.SUPPORTED_BANDS = ['2.4GHz', 'UNII-1', 'UNII-2', 'UNII-3', '6GHz']
-
-    def set_orientation(self, orientation):
-        self.log.info('Setting orientation to {} degrees.'.format(orientation))
-        utils.exe_cmd('sudo {} -d {} -p {}'.format(self.TURNTABLE_FILE_PATH,
-                                                   self.device_id,
-                                                   orientation))
-
-    def reset_chamber(self):
-        self.log.info('Resetting chamber to home state')
-        self.set_orientation(0)
-
-
-class ChamberAutoConnect(object):
-    def __init__(self, chamber, chamber_config):
-        self._chamber = chamber
-        self._config = chamber_config
-
-    def __getattr__(self, item):
-        def chamber_call(*args, **kwargs):
-            self._chamber.connect(self._config['ip_address'],
-                                  self._config['username'],
-                                  self._config['password'])
-            return getattr(self._chamber, item)(*args, **kwargs)
-
-        return chamber_call
-
-
-class BluetestChamber(OtaChamber):
-    """Class that implements Octobox chamber."""
-    def __init__(self, config):
-        import flow
-        self.config = config.copy()
-        self.log = logger.create_tagged_trace_logger('OtaChamber|{}'.format(
-            self.config['ip_address']))
-        self.chamber = ChamberAutoConnect(flow.Flow(), self.config)
-        self.stirrer_ids = [0, 1, 2]
-        self.current_mode = None
-        self.SUPPORTED_BANDS = ['2.4GHz', 'UNII-1', 'UNII-2', 'UNII-3']
-
-    # Capture print output decorator
-    @staticmethod
-    def _capture_output(func, *args, **kwargs):
-        """Creates a decorator to capture stdout from bluetest module"""
-        f = io.StringIO()
-        with contextlib.redirect_stdout(f):
-            func(*args, **kwargs)
-        output = f.getvalue()
-        return output
-
-    def _connect(self):
-        self.chamber.connect(self.config['ip_address'],
-                             self.config['username'], self.config['password'])
-
-    def _init_manual_mode(self):
-        self.current_mode = 'manual'
-        for stirrer_id in self.stirrer_ids:
-            out = self._capture_output(
-                self.chamber.chamber_stirring_manual_init, stirrer_id)
-            if "failed" in out:
-                self.log.warning("Initialization error: {}".format(out))
-        time.sleep(CHAMBER_SLEEP)
-
-    def _init_continuous_mode(self):
-        self.current_mode = 'continuous'
-        self.chamber.chamber_stirring_continuous_init()
-
-    def _init_stepped_mode(self, steps):
-        self.current_mode = 'stepped'
-        self.current_stepped_pos = 0
-        self.chamber.chamber_stirring_stepped_init(steps, False)
-
-    def set_stirrer_pos(self, stirrer_id, position):
-        if self.current_mode != 'manual':
-            self._init_manual_mode()
-        self.log.info('Setting stirrer {} to {}.'.format(stirrer_id, position))
-        out = self._capture_output(
-            self.chamber.chamber_stirring_manual_set_pos, stirrer_id, position)
-        if "failed" in out:
-            self.log.warning("Bluetest error: {}".format(out))
-            self.log.warning("Set position failed. Retrying.")
-            self.current_mode = None
-            self.set_stirrer_pos(stirrer_id, position)
-        else:
-            self._capture_output(self.chamber.chamber_stirring_manual_wait,
-                                 CHAMBER_SLEEP)
-            self.log.warning('Stirrer {} at {}.'.format(stirrer_id, position))
-
-    def set_orientation(self, orientation):
-        self.set_stirrer_pos(2, orientation * 100 / 360)
-
-    def start_continuous_stirrers(self):
-        if self.current_mode != 'continuous':
-            self._init_continuous_mode()
-        self.chamber.chamber_stirring_continuous_start()
-
-    def stop_continuous_stirrers(self):
-        self.chamber.chamber_stirring_continuous_stop()
-
-    def step_stirrers(self, steps):
-        if self.current_mode != 'stepped':
-            self._init_stepped_mode(steps)
-        if self.current_stepped_pos == 0:
-            self.current_stepped_pos += 1
-            return
-        self.current_stepped_pos += 1
-        self.chamber.chamber_stirring_stepped_next_pos()
-
-    def reset_chamber(self):
-        if self.current_mode == 'continuous':
-            self._init_continuous_mode()
-            time.sleep(SHORT_SLEEP)
-            self._init_continuous_mode()
-        else:
-            self._init_manual_mode()
-
-
-class EInstrumentChamber(OtaChamber):
-    """Class that implements Einstrument Chamber."""
-    def __init__(self, config):
-        self.config = config.copy()
-        self.device_id = self.config['device_id']
-        self.log = logger.create_tagged_trace_logger(
-            'EInstrumentChamber|{}'.format(self.device_id))
-        self.current_mode = None
-        self.ser = self._get_serial(config['port'])
-
-    def _get_serial(self, port, baud=9600):
-        """Read com port.
-
-        Args:
-            port: turn table com port
-            baud: baud rate
-        """
-        ser = serial.Serial(port, baud)
-        return ser
-
-    def set_orientation(self, orientation):
-        if int(orientation) > 360:
-            orientation = int(orientation) % 360
-        elif int(orientation) < 0:
-            orientation = 0
-        self.log.info('Setting orientation to {} degrees.'.format(orientation))
-        orientation = str('DG') + str(orientation) + str(';')
-        self.ser.write(orientation.encode())
-        return orientation
-
-    def reset_chamber(self):
-        self.log.info('Resetting turn table to zero degree')
-        self.set_orientation(0)
diff --git a/src/antlion/test_utils/wifi/ota_sniffer.py b/src/antlion/test_utils/wifi/ota_sniffer.py
deleted file mode 100644
index 4200d24..0000000
--- a/src/antlion/test_utils/wifi/ota_sniffer.py
+++ /dev/null
@@ -1,601 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import csv
-import os
-import posixpath
-import time
-import antlion.test_utils.wifi.wifi_test_utils as wutils
-
-from antlion import context
-from antlion import logger
-from antlion import utils
-from antlion.controllers.utils_lib import ssh
-
-WifiEnums = wutils.WifiEnums
-SNIFFER_TIMEOUT = 6
-
-
-def create(configs):
-    """Factory method for sniffer.
-    Args:
-        configs: list of dicts with sniffer settings.
-        Settings must contain the following : ssh_settings, type, OS, interface.
-
-    Returns:
-        objs: list of sniffer class objects.
-    """
-    objs = []
-    for config in configs:
-        try:
-            if config['type'] == 'tshark':
-                if config['os'] == 'unix':
-                    objs.append(TsharkSnifferOnUnix(config))
-                elif config['os'] == 'linux':
-                    objs.append(TsharkSnifferOnLinux(config))
-                else:
-                    raise RuntimeError('Wrong sniffer config')
-
-            elif config['type'] == 'mock':
-                objs.append(MockSniffer(config))
-        except KeyError:
-            raise KeyError('Invalid sniffer configurations')
-        return objs
-
-
-def destroy(objs):
-    return
-
-
-class OtaSnifferBase(object):
-    """Base class defining common sniffers functions."""
-
-    _log_file_counter = 0
-
-    @property
-    def started(self):
-        raise NotImplementedError('started must be specified.')
-
-    def start_capture(self, network, duration=30):
-        """Starts the sniffer Capture.
-
-        Args:
-            network: dict containing network information such as SSID, etc.
-            duration: duration of sniffer capture in seconds.
-        """
-        raise NotImplementedError('start_capture must be specified.')
-
-    def stop_capture(self, tag=''):
-        """Stops the sniffer Capture.
-
-        Args:
-            tag: string to tag sniffer capture file name with.
-        """
-        raise NotImplementedError('stop_capture must be specified.')
-
-    def _get_remote_dump_path(self):
-        """Returns name of the sniffer dump file."""
-        remote_file_name = 'sniffer_dump.{}'.format(
-            self.sniffer_output_file_type)
-        remote_dump_path = posixpath.join(posixpath.sep, 'tmp',
-                                          remote_file_name)
-        return remote_dump_path
-
-    def _get_full_file_path(self, tag=None):
-        """Returns the full file path for the sniffer capture dump file.
-
-        Returns the full file path (on test machine) for the sniffer capture
-        dump file.
-
-        Args:
-            tag: The tag appended to the sniffer capture dump file .
-        """
-        tags = [tag, 'count', OtaSnifferBase._log_file_counter]
-        out_file_name = 'Sniffer_Capture_%s.%s' % ('_'.join([
-            str(x) for x in tags if x != '' and x is not None
-        ]), self.sniffer_output_file_type)
-        OtaSnifferBase._log_file_counter += 1
-
-        file_path = os.path.join(self.log_path, out_file_name)
-        return file_path
-
-    @property
-    def log_path(self):
-        current_context = context.get_current_context()
-        full_out_dir = os.path.join(current_context.get_full_output_path(),
-                                    'sniffer_captures')
-
-        # Ensure the directory exists.
-        os.makedirs(full_out_dir, exist_ok=True)
-
-        return full_out_dir
-
-
-class MockSniffer(OtaSnifferBase):
-    """Class that implements mock sniffer for test development and debug."""
-    def __init__(self, config):
-        self.log = logger.create_tagged_trace_logger('Mock Sniffer')
-
-    def start_capture(self, network, duration=30):
-        """Starts sniffer capture on the specified machine.
-
-        Args:
-            network: dict of network credentials.
-            duration: duration of the sniff.
-        """
-        self.log.debug('Starting sniffer.')
-
-    def stop_capture(self):
-        """Stops the sniffer.
-
-        Returns:
-            log_file: name of processed sniffer.
-        """
-
-        self.log.debug('Stopping sniffer.')
-        log_file = self._get_full_file_path()
-        with open(log_file, 'w') as file:
-            file.write('this is a sniffer dump.')
-        return log_file
-
-
-class TsharkSnifferBase(OtaSnifferBase):
-    """Class that implements Tshark based sniffer controller. """
-
-    TYPE_SUBTYPE_DICT = {
-        '0': 'Association Requests',
-        '1': 'Association Responses',
-        '2': 'Reassociation Requests',
-        '3': 'Resssociation Responses',
-        '4': 'Probe Requests',
-        '5': 'Probe Responses',
-        '8': 'Beacon',
-        '9': 'ATIM',
-        '10': 'Disassociations',
-        '11': 'Authentications',
-        '12': 'Deauthentications',
-        '13': 'Actions',
-        '24': 'Block ACK Requests',
-        '25': 'Block ACKs',
-        '26': 'PS-Polls',
-        '27': 'RTS',
-        '28': 'CTS',
-        '29': 'ACK',
-        '30': 'CF-Ends',
-        '31': 'CF-Ends/CF-Acks',
-        '32': 'Data',
-        '33': 'Data+CF-Ack',
-        '34': 'Data+CF-Poll',
-        '35': 'Data+CF-Ack+CF-Poll',
-        '36': 'Null',
-        '37': 'CF-Ack',
-        '38': 'CF-Poll',
-        '39': 'CF-Ack+CF-Poll',
-        '40': 'QoS Data',
-        '41': 'QoS Data+CF-Ack',
-        '42': 'QoS Data+CF-Poll',
-        '43': 'QoS Data+CF-Ack+CF-Poll',
-        '44': 'QoS Null',
-        '46': 'QoS CF-Poll (Null)',
-        '47': 'QoS CF-Ack+CF-Poll (Null)'
-    }
-
-    TSHARK_COLUMNS = [
-        'frame_number', 'frame_time_relative', 'mactime', 'frame_len', 'rssi',
-        'channel', 'ta', 'ra', 'bssid', 'type', 'subtype', 'duration', 'seq',
-        'retry', 'pwrmgmt', 'moredata', 'ds', 'phy', 'radio_datarate',
-        'vht_datarate', 'radiotap_mcs_index', 'vht_mcs', 'wlan_data_rate',
-        '11n_mcs_index', '11ac_mcs', '11n_bw', '11ac_bw', 'vht_nss', 'mcs_gi',
-        'vht_gi', 'vht_coding', 'ba_bm', 'fc_status', 'bf_report'
-    ]
-
-    TSHARK_OUTPUT_COLUMNS = [
-        'frame_number', 'frame_time_relative', 'mactime', 'ta', 'ra', 'bssid',
-        'rssi', 'channel', 'frame_len', 'Info', 'radio_datarate',
-        'radiotap_mcs_index', 'pwrmgmt', 'phy', 'vht_nss', 'vht_mcs',
-        'vht_datarate', '11ac_mcs', '11ac_bw', 'vht_gi', 'vht_coding',
-        'wlan_data_rate', '11n_mcs_index', '11n_bw', 'mcs_gi', 'type',
-        'subtype', 'duration', 'seq', 'retry', 'moredata', 'ds', 'ba_bm',
-        'fc_status', 'bf_report'
-    ]
-
-    TSHARK_FIELDS_LIST = [
-        'frame.number', 'frame.time_relative', 'radiotap.mactime', 'frame.len',
-        'radiotap.dbm_antsignal', 'wlan_radio.channel', 'wlan.ta', 'wlan.ra',
-        'wlan.bssid', 'wlan.fc.type', 'wlan.fc.type_subtype', 'wlan.duration',
-        'wlan.seq', 'wlan.fc.retry', 'wlan.fc.pwrmgt', 'wlan.fc.moredata',
-        'wlan.fc.ds', 'wlan_radio.phy', 'radiotap.datarate',
-        'radiotap.vht.datarate.0', 'radiotap.mcs.index', 'radiotap.vht.mcs.0',
-        'wlan_radio.data_rate', 'wlan_radio.11n.mcs_index',
-        'wlan_radio.11ac.mcs', 'wlan_radio.11n.bandwidth',
-        'wlan_radio.11ac.bandwidth', 'radiotap.vht.nss.0', 'radiotap.mcs.gi',
-        'radiotap.vht.gi', 'radiotap.vht.coding.0', 'wlan.ba.bm',
-        'wlan.fcs.status', 'wlan.vht.compressed_beamforming_report.snr'
-    ]
-
-    def __init__(self, config):
-        self.sniffer_proc_pid = None
-        self.log = logger.create_tagged_trace_logger('Tshark Sniffer')
-        self.ssh_config = config['ssh_config']
-        self.sniffer_os = config['os']
-        self.run_as_sudo = config.get('run_as_sudo', False)
-        self.sniffer_output_file_type = config['output_file_type']
-        self.sniffer_snap_length = config['snap_length']
-        self.sniffer_interface = config['interface']
-        self.sniffer_disabled = False
-
-        #Logging into sniffer
-        self.log.info('Logging into sniffer.')
-        self._sniffer_server = ssh.connection.SshConnection(
-            ssh.settings.from_config(self.ssh_config))
-        # Get tshark params
-        self.tshark_fields = self._generate_tshark_fields(
-            self.TSHARK_FIELDS_LIST)
-        self.tshark_path = self._sniffer_server.run('which tshark').stdout
-
-    @property
-    def _started(self):
-        return self.sniffer_proc_pid is not None
-
-    def _scan_for_networks(self):
-        """Scans for wireless networks on the sniffer."""
-        raise NotImplementedError
-
-    def _get_tshark_command(self, duration):
-        """Frames the appropriate tshark command.
-
-        Args:
-            duration: duration to sniff for.
-
-        Returns:
-            tshark_command : appropriate tshark command.
-        """
-        tshark_command = '{} -l -i {} -I -t u -a duration:{}'.format(
-            self.tshark_path, self.sniffer_interface, int(duration))
-        if self.run_as_sudo:
-            tshark_command = 'sudo {}'.format(tshark_command)
-
-        return tshark_command
-
-    def _get_sniffer_command(self, tshark_command):
-        """
-        Frames the appropriate sniffer command.
-
-        Args:
-            tshark_command: framed tshark command
-
-        Returns:
-            sniffer_command: appropriate sniffer command
-        """
-        if self.sniffer_output_file_type in ['pcap', 'pcapng']:
-            sniffer_command = ' {tshark} -s {snaplength} -w {log_file} '.format(
-                tshark=tshark_command,
-                snaplength=self.sniffer_snap_length,
-                log_file=self._get_remote_dump_path())
-
-        elif self.sniffer_output_file_type == 'csv':
-            sniffer_command = '{tshark} {fields} > {log_file}'.format(
-                tshark=tshark_command,
-                fields=self.tshark_fields,
-                log_file=self._get_remote_dump_path())
-
-        else:
-            raise KeyError('Sniffer output file type not configured correctly')
-
-        return sniffer_command
-
-    def _generate_tshark_fields(self, fields):
-        """Generates tshark fields to be appended to the tshark command.
-
-        Args:
-            fields: list of tshark fields to be appended to the tshark command.
-
-        Returns:
-            tshark_fields: string of tshark fields to be appended
-            to the tshark command.
-        """
-        tshark_fields = "-T fields -y IEEE802_11_RADIO -E separator='^'"
-        for field in fields:
-            tshark_fields = tshark_fields + ' -e {}'.format(field)
-        return tshark_fields
-
-    def _configure_sniffer(self, network, chan, bw):
-        """ Connects to a wireless network using networksetup utility.
-
-        Args:
-            network: dictionary of network credentials; SSID and password.
-        """
-        raise NotImplementedError
-
-    def _run_tshark(self, sniffer_command):
-        """Starts the sniffer.
-
-        Args:
-            sniffer_command: sniffer command to execute.
-        """
-        self.log.debug('Starting sniffer.')
-        sniffer_job = self._sniffer_server.run_async(sniffer_command)
-        self.sniffer_proc_pid = sniffer_job.stdout
-
-    def _stop_tshark(self):
-        """ Stops the sniffer."""
-        self.log.debug('Stopping sniffer')
-
-        # while loop to kill the sniffer process
-        stop_time = time.time() + SNIFFER_TIMEOUT
-        while time.time() < stop_time:
-            # Wait before sending more kill signals
-            time.sleep(0.1)
-            try:
-                # Returns 1 if process was killed
-                self._sniffer_server.run(
-                    'ps aux| grep {} | grep -v grep'.format(
-                        self.sniffer_proc_pid))
-            except:
-                return
-            try:
-                # Returns error if process was killed already
-                self._sniffer_server.run('sudo kill -15 {}'.format(
-                    str(self.sniffer_proc_pid)))
-            except:
-                # Except is hit when tshark is already dead but we will break
-                # out of the loop when confirming process is dead using ps aux
-                pass
-        self.log.warning('Could not stop sniffer. Trying with SIGKILL.')
-        try:
-            self.log.debug('Killing sniffer with SIGKILL.')
-            self._sniffer_server.run('sudo kill -9 {}'.format(
-                str(self.sniffer_proc_pid)))
-        except:
-            self.log.debug('Sniffer process may have stopped succesfully.')
-
-    def _process_tshark_dump(self, log_file):
-        """ Process tshark dump for better readability.
-
-        Processes tshark dump for better readability and saves it to a file.
-        Adds an info column at the end of each row. Format of the info columns:
-        subtype of the frame, sequence no and retry status.
-
-        Args:
-            log_file : unprocessed sniffer output
-        Returns:
-            log_file : processed sniffer output
-        """
-        temp_dump_file = os.path.join(self.log_path, 'sniffer_temp_dump.csv')
-        utils.exe_cmd('cp {} {}'.format(log_file, temp_dump_file))
-
-        with open(temp_dump_file, 'r') as input_csv, open(log_file,
-                                                          'w') as output_csv:
-            reader = csv.DictReader(input_csv,
-                                    fieldnames=self.TSHARK_COLUMNS,
-                                    delimiter='^')
-            writer = csv.DictWriter(output_csv,
-                                    fieldnames=self.TSHARK_OUTPUT_COLUMNS,
-                                    delimiter='\t')
-            writer.writeheader()
-            for row in reader:
-                if row['subtype'] in self.TYPE_SUBTYPE_DICT:
-                    row['Info'] = '{sub} S={seq} retry={retry_status}'.format(
-                        sub=self.TYPE_SUBTYPE_DICT[row['subtype']],
-                        seq=row['seq'],
-                        retry_status=row['retry'])
-                else:
-                    row['Info'] = '{} S={} retry={}\n'.format(
-                        row['subtype'], row['seq'], row['retry'])
-                writer.writerow(row)
-
-        utils.exe_cmd('rm -f {}'.format(temp_dump_file))
-        return log_file
-
-    def start_capture(self, network, chan, bw, duration=60):
-        """Starts sniffer capture on the specified machine.
-
-        Args:
-            network: dict describing network to sniff on.
-            duration: duration of sniff.
-        """
-        # Checking for existing sniffer processes
-        if self._started:
-            self.log.debug('Sniffer already running')
-            return
-
-        # Configure sniffer
-        self._configure_sniffer(network, chan, bw)
-        tshark_command = self._get_tshark_command(duration)
-        sniffer_command = self._get_sniffer_command(tshark_command)
-
-        # Starting sniffer capture by executing tshark command
-        self._run_tshark(sniffer_command)
-
-    def stop_capture(self, tag=''):
-        """Stops the sniffer.
-
-        Args:
-            tag: tag to be appended to the sniffer output file.
-        Returns:
-            log_file: path to sniffer dump.
-        """
-        # Checking if there is an ongoing sniffer capture
-        if not self._started:
-            self.log.debug('No sniffer process running')
-            return
-        # Killing sniffer process
-        self._stop_tshark()
-
-        # Processing writing capture output to file
-        log_file = self._get_full_file_path(tag)
-        self._sniffer_server.run('sudo chmod 777 {}'.format(
-            self._get_remote_dump_path()))
-        self._sniffer_server.pull_file(log_file, self._get_remote_dump_path())
-
-        if self.sniffer_output_file_type == 'csv':
-            log_file = self._process_tshark_dump(log_file)
-
-        self.sniffer_proc_pid = None
-        return log_file
-
-
-class TsharkSnifferOnUnix(TsharkSnifferBase):
-    """Class that implements Tshark based sniffer controller on Unix systems."""
-    def _scan_for_networks(self):
-        """Scans the wireless networks on the sniffer.
-
-        Returns:
-            scan_results : output of the scan command.
-        """
-        scan_command = '/usr/local/bin/airport -s'
-        scan_result = self._sniffer_server.run(scan_command).stdout
-
-        return scan_result
-
-    def _configure_sniffer(self, network, chan, bw):
-        """Connects to a wireless network using networksetup utility.
-
-        Args:
-            network: dictionary of network credentials; SSID and password.
-        """
-
-        self.log.debug('Connecting to network {}'.format(network['SSID']))
-
-        if 'password' not in network:
-            network['password'] = ''
-
-        connect_command = 'networksetup -setairportnetwork en0 {} {}'.format(
-            network['SSID'], network['password'])
-        self._sniffer_server.run(connect_command)
-
-
-class TsharkSnifferOnLinux(TsharkSnifferBase):
-    """Class that implements Tshark based sniffer controller on Linux."""
-    def __init__(self, config):
-        super().__init__(config)
-        self._init_sniffer()
-        self.channel = None
-        self.bandwidth = None
-
-    def _init_sniffer(self):
-        """Function to configure interface for the first time"""
-        self._sniffer_server.run('sudo modprobe -r iwlwifi')
-        self._sniffer_server.run('sudo dmesg -C')
-        self._sniffer_server.run('cat /dev/null | sudo tee /var/log/syslog')
-        self._sniffer_server.run('sudo modprobe iwlwifi debug=0x1')
-        # Wait for wifi config changes before trying to further configuration
-        # e.g. setting monitor mode (which will fail if above is not complete)
-        time.sleep(1)
-
-    def start_capture(self, network, chan, bw, duration=60):
-        """Starts sniffer capture on the specified machine.
-
-        Args:
-            network: dict describing network to sniff on.
-            duration: duration of sniff.
-        """
-        # If sniffer doesnt support the channel, return
-        if '6g' in str(chan):
-            self.log.debug('Channel not supported on sniffer')
-            return
-        # Checking for existing sniffer processes
-        if self._started:
-            self.log.debug('Sniffer already running')
-            return
-
-        # Configure sniffer
-        self._configure_sniffer(network, chan, bw)
-        tshark_command = self._get_tshark_command(duration)
-        sniffer_command = self._get_sniffer_command(tshark_command)
-
-        # Starting sniffer capture by executing tshark command
-        self._run_tshark(sniffer_command)
-
-    def set_monitor_mode(self, chan, bw):
-        """Function to configure interface to monitor mode
-
-        Brings up the sniffer wireless interface in monitor mode and
-        tunes it to the appropriate channel and bandwidth
-
-        Args:
-            chan: primary channel (int) to tune the sniffer to
-            bw: bandwidth (int) to tune the sniffer to
-        """
-        if chan == self.channel and bw == self.bandwidth:
-            return
-
-        self.channel = chan
-        self.bandwidth = bw
-
-        channel_map = {
-            80: {
-                tuple(range(36, 50, 2)): 42,
-                tuple(range(52, 66, 2)): 58,
-                tuple(range(100, 114, 2)): 106,
-                tuple(range(116, 130, 2)): 122,
-                tuple(range(132, 146, 2)): 138,
-                tuple(range(149, 163, 2)): 155
-            },
-            40: {
-                (36, 38, 40): 38,
-                (44, 46, 48): 46,
-                (52, 54, 56): 54,
-                (60, 62, 64): 62,
-                (100, 102, 104): 102,
-                (108, 110, 112): 108,
-                (116, 118, 120): 118,
-                (124, 126, 128): 126,
-                (132, 134, 136): 134,
-                (140, 142, 144): 142,
-                (149, 151, 153): 151,
-                (157, 159, 161): 159
-            },
-            160: {
-                (36, 38, 40): 50
-            }
-        }
-
-        if chan <= 13:
-            primary_freq = WifiEnums.channel_2G_to_freq[chan]
-        else:
-            primary_freq = WifiEnums.channel_5G_to_freq[chan]
-
-        self._sniffer_server.run('sudo ifconfig {} down'.format(
-            self.sniffer_interface))
-        self._sniffer_server.run('sudo iwconfig {} mode monitor'.format(
-            self.sniffer_interface))
-        self._sniffer_server.run('sudo ifconfig {} up'.format(
-            self.sniffer_interface))
-
-        if bw in channel_map:
-            for tuple_chan in channel_map[bw]:
-                if chan in tuple_chan:
-                    center_freq = WifiEnums.channel_5G_to_freq[channel_map[bw]
-                                                               [tuple_chan]]
-                    self._sniffer_server.run(
-                        'sudo iw dev {} set freq {} {} {}'.format(
-                            self.sniffer_interface, primary_freq, bw,
-                            center_freq))
-
-        else:
-            self._sniffer_server.run('sudo iw dev {} set freq {}'.format(
-                self.sniffer_interface, primary_freq))
-
-    def _configure_sniffer(self, network, chan, bw):
-        """ Connects to a wireless network using networksetup utility.
-
-        Args:
-            network: dictionary of network credentials; SSID and password.
-        """
-
-        self.log.debug('Setting monitor mode on Ch {}, bw {}'.format(chan, bw))
-        self.set_monitor_mode(chan, bw)
diff --git a/src/antlion/test_utils/wifi/p2p/WifiP2pBaseTest.py b/src/antlion/test_utils/wifi/p2p/WifiP2pBaseTest.py
deleted file mode 100644
index 5fb8081..0000000
--- a/src/antlion/test_utils/wifi/p2p/WifiP2pBaseTest.py
+++ /dev/null
@@ -1,159 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import antlion.utils
-import os
-import re
-import time
-
-from antlion import asserts
-from antlion import utils
-from antlion.base_test import BaseTestClass
-from antlion.keys import Config
-from antlion.test_utils.net import net_test_utils as nutils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.wifi.p2p import wifi_p2p_const as p2pconsts
-
-WAIT_TIME = 60
-
-
-class WifiP2pBaseTest(BaseTestClass):
-    def __init__(self, controllers):
-        if not hasattr(self, 'android_devices'):
-            super(WifiP2pBaseTest, self).__init__(controllers)
-
-    def setup_class(self):
-        for ad in self.android_devices:
-            ad.droid.wakeLockAcquireBright()
-            ad.droid.wakeUpNow()
-        required_params = ()
-        optional_params = ("skip_read_factory_mac", "pixel_models", "cnss_diag_file")
-        self.unpack_userparams(required_params,
-                               optional_params,
-                               skip_read_factory_mac=0)
-
-        self.dut1 = self.android_devices[0]
-        self.dut2 = self.android_devices[1]
-        if self.skip_read_factory_mac:
-            self.dut1_mac = None
-            self.dut2_mac = None
-        else:
-            self.dut1_mac = self.get_p2p_mac_address(self.dut1)
-            self.dut2_mac = self.get_p2p_mac_address(self.dut2)
-
-        #init location before init p2p
-        antlion.utils.set_location_service(self.dut1, True)
-        antlion.utils.set_location_service(self.dut2, True)
-
-        wutils.wifi_test_device_init(self.dut1)
-        utils.sync_device_time(self.dut1)
-        self.dut1.droid.wifiP2pInitialize()
-        time.sleep(p2pconsts.DEFAULT_FUNCTION_SWITCH_TIME)
-        asserts.assert_true(self.dut1.droid.wifiP2pIsEnabled(),
-                            "DUT1's p2p should be initialized but it didn't")
-        self.dut1.name = "Android_" + self.dut1.serial
-        self.dut1.droid.wifiP2pSetDeviceName(self.dut1.name)
-        wutils.wifi_test_device_init(self.dut2)
-        utils.sync_device_time(self.dut2)
-        self.dut2.droid.wifiP2pInitialize()
-        time.sleep(p2pconsts.DEFAULT_FUNCTION_SWITCH_TIME)
-        asserts.assert_true(self.dut2.droid.wifiP2pIsEnabled(),
-                            "DUT2's p2p should be initialized but it didn't")
-        self.dut2.name = "Android_" + self.dut2.serial
-        self.dut2.droid.wifiP2pSetDeviceName(self.dut2.name)
-
-        if len(self.android_devices) > 2:
-            self.dut3 = self.android_devices[2]
-            antlion.utils.set_location_service(self.dut3, True)
-            wutils.wifi_test_device_init(self.dut3)
-            utils.sync_device_time(self.dut3)
-            self.dut3.droid.wifiP2pInitialize()
-            time.sleep(p2pconsts.DEFAULT_FUNCTION_SWITCH_TIME)
-            asserts.assert_true(
-                self.dut3.droid.wifiP2pIsEnabled(),
-                "DUT3's p2p should be initialized but it didn't")
-            self.dut3.name = "Android_" + self.dut3.serial
-            self.dut3.droid.wifiP2pSetDeviceName(self.dut3.name)
-        if hasattr(self, "cnss_diag_file"):
-            if isinstance(self.cnss_diag_file, list):
-                self.cnss_diag_file = self.cnss_diag_file[0]
-            if not os.path.isfile(self.cnss_diag_file):
-                self.cnss_diag_file = os.path.join(
-                    self.user_params[Config.key_config_path.value],
-                    self.cnss_diag_file)
-
-    def teardown_class(self):
-        self.dut1.droid.wifiP2pClose()
-        self.dut2.droid.wifiP2pClose()
-        antlion.utils.set_location_service(self.dut1, False)
-        antlion.utils.set_location_service(self.dut2, False)
-
-        if len(self.android_devices) > 2:
-            self.dut3.droid.wifiP2pClose()
-            antlion.utils.set_location_service(self.dut3, False)
-        for ad in self.android_devices:
-            ad.droid.wakeLockRelease()
-            ad.droid.goToSleepNow()
-
-    def setup_test(self):
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.start_cnss_diags(
-                self.android_devices, self.cnss_diag_file, self.pixel_models)
-        self.tcpdump_proc = []
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                proc = nutils.start_tcpdump(ad, self.test_name)
-                self.tcpdump_proc.append((ad, proc))
-
-        for ad in self.android_devices:
-            ad.ed.clear_all_events()
-
-    def teardown_test(self):
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(
-                    proc[0], proc[1], self.test_name, pull_dump=False)
-        self.tcpdump_proc = []
-        for ad in self.android_devices:
-            # Clear p2p group info
-            ad.droid.wifiP2pRequestPersistentGroupInfo()
-            event = ad.ed.pop_event("WifiP2pOnPersistentGroupInfoAvailable",
-                                    p2pconsts.DEFAULT_TIMEOUT)
-            for network in event['data']:
-                ad.droid.wifiP2pDeletePersistentGroup(network['NetworkId'])
-            # Clear p2p local service
-            ad.droid.wifiP2pClearLocalServices()
-
-    def on_fail(self, test_name, begin_time):
-        for ad in self.android_devices:
-            ad.take_bug_report(test_name, begin_time)
-            ad.cat_adb_log(test_name, begin_time)
-            wutils.get_ssrdumps(ad)
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-            for ad in self.android_devices:
-                wutils.get_cnss_diag_log(ad)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(proc[0], proc[1], self.test_name)
-        self.tcpdump_proc = []
-
-    def get_p2p_mac_address(self, dut):
-        """Gets the current MAC address being used for Wi-Fi Direct."""
-        dut.reboot()
-        time.sleep(WAIT_TIME)
-        out = dut.adb.shell("ifconfig p2p0")
-        return re.match(".* HWaddr (\S+).*", out, re.S).group(1)
diff --git a/src/antlion/test_utils/wifi/p2p/wifi_p2p_const.py b/src/antlion/test_utils/wifi/p2p/wifi_p2p_const.py
deleted file mode 100644
index eb19776..0000000
--- a/src/antlion/test_utils/wifi/p2p/wifi_p2p_const.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-######################################################
-# Wifi P2p framework designed value
-######################################################
-P2P_FIND_TIMEOUT = 120
-GO_IP_ADDRESS = '192.168.49.1'
-
-######################################################
-# Wifi P2p Acts flow control timer value
-######################################################
-
-DEFAULT_TIMEOUT = 30
-DEFAULT_CONNECT_SLEEPTIME = 3
-DEFAULT_POLLING_SLEEPTIME = 1
-DEFAULT_SLEEPTIME = 5
-DEFAULT_FUNCTION_SWITCH_TIME = 10
-DEFAULT_SERVICE_WAITING_TIME = 20
-DEFAULT_GROUP_CLIENT_LOST_TIME = 60
-
-P2P_CONNECT_NEGOTIATION = 0
-P2P_CONNECT_JOIN = 1
-P2P_CONNECT_INVITATION = 2
-######################################################
-# Wifi P2p sl4a Event String
-######################################################
-CONNECTED_EVENT = "WifiP2pConnected"
-DISCONNECTED_EVENT = "WifiP2pDisconnected"
-PEER_AVAILABLE_EVENT = "WifiP2pOnPeersAvailable"
-CONNECTION_INFO_AVAILABLE_EVENT = "WifiP2pOnConnectionInfoAvailable"
-ONGOING_PEER_INFO_AVAILABLE_EVENT = "WifiP2pOnOngoingPeerAvailable"
-ONGOING_PEER_SET_SUCCESS_EVENT = "WifiP2psetP2pPeerConfigureOnSuccess"
-CONNECT_SUCCESS_EVENT = "WifiP2pConnectOnSuccess"
-CREATE_GROUP_SUCCESS_EVENT = "WifiP2pCreateGroupOnSuccess"
-SET_CHANNEL_SUCCESS_EVENT = "WifiP2pSetChannelsOnSuccess"
-GROUP_INFO_AVAILABLE_EVENT = "WifiP2pOnGroupInfoAvailable"
-
-######################################################
-# Wifi P2p local service event
-####################################################
-
-DNSSD_EVENT = "WifiP2pOnDnsSdServiceAvailable"
-DNSSD_TXRECORD_EVENT = "WifiP2pOnDnsSdTxtRecordAvailable"
-UPNP_EVENT = "WifiP2pOnUpnpServiceAvailable"
-
-DNSSD_EVENT_INSTANCENAME_KEY = "InstanceName"
-DNSSD_EVENT_REGISTRATIONTYPE_KEY = "RegistrationType"
-DNSSD_TXRECORD_EVENT_FULLDOMAINNAME_KEY = "FullDomainName"
-DNSSD_TXRECORD_EVENT_TXRECORDMAP_KEY = "TxtRecordMap"
-UPNP_EVENT_SERVICELIST_KEY = "ServiceList"
-
-######################################################
-# Wifi P2p local service type
-####################################################
-P2P_LOCAL_SERVICE_UPNP = 0
-P2P_LOCAL_SERVICE_IPP = 1
-P2P_LOCAL_SERVICE_AFP = 2
-
-######################################################
-# Wifi P2p group capability
-######################################################
-P2P_GROUP_CAPAB_GROUP_OWNER = 1
-
-
-######################################################
-# Wifi P2p UPnP MediaRenderer local service
-######################################################
-class UpnpTestData():
-    AVTransport = "urn:schemas-upnp-org:service:AVTransport:1"
-    ConnectionManager = "urn:schemas-upnp-org:service:ConnectionManager:1"
-    serviceType = "urn:schemas-upnp-org:device:MediaRenderer:1"
-    uuid = "6859dede-8574-59ab-9332-123456789011"
-    rootdevice = "upnp:rootdevice"
-
-
-######################################################
-# Wifi P2p Bonjour IPP & AFP local service
-######################################################
-class IppTestData():
-    ippInstanceName = "MyPrinter"
-    ippRegistrationType = "_ipp._tcp"
-    ippDomainName = "myprinter._ipp._tcp.local."
-    ipp_txtRecord = {"txtvers": "1", "pdl": "application/postscript"}
-
-
-class AfpTestData():
-    afpInstanceName = "Example"
-    afpRegistrationType = "_afpovertcp._tcp"
-    afpDomainName = "example._afpovertcp._tcp.local."
-    afp_txtRecord = {}
diff --git a/src/antlion/test_utils/wifi/p2p/wifi_p2p_test_utils.py b/src/antlion/test_utils/wifi/p2p/wifi_p2p_test_utils.py
deleted file mode 100755
index f9144b5..0000000
--- a/src/antlion/test_utils/wifi/p2p/wifi_p2p_test_utils.py
+++ /dev/null
@@ -1,739 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion import asserts
-from antlion import utils
-from antlion.test_utils.wifi.p2p import wifi_p2p_const as p2pconsts
-import antlion.utils
-
-
-def is_discovered(event, ad):
-    """Check an Android device exist in WifiP2pOnPeersAvailable event or not.
-
-    Args:
-        event: WifiP2pOnPeersAvailable which include all of p2p devices.
-        ad: The android device
-    Returns:
-        True: if an Android device exist in p2p list
-        False: if not exist
-    """
-    for device in event['data']['Peers']:
-        if device['Name'] == ad.name:
-            ad.deviceAddress = device['Address']
-            return True
-    return False
-
-
-def check_disconnect(ad, timeout=p2pconsts.DEFAULT_TIMEOUT):
-    """Check an Android device disconnect or not
-
-    Args:
-        ad: The android device
-    """
-    ad.droid.wifiP2pRequestConnectionInfo()
-    # wait disconnect event
-    ad.ed.pop_event(p2pconsts.DISCONNECTED_EVENT, timeout)
-
-
-def p2p_disconnect(ad):
-    """Invoke an Android device removeGroup to trigger p2p disconnect
-
-    Args:
-        ad: The android device
-    """
-    ad.log.debug("Disconnect")
-    ad.droid.wifiP2pRemoveGroup()
-    check_disconnect(ad)
-
-
-def p2p_connection_ping_test(ad, target_ip_address):
-    """Let an Android device to start ping target_ip_address
-
-    Args:
-        ad: The android device
-        target_ip_address: ip address which would like to ping
-    """
-    ad.log.debug("Run Ping Test, %s ping %s " % (ad.serial, target_ip_address))
-    asserts.assert_true(
-        antlion.utils.adb_shell_ping(ad,
-                                  count=6,
-                                  dest_ip=target_ip_address,
-                                  timeout=20), "%s ping failed" % (ad.serial))
-
-
-def is_go(ad):
-    """Check an Android p2p role is Go or not
-
-    Args:
-        ad: The android device
-    Return:
-        True: An Android device is p2p  go
-        False: An Android device is p2p gc
-    """
-    ad.log.debug("is go check")
-    ad.droid.wifiP2pRequestConnectionInfo()
-    ad_connect_info_event = ad.ed.pop_event(
-        p2pconsts.CONNECTION_INFO_AVAILABLE_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-    if ad_connect_info_event['data']['isGroupOwner']:
-        return True
-    return False
-
-
-def p2p_go_ip(ad):
-    """Get GO IP address
-
-    Args:
-        ad: The android device
-    Return:
-        GO IP address
-    """
-    ad.log.debug("p2p go ip")
-    ad.droid.wifiP2pRequestConnectionInfo()
-    ad_connect_info_event = ad.ed.pop_event(
-        p2pconsts.CONNECTION_INFO_AVAILABLE_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-    ad.log.debug("p2p go ip: %s" %
-                 ad_connect_info_event['data']['groupOwnerHostAddress'])
-    return ad_connect_info_event['data']['groupOwnerHostAddress']
-
-
-def p2p_get_current_group(ad):
-    """Get current group information
-
-    Args:
-        ad: The android device
-    Return:
-        p2p group information
-    """
-    ad.log.debug("get current group")
-    ad.droid.wifiP2pRequestGroupInfo()
-    ad_group_info_event = ad.ed.pop_event(p2pconsts.GROUP_INFO_AVAILABLE_EVENT,
-                                          p2pconsts.DEFAULT_TIMEOUT)
-    ad.log.debug(
-        "p2p group: SSID:%s, password:%s, owner address: %s, interface: %s" %
-        (ad_group_info_event['data']['NetworkName'],
-         ad_group_info_event['data']['Passphrase'],
-         ad_group_info_event['data']['OwnerAddress'],
-         ad_group_info_event['data']['Interface']))
-    return ad_group_info_event['data']
-
-
-def is_ongoing_peer_ready(peerConfig, waitForPin):
-    """Check whether the peer config is ready
-
-    Args:
-        peerConfig: the ongoing config
-        waitForPin: this config needs key or not
-    Return:
-        true for ready; false otherwise.
-    """
-    if peerConfig is None:
-        return False
-    if not peerConfig['data'][WifiP2PEnums.WifiP2pConfig.DEVICEADDRESS_KEY]:
-        return False
-    if not waitForPin:
-        return True
-    if WifiP2PEnums.WpsInfo.WPS_PIN_KEY in peerConfig['data'][
-            WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY]:
-        return True
-    return False
-
-
-def wait_for_ongoing_peer_ready(ad, waitForPin, maxPollingCount):
-    """wait for the ongoing peer data ready
-
-    Args:
-        ad: The android device
-        waitForPin: this config needs key or not
-        maxPollingCount: the max polling count
-    Return:
-        the ongoing peer config
-    """
-    ad_peerConfig = None
-    ad.log.debug("%s is waiting for the ongoing peer, max polling count %s" %
-                 (ad.name, maxPollingCount))
-    while maxPollingCount > 0:
-        ad.droid.requestP2pPeerConfigure()
-        ad_peerConfig = ad.ed.pop_event(
-            p2pconsts.ONGOING_PEER_INFO_AVAILABLE_EVENT,
-            p2pconsts.DEFAULT_TIMEOUT)
-        maxPollingCount -= 1
-        if is_ongoing_peer_ready(ad_peerConfig, waitForPin):
-            break
-        ad.log.debug("%s is not ready for next step" % (ad.name))
-        time.sleep(p2pconsts.DEFAULT_POLLING_SLEEPTIME)
-    asserts.assert_true(
-        ad_peerConfig['data'][WifiP2PEnums.WifiP2pConfig.DEVICEADDRESS_KEY],
-        "DUT %s does not receive the request." % (ad.name))
-    ad.log.debug(ad_peerConfig['data'])
-    return ad_peerConfig
-
-
-#trigger p2p connect to ad2 from ad1
-def p2p_connect(ad1,
-                ad2,
-                isReconnect,
-                wpsSetup,
-                p2p_connect_type=p2pconsts.P2P_CONNECT_NEGOTIATION,
-                go_ad=None):
-    """trigger p2p connect to ad2 from ad1
-
-    Args:
-        ad1: The android device
-        ad2: The android device
-        isReconnect: boolean, if persist group is exist,
-                isReconnect is true, otherswise is false.
-        wpsSetup: which wps connection would like to use
-        p2p_connect_type: enumeration, which type this p2p connection is
-        go_ad: The group owner android device which is used for the invitation connection
-    """
-    ad1.log.info("Create p2p connection from %s to %s via wps: %s type %d" %
-                 (ad1.name, ad2.name, wpsSetup, p2p_connect_type))
-    if p2p_connect_type == p2pconsts.P2P_CONNECT_INVITATION:
-        if go_ad is None:
-            go_ad = ad1
-        find_p2p_device(ad1, ad2)
-        # GO might be another peer, so ad2 needs to find it first.
-        find_p2p_group_owner(ad2, go_ad)
-    elif p2p_connect_type == p2pconsts.P2P_CONNECT_JOIN:
-        find_p2p_group_owner(ad1, ad2)
-    else:
-        find_p2p_device(ad1, ad2)
-    time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-    wifi_p2p_config = {
-        WifiP2PEnums.WifiP2pConfig.DEVICEADDRESS_KEY: ad2.deviceAddress,
-        WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY: {
-            WifiP2PEnums.WpsInfo.WPS_SETUP_KEY: wpsSetup
-        }
-    }
-    ad1.droid.wifiP2pConnect(wifi_p2p_config)
-    ad1.ed.pop_event(p2pconsts.CONNECT_SUCCESS_EVENT,
-                     p2pconsts.DEFAULT_TIMEOUT)
-    if not isReconnect:
-        # ad1 is the initiator, it should be ready soon.
-        ad1_peerConfig = wait_for_ongoing_peer_ready(
-            ad1, wpsSetup == WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_DISPLAY, 6)
-        # auto-join tries 10 times to find groups, and
-        # one round takes 2 - 3 seconds.
-        ad2_peerConfig = wait_for_ongoing_peer_ready(
-            ad2, wpsSetup == WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_KEYPAD, 31)
-        if wpsSetup == WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_DISPLAY:
-            asserts.assert_true(
-                WifiP2PEnums.WpsInfo.WPS_PIN_KEY in ad1_peerConfig['data'][
-                    WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY],
-                "Can't get display pin value")
-            ad2_peerConfig['data'][WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY][
-                WifiP2PEnums.WpsInfo.WPS_PIN_KEY] = ad1_peerConfig['data'][
-                    WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY][
-                        WifiP2PEnums.WpsInfo.WPS_PIN_KEY]
-            ad2.droid.setP2pPeerConfigure(ad2_peerConfig['data'])
-            ad2.ed.pop_event(p2pconsts.ONGOING_PEER_SET_SUCCESS_EVENT,
-                             p2pconsts.DEFAULT_TIMEOUT)
-            ad2.droid.wifiP2pAcceptConnection()
-        elif wpsSetup == WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_KEYPAD:
-            asserts.assert_true(
-                WifiP2PEnums.WpsInfo.WPS_PIN_KEY in ad2_peerConfig['data'][
-                    WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY],
-                "Can't get keypad pin value")
-            ad1_peerConfig['data'][WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY][
-                WifiP2PEnums.WpsInfo.WPS_PIN_KEY] = ad2_peerConfig['data'][
-                    WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY][
-                        WifiP2PEnums.WpsInfo.WPS_PIN_KEY]
-            ad1.droid.setP2pPeerConfigure(ad1_peerConfig['data'])
-            ad1.ed.pop_event(p2pconsts.ONGOING_PEER_SET_SUCCESS_EVENT,
-                             p2pconsts.DEFAULT_TIMEOUT)
-            ad1.droid.wifiP2pAcceptConnection()
-            time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-            ad2.droid.wifiP2pConfirmConnection()
-        elif wpsSetup == WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_PBC:
-            ad2.droid.wifiP2pAcceptConnection()
-            if p2p_connect_type == p2pconsts.P2P_CONNECT_INVITATION:
-                time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-                go_ad.droid.wifiP2pAcceptConnection()
-
-    #wait connected event
-    if p2p_connect_type == p2pconsts.P2P_CONNECT_INVITATION:
-        go_ad.ed.pop_event(p2pconsts.CONNECTED_EVENT,
-                           p2pconsts.DEFAULT_TIMEOUT)
-    else:
-        ad1.ed.pop_event(p2pconsts.CONNECTED_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-    ad2.ed.pop_event(p2pconsts.CONNECTED_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-
-
-def p2p_connect_with_config(ad1, ad2, network_name, passphrase, band):
-    """trigger p2p connect to ad2 from ad1 with config
-
-    Args:
-        ad1: The android device
-        ad2: The android device
-        network_name: the network name of the desired group.
-        passphrase: the passphrase of the desired group.
-        band: the operating band of the desired group.
-    """
-    ad1.log.info("Create p2p connection from %s to %s" % (ad1.name, ad2.name))
-    find_p2p_device(ad1, ad2)
-    time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-    wifi_p2p_config = {
-        WifiP2PEnums.WifiP2pConfig.NETWORK_NAME: network_name,
-        WifiP2PEnums.WifiP2pConfig.PASSPHRASE: passphrase,
-        WifiP2PEnums.WifiP2pConfig.GROUP_BAND: band,
-        WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY: {
-            WifiP2PEnums.WpsInfo.WPS_SETUP_KEY:
-            WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_PBC
-        }
-    }
-    ad1.droid.wifiP2pConnect(wifi_p2p_config)
-    ad1.ed.pop_event(p2pconsts.CONNECT_SUCCESS_EVENT,
-                     p2pconsts.DEFAULT_TIMEOUT)
-    time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-
-    #wait connected event
-    ad1.ed.pop_event(p2pconsts.CONNECTED_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-    ad2.ed.pop_event(p2pconsts.CONNECTED_EVENT, p2pconsts.DEFAULT_TIMEOUT)
-
-
-def find_p2p_device(ad1, ad2):
-    """Check an Android device ad1 can discover an Android device ad2
-
-    Args:
-        ad1: The android device
-        ad2: The android device
-    """
-    ad1.droid.wifiP2pDiscoverPeers()
-    ad2.droid.wifiP2pDiscoverPeers()
-    p2p_find_result = False
-    ad1.ed.clear_events(p2pconsts.PEER_AVAILABLE_EVENT)
-    while not p2p_find_result:
-        ad1_event = ad1.ed.pop_event(p2pconsts.PEER_AVAILABLE_EVENT,
-                                     p2pconsts.P2P_FIND_TIMEOUT)
-        ad1.log.debug(ad1_event['data'])
-        p2p_find_result = is_discovered(ad1_event, ad2)
-    asserts.assert_true(p2p_find_result,
-                        "DUT didn't discovered peer:%s device" % (ad2.name))
-
-
-def find_p2p_group_owner(ad1, ad2):
-    """Check an Android device ad1 can discover an Android device ad2 which
-       is a group owner
-
-    Args:
-        ad1: The android device
-        ad2: The android device which is a group owner
-    """
-    p2p_find_result = False
-    ad1.ed.clear_events(p2pconsts.PEER_AVAILABLE_EVENT)
-    while not p2p_find_result:
-        ad2.droid.wifiP2pStopPeerDiscovery()
-        ad1.droid.wifiP2pStopPeerDiscovery()
-        ad2.droid.wifiP2pDiscoverPeers()
-        ad1.droid.wifiP2pDiscoverPeers()
-        ad1_event = ad1.ed.pop_event(p2pconsts.PEER_AVAILABLE_EVENT,
-                                     p2pconsts.P2P_FIND_TIMEOUT)
-        ad1.log.debug(ad1_event['data'])
-        for device in ad1_event['data']['Peers']:
-            if (device['Name'] == ad2.name and int(device['GroupCapability'])
-                    & p2pconsts.P2P_GROUP_CAPAB_GROUP_OWNER):
-                ad2.deviceAddress = device['Address']
-                p2p_find_result = True
-    asserts.assert_true(
-        p2p_find_result,
-        "DUT didn't discovered group owner peer:%s device" % (ad2.name))
-
-
-def createP2pLocalService(ad, serviceCategory):
-    """Based on serviceCategory to create p2p local service
-            on an Android device ad
-
-    Args:
-        ad: The android device
-        serviceCategory: p2p local service type, UPNP / IPP / AFP,
-    """
-    testData = genTestData(serviceCategory)
-    if serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_UPNP:
-        ad.droid.wifiP2pCreateUpnpServiceInfo(testData[0], testData[1],
-                                              testData[2])
-    elif (serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_IPP
-          or serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_AFP):
-        ad.droid.wifiP2pCreateBonjourServiceInfo(testData[0], testData[1],
-                                                 testData[2])
-    ad.droid.wifiP2pAddLocalService()
-
-
-def requestServiceAndCheckResult(ad_serviceProvider, ad_serviceReceiver,
-                                 serviceType, queryString1, queryString2):
-    """Based on serviceType and query info, check service request result
-            same as expect or not on an Android device ad_serviceReceiver.
-            And remove p2p service request after result check.
-
-    Args:
-        ad_serviceProvider: The android device which provide p2p local service
-        ad_serviceReceiver: The android device which query p2p local service
-        serviceType: P2p local service type, Upnp or Bonjour
-        queryString1: Query String, NonNull
-        queryString2: Query String, used for Bonjour, Nullable
-    """
-    expectData = genExpectTestData(serviceType, queryString1, queryString2)
-    find_p2p_device(ad_serviceReceiver, ad_serviceProvider)
-    ad_serviceReceiver.droid.wifiP2pStopPeerDiscovery()
-    ad_serviceReceiver.droid.wifiP2pClearServiceRequests()
-    time.sleep(p2pconsts.DEFAULT_FUNCTION_SWITCH_TIME)
-
-    ad_serviceReceiver.droid.wifiP2pDiscoverServices()
-    serviceData = {}
-    service_id = 0
-    if (serviceType ==
-            WifiP2PEnums.WifiP2pServiceInfo.WIFI_P2P_SERVICE_TYPE_BONJOUR):
-        ad_serviceReceiver.log.info(
-            "Request bonjour service in \
-                %s with Query String %s and %s " %
-            (ad_serviceReceiver.name, queryString1, queryString2))
-        ad_serviceReceiver.log.info("expectData %s" % expectData)
-        if queryString1 != None:
-            service_id = ad_serviceReceiver.droid.wifiP2pAddDnssdServiceRequest(
-                queryString1, queryString2)
-        else:
-            service_id = ad_serviceReceiver.droid.wifiP2pAddServiceRequest(
-                serviceType)
-            ad_serviceReceiver.log.info("request bonjour service id %s" %
-                                        service_id)
-        ad_serviceReceiver.droid.wifiP2pSetDnsSdResponseListeners()
-        ad_serviceReceiver.droid.wifiP2pDiscoverServices()
-        ad_serviceReceiver.log.info("Check Service Listener")
-        time.sleep(p2pconsts.DEFAULT_SERVICE_WAITING_TIME)
-        try:
-            dnssd_events = ad_serviceReceiver.ed.pop_all(p2pconsts.DNSSD_EVENT)
-            dnssd_txrecord_events = ad_serviceReceiver.ed.pop_all(
-                p2pconsts.DNSSD_TXRECORD_EVENT)
-            dns_service = WifiP2PEnums.WifiP2pDnsSdServiceResponse()
-            for dnssd_event in dnssd_events:
-                if dnssd_event['data'][
-                        'SourceDeviceAddress'] == ad_serviceProvider.deviceAddress:
-                    dns_service.InstanceName = dnssd_event['data'][
-                        p2pconsts.DNSSD_EVENT_INSTANCENAME_KEY]
-                    dns_service.RegistrationType = dnssd_event['data'][
-                        p2pconsts.DNSSD_EVENT_REGISTRATIONTYPE_KEY]
-                    dns_service.FullDomainName = ""
-                    dns_service.TxtRecordMap = ""
-                    serviceData[dns_service.toString()] = 1
-            for dnssd_txrecord_event in dnssd_txrecord_events:
-                if dnssd_txrecord_event['data'][
-                        'SourceDeviceAddress'] == ad_serviceProvider.deviceAddress:
-                    dns_service.InstanceName = ""
-                    dns_service.RegistrationType = ""
-                    dns_service.FullDomainName = dnssd_txrecord_event['data'][
-                        p2pconsts.DNSSD_TXRECORD_EVENT_FULLDOMAINNAME_KEY]
-                    dns_service.TxtRecordMap = dnssd_txrecord_event['data'][
-                        p2pconsts.DNSSD_TXRECORD_EVENT_TXRECORDMAP_KEY]
-                    serviceData[dns_service.toString()] = 1
-            ad_serviceReceiver.log.info("serviceData %s" % serviceData)
-            if len(serviceData) == 0:
-                ad_serviceReceiver.droid.wifiP2pRemoveServiceRequest(
-                    service_id)
-                return -1
-        except queue.Empty as error:
-            ad_serviceReceiver.log.info("dnssd event is empty", )
-    elif (serviceType ==
-          WifiP2PEnums.WifiP2pServiceInfo.WIFI_P2P_SERVICE_TYPE_UPNP):
-        ad_serviceReceiver.log.info(
-            "Request upnp service in %s with Query String %s " %
-            (ad_serviceReceiver.name, queryString1))
-        ad_serviceReceiver.log.info("expectData %s" % expectData)
-        if queryString1 != None:
-            service_id = ad_serviceReceiver.droid.wifiP2pAddUpnpServiceRequest(
-                queryString1)
-        else:
-            service_id = ad_serviceReceiver.droid.wifiP2pAddServiceRequest(
-                WifiP2PEnums.WifiP2pServiceInfo.WIFI_P2P_SERVICE_TYPE_UPNP)
-        ad_serviceReceiver.droid.wifiP2pSetUpnpResponseListeners()
-        ad_serviceReceiver.droid.wifiP2pDiscoverServices()
-        ad_serviceReceiver.log.info("Check Service Listener")
-        time.sleep(p2pconsts.DEFAULT_SERVICE_WAITING_TIME)
-        try:
-            upnp_events = ad_serviceReceiver.ed.pop_all(p2pconsts.UPNP_EVENT)
-            for upnp_event in upnp_events:
-                if upnp_event['data']['Device'][
-                        'Address'] == ad_serviceProvider.deviceAddress:
-                    for service in upnp_event['data'][
-                            p2pconsts.UPNP_EVENT_SERVICELIST_KEY]:
-                        serviceData[service] = 1
-            ad_serviceReceiver.log.info("serviceData %s" % serviceData)
-            if len(serviceData) == 0:
-                ad_serviceReceiver.droid.wifiP2pRemoveServiceRequest(
-                    service_id)
-                return -1
-        except queue.Empty as error:
-            ad_serviceReceiver.log.info("p2p upnp event is empty", )
-
-    ad_serviceReceiver.log.info("Check ServiceList")
-    asserts.assert_true(checkServiceQueryResult(serviceData, expectData),
-                        "ServiceList not same as Expect")
-    # After service checked, remove the service_id
-    ad_serviceReceiver.droid.wifiP2pRemoveServiceRequest(service_id)
-    return 0
-
-
-def requestServiceAndCheckResultWithRetry(ad_serviceProvider,
-                                          ad_serviceReceiver,
-                                          serviceType,
-                                          queryString1,
-                                          queryString2,
-                                          retryCount=3):
-    """ allow failures for requestServiceAndCheckResult. Service
-        discovery might fail unexpectedly because the request packet might not be
-        recevied by the service responder due to p2p state switch.
-
-    Args:
-        ad_serviceProvider: The android device which provide p2p local service
-        ad_serviceReceiver: The android device which query p2p local service
-        serviceType: P2p local service type, Upnp or Bonjour
-        queryString1: Query String, NonNull
-        queryString2: Query String, used for Bonjour, Nullable
-        retryCount: maximum retry count, default is 3
-    """
-    ret = 0
-    while retryCount > 0:
-        ret = requestServiceAndCheckResult(ad_serviceProvider,
-                                           ad_serviceReceiver, serviceType,
-                                           queryString1, queryString2)
-        if (ret == 0):
-            break
-        retryCount -= 1
-
-    asserts.assert_equal(0, ret, "cannot find any services with retries.")
-
-
-def checkServiceQueryResult(serviceList, expectServiceList):
-    """Check serviceList same as expectServiceList or not
-
-    Args:
-        serviceList: ServiceList which get from query result
-        expectServiceList: ServiceList which hardcode in genExpectTestData
-    Return:
-        True: serviceList  same as expectServiceList
-        False:Exist discrepancy between serviceList and expectServiceList
-    """
-    tempServiceList = serviceList.copy()
-    tempExpectServiceList = expectServiceList.copy()
-    for service in serviceList.keys():
-        if service in expectServiceList:
-            del tempServiceList[service]
-            del tempExpectServiceList[service]
-    return len(tempExpectServiceList) == 0 and len(tempServiceList) == 0
-
-
-def genTestData(serviceCategory):
-    """Based on serviceCategory to generator Test Data
-
-    Args:
-        serviceCategory: P2p local service type, Upnp or Bonjour
-    Return:
-        TestData
-    """
-    testData = []
-    if serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_UPNP:
-        testData.append(p2pconsts.UpnpTestData.uuid)
-        testData.append(p2pconsts.UpnpTestData.serviceType)
-        testData.append([
-            p2pconsts.UpnpTestData.AVTransport,
-            p2pconsts.UpnpTestData.ConnectionManager
-        ])
-    elif serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_IPP:
-        testData.append(p2pconsts.IppTestData.ippInstanceName)
-        testData.append(p2pconsts.IppTestData.ippRegistrationType)
-        testData.append(p2pconsts.IppTestData.ipp_txtRecord)
-    elif serviceCategory == p2pconsts.P2P_LOCAL_SERVICE_AFP:
-        testData.append(p2pconsts.AfpTestData.afpInstanceName)
-        testData.append(p2pconsts.AfpTestData.afpRegistrationType)
-        testData.append(p2pconsts.AfpTestData.afp_txtRecord)
-
-    return testData
-
-
-def genExpectTestData(serviceType, queryString1, queryString2):
-    """Based on serviceCategory to generator expect serviceList
-
-    Args:
-        serviceType: P2p local service type, Upnp or Bonjour
-        queryString1: Query String, NonNull
-        queryString2: Query String, used for Bonjour, Nullable
-    Return:
-        expectServiceList
-    """
-    expectServiceList = {}
-    if (serviceType ==
-            WifiP2PEnums.WifiP2pServiceInfo.WIFI_P2P_SERVICE_TYPE_BONJOUR):
-        ipp_service = WifiP2PEnums.WifiP2pDnsSdServiceResponse()
-        afp_service = WifiP2PEnums.WifiP2pDnsSdServiceResponse()
-        if queryString1 == p2pconsts.IppTestData.ippRegistrationType:
-            if queryString2 == p2pconsts.IppTestData.ippInstanceName:
-                ipp_service.InstanceName = ""
-                ipp_service.RegistrationType = ""
-                ipp_service.FullDomainName = p2pconsts.IppTestData.ippDomainName
-                ipp_service.TxtRecordMap = p2pconsts.IppTestData.ipp_txtRecord
-                expectServiceList[ipp_service.toString()] = 1
-                return expectServiceList
-            ipp_service.InstanceName = p2pconsts.IppTestData.ippInstanceName
-            ipp_service.RegistrationType = (
-                p2pconsts.IppTestData.ippRegistrationType + ".local.")
-            ipp_service.FullDomainName = ""
-            ipp_service.TxtRecordMap = ""
-            expectServiceList[ipp_service.toString()] = 1
-            return expectServiceList
-        elif queryString1 == p2pconsts.AfpTestData.afpRegistrationType:
-            if queryString2 == p2pconsts.AfpTestData.afpInstanceName:
-                afp_service.InstanceName = ""
-                afp_service.RegistrationType = ""
-                afp_service.FullDomainName = p2pconsts.AfpTestData.afpDomainName
-                afp_service.TxtRecordMap = p2pconsts.AfpTestData.afp_txtRecord
-                expectServiceList[afp_service.toString()] = 1
-                return expectServiceList
-        ipp_service.InstanceName = p2pconsts.IppTestData.ippInstanceName
-        ipp_service.RegistrationType = (
-            p2pconsts.IppTestData.ippRegistrationType + ".local.")
-        ipp_service.FullDomainName = ""
-        ipp_service.TxtRecordMap = ""
-        expectServiceList[ipp_service.toString()] = 1
-
-        ipp_service.InstanceName = ""
-        ipp_service.RegistrationType = ""
-        ipp_service.FullDomainName = p2pconsts.IppTestData.ippDomainName
-        ipp_service.TxtRecordMap = p2pconsts.IppTestData.ipp_txtRecord
-        expectServiceList[ipp_service.toString()] = 1
-
-        afp_service.InstanceName = p2pconsts.AfpTestData.afpInstanceName
-        afp_service.RegistrationType = (
-            p2pconsts.AfpTestData.afpRegistrationType + ".local.")
-        afp_service.FullDomainName = ""
-        afp_service.TxtRecordMap = ""
-        expectServiceList[afp_service.toString()] = 1
-
-        afp_service.InstanceName = ""
-        afp_service.RegistrationType = ""
-        afp_service.FullDomainName = p2pconsts.AfpTestData.afpDomainName
-        afp_service.TxtRecordMap = p2pconsts.AfpTestData.afp_txtRecord
-        expectServiceList[afp_service.toString()] = 1
-
-        return expectServiceList
-    elif serviceType == WifiP2PEnums.WifiP2pServiceInfo.WIFI_P2P_SERVICE_TYPE_UPNP:
-        upnp_service = "uuid:" + p2pconsts.UpnpTestData.uuid + "::" + (
-            p2pconsts.UpnpTestData.rootdevice)
-        expectServiceList[upnp_service] = 1
-        if queryString1 != "upnp:rootdevice":
-            upnp_service = "uuid:" + p2pconsts.UpnpTestData.uuid + (
-                "::" + p2pconsts.UpnpTestData.AVTransport)
-            expectServiceList[upnp_service] = 1
-            upnp_service = "uuid:" + p2pconsts.UpnpTestData.uuid + (
-                "::" + p2pconsts.UpnpTestData.ConnectionManager)
-            expectServiceList[upnp_service] = 1
-            upnp_service = "uuid:" + p2pconsts.UpnpTestData.uuid + (
-                "::" + p2pconsts.UpnpTestData.serviceType)
-            expectServiceList[upnp_service] = 1
-            upnp_service = "uuid:" + p2pconsts.UpnpTestData.uuid
-            expectServiceList[upnp_service] = 1
-
-    return expectServiceList
-
-
-def p2p_create_group(ad):
-    """Create a group as Group Owner
-
-    Args:
-        ad: The android device
-    """
-    ad.droid.wifiP2pCreateGroup()
-    ad.ed.pop_event(p2pconsts.CREATE_GROUP_SUCCESS_EVENT,
-                    p2pconsts.DEFAULT_TIMEOUT)
-    time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-
-
-def p2p_create_group_with_config(ad, network_name, passphrase, band):
-    """Create a group as Group Owner
-
-    Args:
-        ad: The android device
-    """
-    wifi_p2p_config = {
-        WifiP2PEnums.WifiP2pConfig.NETWORK_NAME: network_name,
-        WifiP2PEnums.WifiP2pConfig.PASSPHRASE: passphrase,
-        WifiP2PEnums.WifiP2pConfig.GROUP_BAND: band,
-        WifiP2PEnums.WifiP2pConfig.WPSINFO_KEY: {
-            WifiP2PEnums.WpsInfo.WPS_SETUP_KEY:
-            WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_PBC
-        }
-    }
-    ad.droid.wifiP2pCreateGroupWithConfig(wifi_p2p_config)
-    ad.ed.pop_event(p2pconsts.CREATE_GROUP_SUCCESS_EVENT,
-                    p2pconsts.DEFAULT_TIMEOUT)
-    time.sleep(p2pconsts.DEFAULT_SLEEPTIME)
-
-
-def wifi_p2p_set_channels_for_current_group(ad, listening_chan,
-                                            operating_chan):
-    """Sets the listening channel and operating channel of the current group
-       created with initialize.
-
-    Args:
-        ad: The android device
-        listening_chan: Integer, the listening channel
-        operating_chan: Integer, the operating channel
-    """
-    ad.droid.wifiP2pSetChannelsForCurrentGroup(listening_chan, operating_chan)
-    ad.ed.pop_event(p2pconsts.SET_CHANNEL_SUCCESS_EVENT,
-                    p2pconsts.DEFAULT_TIMEOUT)
-
-
-class WifiP2PEnums():
-    class WifiP2pConfig():
-        DEVICEADDRESS_KEY = "deviceAddress"
-        WPSINFO_KEY = "wpsInfo"
-        GO_INTENT_KEY = "groupOwnerIntent"
-        NETID_KEY = "netId"
-        NETWORK_NAME = "networkName"
-        PASSPHRASE = "passphrase"
-        GROUP_BAND = "groupOwnerBand"
-
-    class WpsInfo():
-        WPS_SETUP_KEY = "setup"
-        BSSID_KEY = "BSSID"
-        WPS_PIN_KEY = "pin"
-        #TODO: remove it from wifi_test_utils.py
-        WIFI_WPS_INFO_PBC = 0
-        WIFI_WPS_INFO_DISPLAY = 1
-        WIFI_WPS_INFO_KEYPAD = 2
-        WIFI_WPS_INFO_LABEL = 3
-        WIFI_WPS_INFO_INVALID = 4
-
-    class WifiP2pServiceInfo():
-        #TODO: remove it from wifi_test_utils.py
-        # Macros for wifi p2p.
-        WIFI_P2P_SERVICE_TYPE_ALL = 0
-        WIFI_P2P_SERVICE_TYPE_BONJOUR = 1
-        WIFI_P2P_SERVICE_TYPE_UPNP = 2
-        WIFI_P2P_SERVICE_TYPE_VENDOR_SPECIFIC = 255
-
-    class WifiP2pDnsSdServiceResponse():
-        def __init__(self):
-            pass
-
-        InstanceName = ""
-        RegistrationType = ""
-        FullDomainName = ""
-        TxtRecordMap = {}
-
-        def toString(self):
-            return self.InstanceName + self.RegistrationType + (
-                self.FullDomainName + str(self.TxtRecordMap))
diff --git a/src/antlion/test_utils/wifi/pdu_controller_utils.py b/src/antlion/test_utils/wifi/pdu_controller_utils.py
deleted file mode 100644
index e243d33..0000000
--- a/src/antlion/test_utils/wifi/pdu_controller_utils.py
+++ /dev/null
@@ -1,203 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import time
-from antlion import logger
-from antlion import utils
-
-
-def create(configs):
-    """Factory method for PDU.
-
-    Args:
-        configs: list of dicts with pdu settings. settings must contain the
-        following type (string denoting type of pdu)
-    """
-    objs = []
-    for config in configs:
-        try:
-            pdu_class = globals()[config['device']]
-        except KeyError:
-            raise KeyError('Invalid pdu configuration.')
-        objs.append(pdu_class(config))
-    return objs
-
-
-def destroy(objs):
-    return
-
-
-class Pdu(object):
-    """Base class implementation for PDU.
-
-    Base class provides functions whose implementation is shared by all
-    chambers.
-    """
-
-    def on_all(self):
-        """Turn on all outlets."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def off_all(self):
-        """Turn off all outlets."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def _set_status(self, action, status):
-        """Set outlets to on or off."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def get_status(self):
-        """Get outlets status."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def turn_on_outlets(self, outlets):
-        """Turn on specific outlets."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def turn_off_outlets(self, outlets):
-        """Turn off specific outlets."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-
-class PanioPs1158(Pdu):
-    def __init__(self, config):
-        self.config = config.copy()
-        self.device_id = self.config['device_id']
-        self.log = logger.create_tagged_trace_logger('pdu_ps1158[{}]'.format(
-            self.device_id))
-
-    def on_all(self):
-        """Turn on all outlets"""
-        self._set_status("on", '11111111')
-
-    def off_all(self):
-        """Turn off all outlets"""
-        self._set_status("off", "11111111")
-
-    def _set_status(self, action, status):
-        """Set outlets to on or off.
-
-        Args:
-            action: "on" or "off"
-            status: 8 bits of 0 or 1. e.g., "11111111"
-        """
-        cmd = "curl http://{}:{}@{}/{}s.cgi?led={}".format(self.config['username'],
-                                                           self.config['password'],
-                                                           self.config['host'],
-                                                           action,
-                                                           status)
-        self.log.info("PDU cmd: {}".format(cmd))
-        utils.start_standing_subprocess(cmd)
-        time.sleep(10)
-
-    def get_status(self):
-        """Get outlets status
-
-        Returns:
-            A tuple of (outlets_list, outlets_str)
-                outlets_list:
-                    A List indicates the status of the outlets.
-                    e.g., outlet 1 is ON, returns:
-                        ['1', '0', '0', '0', '0', '0', '0', '0',]
-                    e.g., outlets 1 & 8 are ON, returns:
-                        ['1', '0', '0', '0', '0', '0', '0', '1']
-
-                outlets_str:
-                    A string indicates the status of the outlets.
-                    e.g., outlet 1 is ON:
-                        returns: '1'
-                    e.g., outlet 1 & 3 $ 8 are ON:
-                        returns: '138'
-        """
-        outlets_str = ""
-        cmd = "curl http://{}:{}@{}/status.xml".format(self.config['username'],
-                                                       self.config['password'],
-                                                       self.config['host'])
-        proc = utils.start_standing_subprocess(cmd)
-        time.sleep(1)
-        try:
-            outlets_list = proc.communicate()[0].decode().split(",")[10:18]
-
-            """Translate a list of strings to a sequence of strings.
-            e.g.
-                ['1', '0', '0', '0', '0', '0', '0', '0',] turns into '1'
-                ['1', '1', '1', '1', '1', '1', '1', '1'] turns into '12345678'
-            """
-            for i in range(len(outlets_list)):
-                if outlets_list[i] == '1':
-                    outlets_str = outlets_str + str(i + 1)
-        except:
-            raise KeyError("Fail to get status from PDU.")
-
-        return outlets_list, outlets_str
-
-    def turn_on_outlets(self, outlets):
-        """Turn specific outlets on
-
-        Args:
-            outlets: A string of outlet numbers.
-            e.g., '1' means outlets status will be: '10000000'
-            e.g., '378' means outlets status will be: '00100011'
-        """
-        self.off_all()
-        expect_outlets = ["1" if str(i) in outlets else "0" for i in range(1, 9)]
-        self._set_status("on", "".join(expect_outlets))
-
-        # Check if outlets are on as expected.
-        actual_outlets, _ = self.get_status()
-        self.log.info("Expect outlets : {}".format(expect_outlets))
-        self.log.info("Actual outlets : {}".format(actual_outlets))
-        if expect_outlets == actual_outlets:
-            self.log.info("Outlets are ON as expected")
-        else:
-            self.log.error("Outlets are not correctly turn on")
-
-    def turn_off_outlets(self, outlets):
-        """Turn specific outlets off
-
-        Args:
-            outlets: A string of outlet numbers.
-            e.g., '1' means outlets status will be: '01111111'
-            e.g., '378' means outlets status will be: '11011100'
-
-        """
-        self.on_all()
-        expect_outlets = ["1" if str(i) in outlets else "0" for i in range(1, 9)]
-        self._set_status("off", "".join(expect_outlets))
-
-        # Check if outlets are on as expected.
-        actual_outlets, _ = self.get_status()
-        temp_list = []
-
-        """When Turn off outlets, Panio ps1158 use "1" to turn off a outlet
-        (e.g., curl http://{}@{}/offs.cgi?led=00000001 to turn off outlet 8,
-        but actual outlets status will be '11111110', so need to
-        Turn "1" into "0" and vice versa to match the actual outlets status.
-        """
-        for outlet in expect_outlets:
-            if outlet == '1':
-                outlet = '0'
-                temp_list.append(outlet)
-            elif outlet == '0':
-                outlet = '1'
-                temp_list.append(outlet)
-        expect_outlets = temp_list
-        self.log.info("Expect outlets : {}".format(expect_outlets))
-        self.log.info("Actual outlets : {}".format(actual_outlets))
-        if expect_outlets == actual_outlets:
-            self.log.info("Outlets are OFF as expected")
-        else:
-            self.log.error("Outlets are not correctly turn off")
diff --git a/src/antlion/test_utils/wifi/rpm_controller_utils.py b/src/antlion/test_utils/wifi/rpm_controller_utils.py
deleted file mode 100644
index 8f1fd0c..0000000
--- a/src/antlion/test_utils/wifi/rpm_controller_utils.py
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from antlion.controllers.attenuator_lib._tnhelper import _ascii_string
-
-import logging
-import telnetlib
-
-ID = '.A'
-LOGIN_PWD = 'admn'
-ON = 'On'
-OFF = 'Off'
-PASSWORD = 'Password: '
-PORT = 23
-RPM_PROMPT = 'Switched CDU: '
-SEPARATOR = '\n'
-TIMEOUT = 3
-USERNAME = 'Username: '
-
-
-class RpmControllerError(Exception):
-    """Error related to RPM switch."""
-
-class RpmController(object):
-    """Class representing telnet to RPM switch.
-
-    Each object represents a telnet connection to the RPM switch's IP.
-
-    Attributes:
-        tn: represents a connection to RPM switch.
-        host: IP address of the RPM controller.
-    """
-    def __init__(self, host):
-        """Initializes the RPM controller object.
-
-        Establishes a telnet connection and login to the switch.
-        """
-        self.host = host
-        logging.info('RPM IP: %s' % self.host)
-
-        self.tn = telnetlib.Telnet(self.host)
-        self.tn.open(self.host, PORT, TIMEOUT)
-        self.run(USERNAME, LOGIN_PWD)
-        result = self.run(PASSWORD, LOGIN_PWD)
-        if RPM_PROMPT not in result:
-            raise RpmControllerError('Failed to login to rpm controller %s'
-                                     % self.host)
-
-    def run(self, prompt, cmd_str):
-        """Method to run commands on the RPM.
-
-        This method simply runs a command and returns output in decoded format.
-        The calling methods should take care of parsing the expected result
-        from this output.
-
-        Args:
-            prompt: Expected prompt before running a command.
-            cmd_str: Command to run on RPM.
-
-        Returns:
-            Decoded text returned by the command.
-        """
-        cmd_str = '%s%s' % (cmd_str, SEPARATOR)
-        res = self.tn.read_until(_ascii_string(prompt), TIMEOUT)
-
-        self.tn.write(_ascii_string(cmd_str))
-        idx, val, txt = self.tn.expect(
-            [_ascii_string('\S+%s' % SEPARATOR)], TIMEOUT)
-
-        return txt.decode()
-
-    def set_rpm_port_state(self, rpm_port, state):
-        """Method to turn on/off rpm port.
-
-        Args:
-            rpm_port: port number of the switch to turn on.
-            state: 'on' or 'off'
-
-        Returns:
-            True: if the state is set to the expected value
-        """
-        port = '%s%s' % (ID, rpm_port)
-        logging.info('Turning %s port: %s' % (state, port))
-        self.run(RPM_PROMPT, '%s %s' % (state.lower(), port))
-        result = self.run(RPM_PROMPT, 'status %s' % port)
-        if port not in result:
-            raise RpmControllerError('Port %s doesn\'t exist' % port)
-        return state in result
-
-    def turn_on(self, rpm_port):
-        """Method to turn on a port on the RPM switch.
-
-        Args:
-            rpm_port: port number of the switch to turn on.
-
-        Returns:
-            True if the port is turned on.
-            False if not turned on.
-        """
-        return self.set_rpm_port_state(rpm_port, ON)
-
-    def turn_off(self, rpm_port):
-        """Method to turn off a port on the RPM switch.
-
-        Args:
-            rpm_port: port number of the switch to turn off.
-
-        Returns:
-            True if the port is turned off.
-            False if not turned off.
-        """
-        return self.set_rpm_port_state(rpm_port, OFF)
-
-    def __del__(self):
-        """Close the telnet connection. """
-        self.tn.close()
-
-
-def create_telnet_session(ip):
-    """Returns telnet connection object to RPM's IP."""
-    return RpmController(ip)
-
-def turn_on_ap(pcap, ssid, rpm_port, rpm_ip=None, rpm=None):
-    """Turn on the AP.
-
-    This method turns on the RPM port the AP is connected to,
-    verify the SSID of the AP is found in the scan result through the
-    packet capturer.
-
-    Either IP addr of the RPM switch or the existing telnet connection
-    to the RPM is required. Multiple APs might be connected to the same RPM
-    switch. Instead of connecting/terminating telnet for each AP, the test
-    can maintain a single telnet connection for all the APs.
-
-    Args:
-        pcap: packet capture object.
-        ssid: SSID of the wifi network.
-        rpm_port: Port number on the RPM switch the AP is connected to.
-        rpm_ip: IP address of the RPM switch.
-        rpm: telnet connection object to the RPM switch.
-    """
-    if not rpm and not rpm_ip:
-        logging.error("Failed to turn on AP. Need telnet object or RPM IP")
-        return False
-    elif not rpm:
-        rpm = create_telnet_session(rpm_ip)
-
-    return rpm.turn_on(rpm_port) and pcap.start_scan_and_find_network(ssid)
-
-def turn_off_ap(rpm_port, rpm_ip=None, rpm=None):
-    """ Turn off AP.
-
-    This method turns off the RPM port the AP is connected to.
-
-    Either IP addr of the RPM switch or the existing telnet connection
-    to the RPM is required.
-
-    Args:
-        rpm_port: Port number on the RPM switch the AP is connected to.
-        rpm_ip: IP address of the RPM switch.
-        rpm: telnet connection object to the RPM switch.
-    """
-    if not rpm and not rpm_ip:
-        logging.error("Failed to turn off AP. Need telnet object or RPM IP")
-        return False
-    elif not rpm:
-        rpm = create_telnet_session(rpm_ip)
-
-    return rpm.turn_off(rpm_port)
diff --git a/src/antlion/test_utils/wifi/rtt/RttBaseTest.py b/src/antlion/test_utils/wifi/rtt/RttBaseTest.py
deleted file mode 100644
index bbd3efd..0000000
--- a/src/antlion/test_utils/wifi/rtt/RttBaseTest.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-from antlion import asserts
-from antlion import utils
-from antlion.base_test import BaseTestClass
-from antlion.keys import Config
-from antlion.test_utils.net import net_test_utils as nutils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.wifi.rtt import rtt_const as rconsts
-from antlion.test_utils.wifi.rtt import rtt_test_utils as rutils
-
-
-class RttBaseTest(BaseTestClass):
-
-    def setup_class(self):
-        opt_param = ["pixel_models", "cnss_diag_file", "ranging_role_concurrency_flexible_models"]
-        self.unpack_userparams(opt_param_names=opt_param)
-        if hasattr(self, "cnss_diag_file"):
-            if isinstance(self.cnss_diag_file, list):
-                self.cnss_diag_file = self.cnss_diag_file[0]
-            if not os.path.isfile(self.cnss_diag_file):
-                self.cnss_diag_file = os.path.join(
-                    self.user_params[Config.key_config_path.value],
-                    self.cnss_diag_file)
-
-    def setup_test(self):
-        required_params = ("lci_reference", "lcr_reference",
-                           "rtt_reference_distance_mm",
-                           "stress_test_min_iteration_count",
-                           "stress_test_target_run_time_sec")
-        self.unpack_userparams(required_params)
-
-        # can be moved to JSON config file
-        self.rtt_reference_distance_margin_mm = 2000
-        self.rtt_max_failure_rate_two_sided_rtt_percentage = 20
-        self.rtt_max_failure_rate_one_sided_rtt_percentage = 50
-        self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage = 10
-        self.rtt_max_margin_exceeded_rate_one_sided_rtt_percentage = 50
-        self.rtt_min_expected_rssi_dbm = -100
-
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.start_cnss_diags(
-                self.android_devices, self.cnss_diag_file, self.pixel_models)
-        self.tcpdump_proc = []
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                proc = nutils.start_tcpdump(ad, self.test_name)
-                self.tcpdump_proc.append((ad, proc))
-
-        for ad in self.android_devices:
-            utils.set_location_service(ad, True)
-            ad.droid.wifiEnableVerboseLogging(1)
-            asserts.skip_if(
-                not ad.droid.doesDeviceSupportWifiRttFeature(),
-                "Device under test does not support Wi-Fi RTT - skipping test")
-            wutils.wifi_toggle_state(ad, True)
-            rtt_avail = ad.droid.wifiIsRttAvailable()
-            if not rtt_avail:
-                self.log.info('RTT not available. Waiting ...')
-                rutils.wait_for_event(ad, rconsts.BROADCAST_WIFI_RTT_AVAILABLE)
-            ad.ed.clear_all_events()
-            rutils.config_privilege_override(ad, False)
-            wutils.set_wifi_country_code(ad, wutils.WifiEnums.CountryCode.US)
-            ad.rtt_capabilities = rutils.get_rtt_capabilities(ad)
-
-    def teardown_test(self):
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(
-                    proc[0], proc[1], self.test_name, pull_dump=False)
-        self.tcpdump_proc = []
-        for ad in self.android_devices:
-            if not ad.droid.doesDeviceSupportWifiRttFeature():
-                return
-
-            # clean-up queue from the System Service UID
-            ad.droid.wifiRttCancelRanging([1000])
-
-    def on_fail(self, test_name, begin_time):
-        for ad in self.android_devices:
-            ad.take_bug_report(test_name, begin_time)
-            ad.cat_adb_log(test_name, begin_time)
-            wutils.get_ssrdumps(ad)
-        if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-            for ad in self.android_devices:
-                wutils.get_cnss_diag_log(ad)
-        for proc in self.tcpdump_proc:
-            nutils.stop_tcpdump(proc[0], proc[1], self.test_name)
-        self.tcpdump_proc = []
diff --git a/src/antlion/test_utils/wifi/rtt/__init__.py b/src/antlion/test_utils/wifi/rtt/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils/wifi/rtt/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils/wifi/rtt/rtt_const.py b/src/antlion/test_utils/wifi/rtt/rtt_const.py
deleted file mode 100644
index 379b9b1..0000000
--- a/src/antlion/test_utils/wifi/rtt/rtt_const.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-######################################################
-# Broadcast events
-######################################################
-BROADCAST_WIFI_RTT_AVAILABLE = "WifiRttAvailable"
-BROADCAST_WIFI_RTT_NOT_AVAILABLE = "WifiRttNotAvailable"
-
-######################################################
-# RangingResultCallback events
-######################################################
-EVENT_CB_RANGING_ON_FAIL = "WifiRttRangingFailure"
-EVENT_CB_RANGING_ON_RESULT = "WifiRttRangingResults"
-
-EVENT_CB_RANGING_KEY_RESULTS = "Results"
-
-EVENT_CB_RANGING_KEY_STATUS = "status"
-EVENT_CB_RANGING_KEY_DISTANCE_MM = "distanceMm"
-EVENT_CB_RANGING_KEY_DISTANCE_STD_DEV_MM = "distanceStdDevMm"
-EVENT_CB_RANGING_KEY_RSSI = "rssi"
-EVENT_CB_RANGING_KEY_NUM_ATTEMPTED_MEASUREMENTS = "numAttemptedMeasurements"
-EVENT_CB_RANGING_KEY_NUM_SUCCESSFUL_MEASUREMENTS = "numSuccessfulMeasurements"
-EVENT_CB_RANGING_KEY_LCI = "lci"
-EVENT_CB_RANGING_KEY_LCR = "lcr"
-EVENT_CB_RANGING_KEY_TIMESTAMP = "timestamp"
-EVENT_CB_RANGING_KEY_MAC = "mac"
-EVENT_CB_RANGING_KEY_PEER_ID = "peerId"
-EVENT_CB_RANGING_KEY_MAC_AS_STRING = "macAsString"
-
-EVENT_CB_RANGING_STATUS_SUCCESS = 0
-EVENT_CB_RANGING_STATUS_FAIL = 1
-EVENT_CB_RANGING_STATUS_RESPONDER_DOES_NOT_SUPPORT_IEEE80211MC = 2
-
-######################################################
-# status codes
-######################################################
-
-RANGING_FAIL_CODE_GENERIC = 1
-RANGING_FAIL_CODE_RTT_NOT_AVAILABLE = 2
-
-######################################################
-# ScanResults keys
-######################################################
-
-SCAN_RESULT_KEY_RTT_RESPONDER = "is80211McRTTResponder"
-
-######################################################
-# Capabilities keys
-######################################################
-
-CAP_RTT_ONE_SIDED_SUPPORTED = "rttOneSidedSupported"
-CAP_FTM_SUPPORTED = "rttFtmSupported"
-CAP_LCI_SUPPORTED = "lciSupported"
-CAP_LCR_SUPPORTED = "lcrSupported"
-CAP_RESPONDER_SUPPORTED = "responderSupported"
-CAP_MC_VERSION = "mcVersion"
diff --git a/src/antlion/test_utils/wifi/rtt/rtt_test_utils.py b/src/antlion/test_utils/wifi/rtt/rtt_test_utils.py
deleted file mode 100644
index a0777d8..0000000
--- a/src/antlion/test_utils/wifi/rtt/rtt_test_utils.py
+++ /dev/null
@@ -1,501 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import queue
-import statistics
-import time
-
-from antlion import asserts
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.wifi.rtt import rtt_const as rconsts
-
-# arbitrary timeout for events
-EVENT_TIMEOUT = 15
-
-
-def decorate_event(event_name, id):
-    return '%s_%d' % (event_name, id)
-
-
-def wait_for_event(ad, event_name, timeout=EVENT_TIMEOUT):
-    """Wait for the specified event or timeout.
-
-  Args:
-    ad: The android device
-    event_name: The event to wait on
-    timeout: Number of seconds to wait
-  Returns:
-    The event (if available)
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.pop_event(event_name, timeout)
-        ad.log.info('%s%s: %s', prefix, event_name, event['data'])
-        return event
-    except queue.Empty:
-        ad.log.info('%sTimed out while waiting for %s', prefix, event_name)
-        asserts.fail(event_name)
-
-
-def fail_on_event(ad, event_name, timeout=EVENT_TIMEOUT):
-    """Wait for a timeout period and looks for the specified event - fails if it
-  is observed.
-
-  Args:
-    ad: The android device
-    event_name: The event to wait for (and fail on its appearance)
-  """
-    prefix = ''
-    if hasattr(ad, 'pretty_name'):
-        prefix = '[%s] ' % ad.pretty_name
-    try:
-        event = ad.ed.pop_event(event_name, timeout)
-        ad.log.info('%sReceived unwanted %s: %s', prefix, event_name,
-                    event['data'])
-        asserts.fail(event_name, extras=event)
-    except queue.Empty:
-        ad.log.info('%s%s not seen (as expected)', prefix, event_name)
-        return
-
-
-def get_rtt_capabilities(ad):
-    """Get the Wi-Fi RTT capabilities from the specified device. The
-  capabilities are a dictionary keyed by rtt_const.CAP_* keys.
-
-  Args:
-    ad: the Android device
-  Returns: the capability dictionary.
-  """
-    return json.loads(ad.adb.shell('cmd wifirtt get_capabilities'))
-
-
-def config_privilege_override(dut, override_to_no_privilege):
-    """Configure the device to override the permission check and to disallow any
-  privileged RTT operations, e.g. disallow one-sided RTT to Responders (APs)
-  which do not support IEEE 802.11mc.
-
-  Args:
-    dut: Device to configure.
-    override_to_no_privilege: True to indicate no privileged ops, False for
-                              default (which will allow privileged ops).
-  """
-    dut.adb.shell("cmd wifirtt set override_assume_no_privilege %d" %
-                  (1 if override_to_no_privilege else 0))
-
-
-def get_rtt_constrained_results(scanned_networks, support_rtt):
-    """Filter the input list and only return those networks which either support
-  or do not support RTT (IEEE 802.11mc.)
-
-  Args:
-    scanned_networks: A list of networks from scan results.
-      support_rtt: True - only return those APs which support RTT, False - only
-                   return those APs which do not support RTT.
-
-  Returns: a sub-set of the scanned_networks per support_rtt constraint.
-  """
-    matching_networks = []
-    for network in scanned_networks:
-        if support_rtt:
-            if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in network
-                    and network[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
-                matching_networks.append(network)
-        else:
-            if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER not in network
-                    or not network[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
-                matching_networks.append(network)
-
-    return matching_networks
-
-
-def scan_networks(dut, max_tries=3):
-    """Perform a scan and return scan results.
-
-  Args:
-    dut: Device under test.
-    max_retries: Retry scan to ensure network is found
-
-  Returns: an array of scan results.
-  """
-    scan_results = []
-    for num_tries in range(max_tries):
-        wutils.start_wifi_connection_scan(dut)
-        scan_results = dut.droid.wifiGetScanResults()
-        if scan_results:
-            break
-    return scan_results
-
-
-def scan_with_rtt_support_constraint(dut, support_rtt, repeat=0):
-    """Perform a scan and return scan results of APs: only those that support or
-  do not support RTT (IEEE 802.11mc) - per the support_rtt parameter.
-
-  Args:
-    dut: Device under test.
-    support_rtt: True - only return those APs which support RTT, False - only
-                 return those APs which do not support RTT.
-    repeat: Re-scan this many times to find an RTT supporting network.
-
-  Returns: an array of scan results.
-  """
-    for i in range(repeat + 1):
-        scan_results = scan_networks(dut)
-        aps = get_rtt_constrained_results(scan_results, support_rtt)
-        if len(aps) != 0:
-            return aps
-
-    return []
-
-
-def select_best_scan_results(scans, select_count, lowest_rssi=-80):
-    """Select the strongest 'select_count' scans in the input list based on
-  highest RSSI. Exclude all very weak signals, even if results in a shorter
-  list.
-
-  Args:
-    scans: List of scan results.
-    select_count: An integer specifying how many scans to return at most.
-    lowest_rssi: The lowest RSSI to accept into the output.
-  Returns: a list of the strongest 'select_count' scan results from the scans
-           list.
-  """
-
-    def takeRssi(element):
-        return element['level']
-
-    result = []
-    scans.sort(key=takeRssi, reverse=True)
-    for scan in scans:
-        if len(result) == select_count:
-            break
-        if scan['level'] < lowest_rssi:
-            break  # rest are lower since we're sorted
-        result.append(scan)
-
-    return result
-
-
-def validate_ap_result(scan_result, range_result):
-    """Validate the range results:
-  - Successful if AP (per scan result) support 802.11mc (allowed to fail
-    otherwise)
-  - MAC of result matches the BSSID
-
-  Args:
-    scan_result: Scan result for the AP
-    range_result: Range result returned by the RTT API
-  """
-    asserts.assert_equal(
-        scan_result[wutils.WifiEnums.BSSID_KEY],
-        range_result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING_BSSID],
-        'MAC/BSSID mismatch')
-    if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in scan_result
-            and scan_result[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
-        asserts.assert_true(
-            range_result[rconsts.EVENT_CB_RANGING_KEY_STATUS] ==
-            rconsts.EVENT_CB_RANGING_STATUS_SUCCESS,
-            'Ranging failed for an AP which supports 802.11mc!')
-
-
-def validate_ap_results(scan_results, range_results):
-    """Validate an array of ranging results against the scan results used to
-  trigger the range. The assumption is that the results are returned in the
-  same order as the request (which were the scan results).
-
-  Args:
-    scan_results: Scans results used to trigger the range request
-    range_results: Range results returned by the RTT API
-  """
-    asserts.assert_equal(
-        len(scan_results), len(range_results),
-        'Mismatch in length of scan results and range results')
-
-    # sort first based on BSSID/MAC
-    scan_results.sort(key=lambda x: x[wutils.WifiEnums.BSSID_KEY])
-    range_results.sort(
-        key=lambda x: x[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING_BSSID])
-
-    for i in range(len(scan_results)):
-        validate_ap_result(scan_results[i], range_results[i])
-
-
-def validate_aware_mac_result(range_result, mac, description):
-    """Validate the range result for an Aware peer specified with a MAC address:
-  - Correct MAC address.
-
-  The MAC addresses may contain ":" (which are ignored for the comparison) and
-  may be in any case (which is ignored for the comparison).
-
-  Args:
-    range_result: Range result returned by the RTT API
-    mac: MAC address of the peer
-    description: Additional content to print on failure
-  """
-    mac1 = mac.replace(':', '').lower()
-    mac2 = range_result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING].replace(
-        ':', '').lower()
-    asserts.assert_equal(mac1, mac2, '%s: MAC mismatch' % description)
-
-
-def validate_aware_peer_id_result(range_result, peer_id, description):
-    """Validate the range result for An Aware peer specified with a Peer ID:
-  - Correct Peer ID
-  - MAC address information not available
-
-  Args:
-    range_result: Range result returned by the RTT API
-    peer_id: Peer ID of the peer
-    description: Additional content to print on failure
-  """
-    asserts.assert_equal(peer_id,
-                         range_result[rconsts.EVENT_CB_RANGING_KEY_PEER_ID],
-                         '%s: Peer Id mismatch' % description)
-    asserts.assert_false(rconsts.EVENT_CB_RANGING_KEY_MAC in range_result,
-                         '%s: MAC Address not empty!' % description)
-
-
-def extract_stats(results,
-                  range_reference_mm,
-                  range_margin_mm,
-                  min_rssi,
-                  reference_lci=[],
-                  reference_lcr=[],
-                  summary_only=False):
-    """Extract statistics from a list of RTT results. Returns a dictionary
-   with results:
-     - num_results (success or fails)
-     - num_success_results
-     - num_no_results (e.g. timeout)
-     - num_failures
-     - num_range_out_of_margin (only for successes)
-     - num_invalid_rssi (only for successes)
-     - distances: extracted list of distances
-     - distance_std_devs: extracted list of distance standard-deviations
-     - rssis: extracted list of RSSI
-     - distance_mean
-     - distance_std_dev (based on distance - ignoring the individual std-devs)
-     - rssi_mean
-     - rssi_std_dev
-     - status_codes
-     - lcis: extracted list of all of the individual LCI
-     - lcrs: extracted list of all of the individual LCR
-     - any_lci_mismatch: True/False - checks if all LCI results are identical to
-                         the reference LCI.
-     - any_lcr_mismatch: True/False - checks if all LCR results are identical to
-                         the reference LCR.
-     - num_attempted_measurements: extracted list of all of the individual
-                                   number of attempted measurements.
-     - num_successful_measurements: extracted list of all of the individual
-                                    number of successful measurements.
-     - invalid_num_attempted: True/False - checks if number of attempted
-                              measurements is non-zero for successful results.
-     - invalid_num_successful: True/False - checks if number of successful
-                               measurements is non-zero for successful results.
-
-  Args:
-    results: List of RTT results.
-    range_reference_mm: Reference value for the distance (in mm)
-    range_margin_mm: Acceptable absolute margin for distance (in mm)
-    min_rssi: Acceptable minimum RSSI value.
-    reference_lci, reference_lcr: Reference values for LCI and LCR.
-    summary_only: Only include summary keys (reduce size).
-
-  Returns: A dictionary of stats.
-  """
-    stats = {}
-    stats['num_results'] = 0
-    stats['num_success_results'] = 0
-    stats['num_no_results'] = 0
-    stats['num_failures'] = 0
-    stats['num_range_out_of_margin'] = 0
-    stats['num_invalid_rssi'] = 0
-    stats['any_lci_mismatch'] = False
-    stats['any_lcr_mismatch'] = False
-    stats['invalid_num_attempted'] = False
-    stats['invalid_num_successful'] = False
-
-    range_max_mm = range_reference_mm + range_margin_mm
-    range_min_mm = range_reference_mm - range_margin_mm
-
-    distances = []
-    distance_std_devs = []
-    rssis = []
-    num_attempted_measurements = []
-    num_successful_measurements = []
-    status_codes = []
-    lcis = []
-    lcrs = []
-
-    for i in range(len(results)):
-        result = results[i]
-
-        if result is None:  # None -> timeout waiting for RTT result
-            stats['num_no_results'] = stats['num_no_results'] + 1
-            continue
-        stats['num_results'] = stats['num_results'] + 1
-
-        status_codes.append(result[rconsts.EVENT_CB_RANGING_KEY_STATUS])
-        if status_codes[-1] != rconsts.EVENT_CB_RANGING_STATUS_SUCCESS:
-            stats['num_failures'] = stats['num_failures'] + 1
-            continue
-        stats['num_success_results'] = stats['num_success_results'] + 1
-
-        distance_mm = result[rconsts.EVENT_CB_RANGING_KEY_DISTANCE_MM]
-        distances.append(distance_mm)
-        if not range_min_mm <= distance_mm <= range_max_mm:
-            stats[
-                'num_range_out_of_margin'] = stats['num_range_out_of_margin'] + 1
-        distance_std_devs.append(
-            result[rconsts.EVENT_CB_RANGING_KEY_DISTANCE_STD_DEV_MM])
-
-        rssi = result[rconsts.EVENT_CB_RANGING_KEY_RSSI]
-        rssis.append(rssi)
-        if not min_rssi <= rssi <= 0:
-            stats['num_invalid_rssi'] = stats['num_invalid_rssi'] + 1
-
-        num_attempted = result[
-            rconsts.EVENT_CB_RANGING_KEY_NUM_ATTEMPTED_MEASUREMENTS]
-        num_attempted_measurements.append(num_attempted)
-        if num_attempted == 0:
-            stats['invalid_num_attempted'] = True
-
-        num_successful = result[
-            rconsts.EVENT_CB_RANGING_KEY_NUM_SUCCESSFUL_MEASUREMENTS]
-        num_successful_measurements.append(num_successful)
-        if num_successful == 0:
-            stats['invalid_num_successful'] = True
-
-        lcis.append(result[rconsts.EVENT_CB_RANGING_KEY_LCI])
-        if (result[rconsts.EVENT_CB_RANGING_KEY_LCI] != reference_lci):
-            stats['any_lci_mismatch'] = True
-        lcrs.append(result[rconsts.EVENT_CB_RANGING_KEY_LCR])
-        if (result[rconsts.EVENT_CB_RANGING_KEY_LCR] != reference_lcr):
-            stats['any_lcr_mismatch'] = True
-
-    if len(distances) > 0:
-        stats['distance_mean'] = statistics.mean(distances)
-    if len(distances) > 1:
-        stats['distance_std_dev'] = statistics.stdev(distances)
-    if len(rssis) > 0:
-        stats['rssi_mean'] = statistics.mean(rssis)
-    if len(rssis) > 1:
-        stats['rssi_std_dev'] = statistics.stdev(rssis)
-    if not summary_only:
-        stats['distances'] = distances
-        stats['distance_std_devs'] = distance_std_devs
-        stats['rssis'] = rssis
-        stats['num_attempted_measurements'] = num_attempted_measurements
-        stats['num_successful_measurements'] = num_successful_measurements
-        stats['status_codes'] = status_codes
-        stats['lcis'] = lcis
-        stats['lcrs'] = lcrs
-
-    return stats
-
-
-def run_ranging(dut,
-                aps,
-                iter_count,
-                time_between_iterations,
-                target_run_time_sec=0):
-    """Executing ranging to the set of APs.
-
-  Will execute a minimum of 'iter_count' iterations. Will continue to run
-  until execution time (just) exceeds 'target_run_time_sec'.
-
-  Args:
-    dut: Device under test
-    aps: A list of APs (Access Points) to range to.
-    iter_count: (Minimum) Number of measurements to perform.
-    time_between_iterations: Number of seconds to wait between iterations.
-    target_run_time_sec: The target run time in seconds.
-
-  Returns: a list of the events containing the RTT results (or None for a
-  failed measurement).
-  """
-    max_peers = dut.droid.wifiRttMaxPeersInRequest()
-
-    asserts.assert_true(len(aps) > 0, "Need at least one AP!")
-    if len(aps) > max_peers:
-        aps = aps[0:max_peers]
-
-    events = {}  # need to keep track per BSSID!
-    for ap in aps:
-        events[ap["BSSID"]] = []
-
-    start_clock = time.time()
-    iterations_done = 0
-    run_time = 0
-    while iterations_done < iter_count or (target_run_time_sec != 0
-                                           and run_time < target_run_time_sec):
-        if iterations_done != 0 and time_between_iterations != 0:
-            time.sleep(time_between_iterations)
-
-        id = dut.droid.wifiRttStartRangingToAccessPoints(aps)
-        try:
-            event = dut.ed.pop_event(
-                decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT, id),
-                EVENT_TIMEOUT)
-            range_results = event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS]
-            asserts.assert_equal(
-                len(aps), len(range_results),
-                'Mismatch in length of scan results and range results')
-            for result in range_results:
-                bssid = result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING]
-                asserts.assert_true(
-                    bssid in events,
-                    "Result BSSID %s not in requested AP!?" % bssid)
-                asserts.assert_equal(
-                    len(events[bssid]), iterations_done,
-                    "Duplicate results for BSSID %s!?" % bssid)
-                events[bssid].append(result)
-        except queue.Empty:
-            for ap in aps:
-                events[ap["BSSID"]].append(None)
-
-        iterations_done = iterations_done + 1
-        run_time = time.time() - start_clock
-
-    return events
-
-
-def analyze_results(all_aps_events,
-                    rtt_reference_distance_mm,
-                    distance_margin_mm,
-                    min_expected_rssi,
-                    lci_reference,
-                    lcr_reference,
-                    summary_only=False):
-    """Verifies the results of the RTT experiment.
-
-  Args:
-    all_aps_events: Dictionary of APs, each a list of RTT result events.
-    rtt_reference_distance_mm: Expected distance to the AP (source of truth).
-    distance_margin_mm: Accepted error marging in distance measurement.
-    min_expected_rssi: Minimum acceptable RSSI value
-    lci_reference, lcr_reference: Expected LCI/LCR values (arrays of bytes).
-    summary_only: Only include summary keys (reduce size).
-  """
-    all_stats = {}
-    for bssid, events in all_aps_events.items():
-        stats = extract_stats(events, rtt_reference_distance_mm,
-                              distance_margin_mm, min_expected_rssi,
-                              lci_reference, lcr_reference, summary_only)
-        all_stats[bssid] = stats
-    return all_stats
diff --git a/src/antlion/test_utils/wifi/wifi_constants.py b/src/antlion/test_utils/wifi/wifi_constants.py
deleted file mode 100644
index f07c987..0000000
--- a/src/antlion/test_utils/wifi/wifi_constants.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Constants for Wifi related events.
-WIFI_CONNECTED = "WifiNetworkConnected"
-WIFI_DISCONNECTED = "WifiNetworkDisconnected"
-SUPPLICANT_CON_CHANGED = "SupplicantConnectionChanged"
-WIFI_STATE_CHANGED = "WifiStateChanged"
-WIFI_FORGET_NW_SUCCESS = "WifiManagerForgetNetworkOnSuccess"
-WIFI_NETWORK_REQUEST_MATCH_CB_ON_MATCH = "WifiManagerNetworkRequestMatchCallbackOnMatch"
-WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_SUCCESS = "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectSuccess"
-WIFI_NETWORK_REQUEST_MATCH_CB_ON_CONNECT_FAILURE = "WifiManagerNetworkRequestMatchCallbackOnUserSelectionConnectFailure"
-WIFI_NETWORK_CB_ON_AVAILABLE = "WifiManagerNetworkCallbackOnAvailable"
-WIFI_NETWORK_CB_ON_UNAVAILABLE = "WifiManagerNetworkCallbackOnUnavailable"
-WIFI_NETWORK_CB_ON_LOST = "WifiManagerNetworkCallbackOnLost"
-WIFI_NETWORK_SUGGESTION_POST_CONNECTION = "WifiNetworkSuggestionPostConnection"
-WIFI_SUBSYSTEM_RESTARTING = "WifiSubsystemRestarting"
-WIFI_SUBSYSTEM_RESTARTED = "WifiSubsystemRestarted"
-
-# These constants will be used by the ACTS wifi tests.
-CONNECT_BY_CONFIG_SUCCESS = 'WifiManagerConnectByConfigOnSuccess'
-CONNECT_BY_NETID_SUCCESS = 'WifiManagerConnectByNetIdOnSuccess'
-
-# Softap related constants
-SOFTAP_CALLBACK_EVENT = "WifiManagerSoftApCallback-"
-# Callback Event for softap state change
-# WifiManagerSoftApCallback-[callbackId]-OnStateChanged
-SOFTAP_STATE_CHANGED = "-OnStateChanged"
-SOFTAP_STATE_CHANGE_CALLBACK_KEY = "State"
-WIFI_AP_DISABLING_STATE = 10
-WIFI_AP_DISABLED_STATE = 11
-WIFI_AP_ENABLING_STATE = 12
-WIFI_AP_ENABLED_STATE = 13
-WIFI_AP_FAILED_STATE = 14
-
-SOFTAP_RANDOMIZATION_NONE = 0
-SOFTAP_RANDOMIZATION_PERSISTENT = 1
-
-# Callback Event for client number change:
-# WifiManagerSoftApCallback-[callbackId]-OnNumClientsChanged
-SOFTAP_NUMBER_CLIENTS_CHANGED_WITH_INFO = "-OnConnectedClientsChangedWithInfo"
-SOFTAP_NUMBER_CLIENTS_CHANGED = "-OnNumClientsChanged"
-SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY = "NumClients"
-SOFTAP_CLIENTS_MACS_CALLBACK_KEY = "MacAddresses"
-# Callback Event for softap info change
-SOFTAP_INFO_CHANGED = "-OnInfoChanged"
-SOFTAP_INFOLIST_CHANGED = "-OnInfoListChanged"
-SOFTAP_INFO_FREQUENCY_CALLBACK_KEY = "frequency"
-SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY = "bandwidth"
-SOFTAP_INFO_WIFISTANDARD_CALLBACK_KEY = "wifiStandard"
-SOFTAP_INFO_AUTO_SHUTDOWN_CALLBACK_KEY = "autoShutdownTimeoutMillis"
-SOFTAP_INFO_BSSID_CALLBACK_KEY = "bssid"
-# Callback Event for softap client blocking
-SOFTAP_BLOCKING_CLIENT_CONNECTING = "-OnBlockedClientConnecting"
-SOFTAP_BLOCKING_CLIENT_REASON_KEY = "BlockedReason"
-SOFTAP_BLOCKING_CLIENT_WIFICLIENT_KEY = "WifiClient"
-SAP_CLIENT_BLOCK_REASON_CODE_BLOCKED_BY_USER = 0
-SAP_CLIENT_BLOCK_REASON_CODE_NO_MORE_STAS = 1
-
-# Callback Event for softap capability
-SOFTAP_CAPABILITY_CHANGED = "-OnCapabilityChanged"
-SOFTAP_CAPABILITY_MAX_SUPPORTED_CLIENTS = "maxSupportedClients"
-SOFTAP_CAPABILITY_24GHZ_SUPPORTED_CHANNEL_LIST = "supported2GHzChannellist"
-SOFTAP_CAPABILITY_5GHZ_SUPPORTED_CHANNEL_LIST = "supported5GHzChannellist"
-SOFTAP_CAPABILITY_6GHZ_SUPPORTED_CHANNEL_LIST = "supported6GHzChannellist"
-SOFTAP_CAPABILITY_60GHZ_SUPPORTED_CHANNEL_LIST = "supported60GHzChannellist"
-SOFTAP_CAPABILITY_FEATURE_ACS = "acsOffloadSupported"
-SOFTAP_CAPABILITY_FEATURE_CLIENT_CONTROL = "clientForceDisconnectSupported"
-SOFTAP_CAPABILITY_FEATURE_WPA3_SAE = "wpa3SaeSupported"
-SOFTAP_CAPABILITY_FEATURE_IEEE80211AX = "ieee80211axSupported"
-SOFTAP_CAPABILITY_FEATURE_24GHZ = "24gSupported"
-SOFTAP_CAPABILITY_FEATURE_5GHZ = "5gSupported"
-SOFTAP_CAPABILITY_FEATURE_6GHZ = "6gSupported"
-SOFTAP_CAPABILITY_FEATURE_60GHZ = "60gSupported"
-
-DEFAULT_SOFTAP_TIMEOUT_S = 600 # 10 minutes
-
-# AP related constants
-AP_MAIN = "main_AP"
-AP_AUX = "aux_AP"
-SSID = "SSID"
-
-# cnss_diag property related constants
-DEVICES_USING_LEGACY_PROP = ["sailfish", "marlin", "walleye", "taimen", "muskie"]
-CNSS_DIAG_PROP = "persist.vendor.sys.cnss.diag_txt"
-LEGACY_CNSS_DIAG_PROP = "persist.sys.cnss.diag_txt"
-
-# Delay before registering the match callback.
-NETWORK_REQUEST_CB_REGISTER_DELAY_SEC = 2
-
-# Constants for JSONObject representation of CoexUnsafeChannel
-COEX_BAND = "band"
-COEX_BAND_24_GHZ = "24_GHZ"
-COEX_BAND_5_GHZ = "5_GHZ"
-COEX_CHANNEL = "channel"
-COEX_POWER_CAP_DBM = "powerCapDbm"
-
-# Constants for bundle keys for CoexCallback#onCoexUnsafeChannelsChanged
-KEY_COEX_UNSAFE_CHANNELS = "KEY_COEX_UNSAFE_CHANNELS"
-KEY_COEX_RESTRICTIONS = "KEY_COEX_RESTRICTIONS"
-
-# WiFi standards
-WIFI_STANDARD_11AX = 6
diff --git a/src/antlion/test_utils/wifi/wifi_datastore_utils.py b/src/antlion/test_utils/wifi/wifi_datastore_utils.py
deleted file mode 100755
index 0cd0820..0000000
--- a/src/antlion/test_utils/wifi/wifi_datastore_utils.py
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-import requests
-
-from antlion import asserts
-from antlion import signals
-from antlion import utils
-from antlion.test_utils.wifi import wifi_constants
-"""This file consists of all the helper methods needed to interact with the
-   Datastore @ https://chaos-188802.appspot.com/ used for Android Interop
-   testing.
-"""
-
-DATASTORE_HOST = "https://chaos-188802.appspot.com"
-
-# The Datastore defines the following paths for operating methods.
-ADD_DEVICE = "devices/new"
-REMOVE_DEVICE = "devices/delete"
-LOCK_DEVICE = "devices/lock"
-UNLOCK_DEVICE = "devices/unlock"
-SHOW_DEVICE = "devices/"
-GET_DEVICES = "devices/"
-
-# HTTP content type. JSON encoded with UTF-8 character encoding.
-HTTP_HEADER = {'content-type': 'application/json'}
-
-
-def add_device(name, ap_label, lab_label):
-    """Add a device(AP or Packet Capturer) in datastore.
-
-       Args:
-           name: string, hostname of the device.
-           ap_label: string, AP brand name.
-           lab_label: string, lab label for AP.
-       Returns:
-          True if device was added successfully; 0 otherwise.
-    """
-    request = DATASTORE_HOST + '/' + ADD_DEVICE
-    logging.debug("Request = %s" % request)
-    response = requests.post(request,
-                             headers=HTTP_HEADER,
-                             data=json.dumps({
-                                 "hostname": name,
-                                 "ap_label": ap_label,
-                                 "lab_label": lab_label
-                             }))
-    if response.json()['result'] == 'success':
-        logging.info("Added device %s to datastore" % name)
-        return True
-    return False
-
-
-def remove_device(name):
-    """Delete a device(AP or Packet Capturer) in datastore.
-
-       Args:
-           name: string, hostname of the device to delete.
-       Returns:
-           True if device was deleted successfully; 0 otherwise.
-    """
-    request = DATASTORE_HOST + '/' + REMOVE_DEVICE
-    logging.debug("Request = %s" % request)
-    response = requests.put(request,
-                            headers=HTTP_HEADER,
-                            data=json.dumps({"hostname": name}))
-    result_str = "%s deleted." % name
-    if result_str in response.text:
-        logging.info("Removed device %s from datastore" % name)
-        return True
-    return False
-
-
-def lock_device(name, admin):
-    """Lock a device(AP or Packet Capturer) in datastore.
-
-       Args:
-           name: string, hostname of the device in datastore.
-           admin: string, unique admin name for locking.
-      Returns:
-          True if operation was successful; 0 otherwise.
-    """
-    request = DATASTORE_HOST + '/' + LOCK_DEVICE
-    logging.debug("Request = %s" % request)
-    response = requests.put(request,
-                            headers=HTTP_HEADER,
-                            data=json.dumps({
-                                "hostname": name,
-                                "locked_by": admin
-                            }))
-    if response.json()['result']:
-        logging.info("Locked device %s in datastore" % name)
-        return True
-    return False
-
-
-def unlock_device(name):
-    """Un-lock a device(AP or Packet Capturer) in datastore.
-
-       Args:
-           name: string, hostname of the device in datastore.
-      Returns:
-          True if operation was successful; 0 otherwise.
-    """
-    request = DATASTORE_HOST + '/' + UNLOCK_DEVICE
-    logging.debug("Request = %s" % request)
-    response = requests.put(request,
-                            headers=HTTP_HEADER,
-                            data=json.dumps({"hostname": name}))
-    if response.json()['result']:
-        logging.info("Finished un-locking AP %s in datastore" % name)
-        return True
-    return False
-
-
-def show_device(name):
-    """Show device properties for a given device(AP or Packet Capturer).
-
-       Args:
-           name: string, hostname of the device in datastore to fetch info.
-           Returns: dict of device name:value properties if successful;
-                    None otherwise.
-    """
-    request = DATASTORE_HOST + '/' + SHOW_DEVICE + name
-    logging.debug("Request = %s" % request)
-    response = requests.get(request)
-    if 'error' in response.text:
-        return None
-    return response.json()
-
-
-def get_devices():
-    """Get a list of all devices in the datastore.
-
-    Returns: dict of all devices' name:value properties if successful;
-             None otherwise.
-    """
-    request = DATASTORE_HOST + '/' + GET_DEVICES
-    logging.debug("Request = %s" % request)
-    response = requests.get(request)
-    if 'error' in response.text:
-        return None
-    return response.json()
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py
deleted file mode 100644
index 26466a8..0000000
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py
+++ /dev/null
@@ -1,735 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import importlib
-import ipaddress
-import logging
-import numpy
-import re
-import time
-from antlion import asserts
-from antlion import utils
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.utils_lib import ssh
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.wifi.wifi_performance_test_utils import ping_utils
-from antlion.test_utils.wifi.wifi_performance_test_utils import qcom_utils
-from antlion.test_utils.wifi.wifi_performance_test_utils import brcm_utils
-
-from concurrent.futures import ThreadPoolExecutor
-
-SHORT_SLEEP = 1
-MED_SLEEP = 6
-CHANNELS_6GHz = ['6g{}'.format(4 * x + 1) for x in range(59)]
-BAND_TO_CHANNEL_MAP = {
-    '2.4GHz': list(range(1, 14)),
-    'UNII-1': [36, 40, 44, 48],
-    'UNII-2':
-    [52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128, 132, 140],
-    'UNII-3': [149, 153, 157, 161, 165],
-    '6GHz': CHANNELS_6GHz
-}
-CHANNEL_TO_BAND_MAP = {
-    channel: band
-    for band, channels in BAND_TO_CHANNEL_MAP.items() for channel in channels
-}
-
-
-# Decorators
-def nonblocking(f):
-    """Creates a decorator transforming function calls to non-blocking"""
-
-    def wrap(*args, **kwargs):
-        executor = ThreadPoolExecutor(max_workers=1)
-        thread_future = executor.submit(f, *args, **kwargs)
-        # Ensure resources are freed up when executor ruturns or raises
-        executor.shutdown(wait=False)
-        return thread_future
-
-    return wrap
-
-
-def detect_wifi_platform(dut):
-    if hasattr(dut, 'wifi_platform'):
-        return dut.wifi_platform
-    qcom_check = len(dut.get_file_names('/vendor/firmware/wlan/qca_cld/'))
-    if qcom_check:
-        dut.wifi_platform = 'qcom'
-    else:
-        dut.wifi_platform = 'brcm'
-    return dut.wifi_platform
-
-
-def detect_wifi_decorator(f):
-    def wrap(*args, **kwargs):
-        if 'dut' in kwargs:
-            dut = kwargs['dut']
-        else:
-            dut = next(arg for arg in args if type(arg) == AndroidDevice)
-        dut_package = 'acts_contrib.test_utils.wifi.wifi_performance_test_utils.{}_utils'.format(
-            detect_wifi_platform(dut))
-        dut_package = importlib.import_module(dut_package)
-        f_decorated = getattr(dut_package, f.__name__, lambda: None)
-        return (f_decorated(*args, **kwargs))
-
-    return wrap
-
-
-# JSON serializer
-def serialize_dict(input_dict):
-    """Function to serialize dicts to enable JSON output"""
-    output_dict = collections.OrderedDict()
-    for key, value in input_dict.items():
-        output_dict[_serialize_value(key)] = _serialize_value(value)
-    return output_dict
-
-
-def _serialize_value(value):
-    """Function to recursively serialize dict entries to enable JSON output"""
-    if isinstance(value, tuple):
-        return str(value)
-    if isinstance(value, numpy.int64):
-        return int(value)
-    if isinstance(value, numpy.float64):
-        return float(value)
-    if isinstance(value, list):
-        return [_serialize_value(x) for x in value]
-    if isinstance(value, numpy.ndarray):
-        return [_serialize_value(x) for x in value]
-    elif isinstance(value, dict):
-        return serialize_dict(value)
-    elif type(value) in (float, int, bool, str):
-        return value
-    else:
-        return "Non-serializable object"
-
-
-def extract_sub_dict(full_dict, fields):
-    sub_dict = collections.OrderedDict(
-        (field, full_dict[field]) for field in fields)
-    return sub_dict
-
-
-# Miscellaneous Wifi Utilities
-def check_skip_conditions(testcase_params, dut, access_point,
-                          ota_chamber=None):
-    """Checks if test should be skipped."""
-    # Check battery level before test
-    if not health_check(dut, 10):
-        asserts.skip('DUT battery level too low.')
-    if not access_point.band_lookup_by_channel(testcase_params['channel']):
-        asserts.skip('AP does not support requested channel.')
-    if ota_chamber and CHANNEL_TO_BAND_MAP[
-            testcase_params['channel']] not in ota_chamber.SUPPORTED_BANDS:
-        asserts.skip('OTA chamber does not support requested channel.')
-    # Check if 6GHz is supported by checking capabilities in the US.
-    if not dut.droid.wifiCheckState():
-        wutils.wifi_toggle_state(dut, True)
-    iw_list = dut.adb.shell('iw list')
-    supports_6ghz = '6135 MHz' in iw_list
-    supports_160mhz = 'Supported Channel Width: 160 MHz' in iw_list
-    if testcase_params.get('bandwidth', 20) == 160 and not supports_160mhz:
-        asserts.skip('DUT does not support 160 MHz networks.')
-    if testcase_params.get('channel',
-                           6) in CHANNELS_6GHz and not supports_6ghz:
-        asserts.skip('DUT does not support 6 GHz band.')
-
-
-def validate_network(dut, ssid):
-    """Check that DUT has a valid internet connection through expected SSID
-
-    Args:
-        dut: android device of interest
-        ssid: expected ssid
-    """
-    try:
-        connected = wutils.validate_connection(dut, wait_time=3) is not None
-        current_network = dut.droid.wifiGetConnectionInfo()
-    except:
-        connected = False
-        current_network = None
-    if connected and current_network['SSID'] == ssid:
-        return True
-    else:
-        return False
-
-
-def get_server_address(ssh_connection, dut_ip, subnet_mask):
-    """Get server address on a specific subnet,
-
-    This function retrieves the LAN or WAN IP of a remote machine used in
-    testing. If subnet_mask is set to 'public' it returns a machines global ip,
-    else it returns the ip belonging to the dut local network given the dut's
-    ip and subnet mask.
-
-    Args:
-        ssh_connection: object representing server for which we want an ip
-        dut_ip: string in ip address format, i.e., xxx.xxx.xxx.xxx
-        subnet_mask: string representing subnet mask (public for global ip)
-    """
-    ifconfig_out = ssh_connection.run('ifconfig').stdout
-    ip_list = re.findall('inet (?:addr:)?(\d+.\d+.\d+.\d+)', ifconfig_out)
-    ip_list = [ipaddress.ip_address(ip) for ip in ip_list]
-
-    if subnet_mask == 'public':
-        for ip in ip_list:
-            # is_global is not used to allow for CGNAT ips in 100.x.y.z range
-            if not ip.is_private:
-                return str(ip)
-    else:
-        dut_network = ipaddress.ip_network('{}/{}'.format(dut_ip, subnet_mask),
-                                           strict=False)
-        for ip in ip_list:
-            if ip in dut_network:
-                return str(ip)
-    logging.error('No IP address found in requested subnet')
-
-
-# Ping utilities
-def get_ping_stats(src_device, dest_address, ping_duration, ping_interval,
-                   ping_size):
-    """Run ping to or from the DUT.
-
-    The function computes either pings the DUT or pings a remote ip from
-    DUT.
-
-    Args:
-        src_device: object representing device to ping from
-        dest_address: ip address to ping
-        ping_duration: timeout to set on the ping process (in seconds)
-        ping_interval: time between pings (in seconds)
-        ping_size: size of ping packet payload
-    Returns:
-        ping_result: dict containing ping results and other meta data
-    """
-    ping_count = int(ping_duration / ping_interval)
-    ping_deadline = int(ping_count * ping_interval) + 1
-    ping_cmd_linux = 'ping -c {} -w {} -i {} -s {} -D'.format(
-        ping_count,
-        ping_deadline,
-        ping_interval,
-        ping_size,
-    )
-
-    ping_cmd_macos = 'ping -c {} -t {} -i {} -s {}'.format(
-        ping_count,
-        ping_deadline,
-        ping_interval,
-        ping_size,
-    )
-
-    if isinstance(src_device, AndroidDevice):
-        ping_cmd = '{} {}'.format(ping_cmd_linux, dest_address)
-        ping_output = src_device.adb.shell(ping_cmd,
-                                           timeout=ping_deadline + SHORT_SLEEP,
-                                           ignore_status=True)
-    elif isinstance(src_device, ssh.connection.SshConnection):
-        platform = src_device.run('uname').stdout
-        if 'linux' in platform.lower():
-            ping_cmd = 'sudo {} {}'.format(ping_cmd_linux, dest_address)
-        elif 'darwin' in platform.lower():
-            ping_cmd = "sudo {} {}| while IFS= read -r line; do printf '[%s] %s\n' \"$(gdate '+%s.%N')\" \"$line\"; done".format(
-                ping_cmd_macos, dest_address)
-        ping_output = src_device.run(ping_cmd,
-                                     timeout=ping_deadline + SHORT_SLEEP,
-                                     ignore_status=True).stdout
-    else:
-        raise TypeError('Unable to ping using src_device of type %s.' %
-                        type(src_device))
-    return ping_utils.PingResult(ping_output.splitlines())
-
-
-@nonblocking
-def get_ping_stats_nb(src_device, dest_address, ping_duration, ping_interval,
-                      ping_size):
-    return get_ping_stats(src_device, dest_address, ping_duration,
-                          ping_interval, ping_size)
-
-
-# Iperf utilities
-@nonblocking
-def start_iperf_client_nb(iperf_client, iperf_server_address, iperf_args, tag,
-                          timeout):
-    return iperf_client.start(iperf_server_address, iperf_args, tag, timeout)
-
-
-def get_iperf_arg_string(duration,
-                         reverse_direction,
-                         interval=1,
-                         traffic_type='TCP',
-                         socket_size=None,
-                         num_processes=1,
-                         udp_throughput='1000M',
-                         ipv6=False):
-    """Function to format iperf client arguments.
-
-    This function takes in iperf client parameters and returns a properly
-    formatter iperf arg string to be used in throughput tests.
-
-    Args:
-        duration: iperf duration in seconds
-        reverse_direction: boolean controlling the -R flag for iperf clients
-        interval: iperf print interval
-        traffic_type: string specifying TCP or UDP traffic
-        socket_size: string specifying TCP window or socket buffer, e.g., 2M
-        num_processes: int specifying number of iperf processes
-        udp_throughput: string specifying TX throughput in UDP tests, e.g. 100M
-        ipv6: boolean controlling the use of IP V6
-    Returns:
-        iperf_args: string of formatted iperf args
-    """
-    iperf_args = '-i {} -t {} -J '.format(interval, duration)
-    if ipv6:
-        iperf_args = iperf_args + '-6 '
-    if traffic_type.upper() == 'UDP':
-        iperf_args = iperf_args + '-u -b {} -l 1470 -P {} '.format(
-            udp_throughput, num_processes)
-    elif traffic_type.upper() == 'TCP':
-        iperf_args = iperf_args + '-P {} '.format(num_processes)
-    if socket_size:
-        iperf_args = iperf_args + '-w {} '.format(socket_size)
-    if reverse_direction:
-        iperf_args = iperf_args + ' -R'
-    return iperf_args
-
-
-# Attenuator Utilities
-def atten_by_label(atten_list, path_label, atten_level):
-    """Attenuate signals according to their path label.
-
-    Args:
-        atten_list: list of attenuators to iterate over
-        path_label: path label on which to set desired attenuation
-        atten_level: attenuation desired on path
-    """
-    for atten in atten_list:
-        if path_label in atten.path:
-            atten.set_atten(atten_level, retry=True)
-
-
-def get_atten_for_target_rssi(target_rssi, attenuators, dut, ping_server):
-    """Function to estimate attenuation to hit a target RSSI.
-
-    This function estimates a constant attenuation setting on all atennuation
-    ports to hit a target RSSI. The estimate is not meant to be exact or
-    guaranteed.
-
-    Args:
-        target_rssi: rssi of interest
-        attenuators: list of attenuator ports
-        dut: android device object assumed connected to a wifi network.
-        ping_server: ssh connection object to ping server
-    Returns:
-        target_atten: attenuation setting to achieve target_rssi
-    """
-    logging.info('Searching attenuation for RSSI = {}dB'.format(target_rssi))
-    # Set attenuator to 0 dB
-    for atten in attenuators:
-        atten.set_atten(0, strict=False, retry=True)
-    # Start ping traffic
-    dut_ip = dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
-    # Measure starting RSSI
-    ping_future = get_ping_stats_nb(src_device=ping_server,
-                                    dest_address=dut_ip,
-                                    ping_duration=1.5,
-                                    ping_interval=0.02,
-                                    ping_size=64)
-    current_rssi = get_connected_rssi(dut,
-                                      num_measurements=4,
-                                      polling_frequency=0.25,
-                                      first_measurement_delay=0.5,
-                                      disconnect_warning=1,
-                                      ignore_samples=1)
-    current_rssi = current_rssi['signal_poll_rssi']['mean']
-    ping_future.result()
-    target_atten = 0
-    logging.debug('RSSI @ {0:.2f}dB attenuation = {1:.2f}'.format(
-        target_atten, current_rssi))
-    within_range = 0
-    for idx in range(20):
-        atten_delta = max(min(current_rssi - target_rssi, 20), -20)
-        target_atten = int((target_atten + atten_delta) * 4) / 4
-        if target_atten < 0:
-            return 0
-        if target_atten > attenuators[0].get_max_atten():
-            return attenuators[0].get_max_atten()
-        for atten in attenuators:
-            atten.set_atten(target_atten, strict=False, retry=True)
-        ping_future = get_ping_stats_nb(src_device=ping_server,
-                                        dest_address=dut_ip,
-                                        ping_duration=1.5,
-                                        ping_interval=0.02,
-                                        ping_size=64)
-        current_rssi = get_connected_rssi(dut,
-                                          num_measurements=4,
-                                          polling_frequency=0.25,
-                                          first_measurement_delay=0.5,
-                                          disconnect_warning=1,
-                                          ignore_samples=1)
-        current_rssi = current_rssi['signal_poll_rssi']['mean']
-        ping_future.result()
-        logging.info('RSSI @ {0:.2f}dB attenuation = {1:.2f}'.format(
-            target_atten, current_rssi))
-        if abs(current_rssi - target_rssi) < 1:
-            if within_range:
-                logging.info(
-                    'Reached RSSI: {0:.2f}. Target RSSI: {1:.2f}.'
-                    'Attenuation: {2:.2f}, Iterations = {3:.2f}'.format(
-                        current_rssi, target_rssi, target_atten, idx))
-                return target_atten
-            else:
-                within_range = True
-        else:
-            within_range = False
-    return target_atten
-
-
-def get_current_atten_dut_chain_map(attenuators,
-                                    dut,
-                                    ping_server,
-                                    ping_from_dut=False):
-    """Function to detect mapping between attenuator ports and DUT chains.
-
-    This function detects the mapping between attenuator ports and DUT chains
-    in cases where DUT chains are connected to only one attenuator port. The
-    function assumes the DUT is already connected to a wifi network. The
-    function starts by measuring per chain RSSI at 0 attenuation, then
-    attenuates one port at a time looking for the chain that reports a lower
-    RSSI.
-
-    Args:
-        attenuators: list of attenuator ports
-        dut: android device object assumed connected to a wifi network.
-        ping_server: ssh connection object to ping server
-        ping_from_dut: boolean controlling whether to ping from or to dut
-    Returns:
-        chain_map: list of dut chains, one entry per attenuator port
-    """
-    # Set attenuator to 0 dB
-    for atten in attenuators:
-        atten.set_atten(0, strict=False, retry=True)
-    # Start ping traffic
-    dut_ip = dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
-    if ping_from_dut:
-        ping_future = get_ping_stats_nb(dut, ping_server._settings.hostname,
-                                        11, 0.02, 64)
-    else:
-        ping_future = get_ping_stats_nb(ping_server, dut_ip, 11, 0.02, 64)
-    # Measure starting RSSI
-    base_rssi = get_connected_rssi(dut, 4, 0.25, 1)
-    chain0_base_rssi = base_rssi['chain_0_rssi']['mean']
-    chain1_base_rssi = base_rssi['chain_1_rssi']['mean']
-    if chain0_base_rssi < -70 or chain1_base_rssi < -70:
-        logging.warning('RSSI might be too low to get reliable chain map.')
-    # Compile chain map by attenuating one path at a time and seeing which
-    # chain's RSSI degrades
-    chain_map = []
-    for test_atten in attenuators:
-        # Set one attenuator to 30 dB down
-        test_atten.set_atten(30, strict=False, retry=True)
-        # Get new RSSI
-        test_rssi = get_connected_rssi(dut, 4, 0.25, 1)
-        # Assign attenuator to path that has lower RSSI
-        if chain0_base_rssi > -70 and chain0_base_rssi - test_rssi[
-                'chain_0_rssi']['mean'] > 10:
-            chain_map.append('DUT-Chain-0')
-        elif chain1_base_rssi > -70 and chain1_base_rssi - test_rssi[
-                'chain_1_rssi']['mean'] > 10:
-            chain_map.append('DUT-Chain-1')
-        else:
-            chain_map.append(None)
-        # Reset attenuator to 0
-        test_atten.set_atten(0, strict=False, retry=True)
-    ping_future.result()
-    logging.debug('Chain Map: {}'.format(chain_map))
-    return chain_map
-
-
-def get_full_rf_connection_map(attenuators,
-                               dut,
-                               ping_server,
-                               networks,
-                               ping_from_dut=False):
-    """Function to detect per-network connections between attenuator and DUT.
-
-    This function detects the mapping between attenuator ports and DUT chains
-    on all networks in its arguments. The function connects the DUT to each
-    network then calls get_current_atten_dut_chain_map to get the connection
-    map on the current network. The function outputs the results in two formats
-    to enable easy access when users are interested in indexing by network or
-    attenuator port.
-
-    Args:
-        attenuators: list of attenuator ports
-        dut: android device object assumed connected to a wifi network.
-        ping_server: ssh connection object to ping server
-        networks: dict of network IDs and configs
-    Returns:
-        rf_map_by_network: dict of RF connections indexed by network.
-        rf_map_by_atten: list of RF connections indexed by attenuator
-    """
-    for atten in attenuators:
-        atten.set_atten(0, strict=False, retry=True)
-
-    rf_map_by_network = collections.OrderedDict()
-    rf_map_by_atten = [[] for atten in attenuators]
-    for net_id, net_config in networks.items():
-        wutils.reset_wifi(dut)
-        wutils.wifi_connect(dut,
-                            net_config,
-                            num_of_tries=1,
-                            assert_on_fail=False,
-                            check_connectivity=False)
-        rf_map_by_network[net_id] = get_current_atten_dut_chain_map(
-            attenuators, dut, ping_server, ping_from_dut)
-        for idx, chain in enumerate(rf_map_by_network[net_id]):
-            if chain:
-                rf_map_by_atten[idx].append({
-                    'network': net_id,
-                    'dut_chain': chain
-                })
-    logging.debug('RF Map (by Network): {}'.format(rf_map_by_network))
-    logging.debug('RF Map (by Atten): {}'.format(rf_map_by_atten))
-
-    return rf_map_by_network, rf_map_by_atten
-
-
-# Generic device utils
-def get_dut_temperature(dut):
-    """Function to get dut temperature.
-
-    The function fetches and returns the reading from the temperature sensor
-    used for skin temperature and thermal throttling.
-
-    Args:
-        dut: AndroidDevice of interest
-    Returns:
-        temperature: device temperature. 0 if temperature could not be read
-    """
-    candidate_zones = [
-        '/sys/devices/virtual/thermal/tz-by-name/skin-therm/temp',
-        '/sys/devices/virtual/thermal/tz-by-name/sdm-therm-monitor/temp',
-        '/sys/devices/virtual/thermal/tz-by-name/sdm-therm-adc/temp',
-        '/sys/devices/virtual/thermal/tz-by-name/back_therm/temp',
-        '/dev/thermal/tz-by-name/quiet_therm/temp'
-    ]
-    for zone in candidate_zones:
-        try:
-            temperature = int(dut.adb.shell('cat {}'.format(zone)))
-            break
-        except:
-            temperature = 0
-    if temperature == 0:
-        logging.debug('Could not check DUT temperature.')
-    elif temperature > 100:
-        temperature = temperature / 1000
-    return temperature
-
-
-def wait_for_dut_cooldown(dut, target_temp=50, timeout=300):
-    """Function to wait for a DUT to cool down.
-
-    Args:
-        dut: AndroidDevice of interest
-        target_temp: target cooldown temperature
-        timeout: maxt time to wait for cooldown
-    """
-    start_time = time.time()
-    while time.time() - start_time < timeout:
-        temperature = get_dut_temperature(dut)
-        if temperature < target_temp:
-            break
-        time.sleep(SHORT_SLEEP)
-    elapsed_time = time.time() - start_time
-    logging.debug('DUT Final Temperature: {}C. Cooldown duration: {}'.format(
-        temperature, elapsed_time))
-
-
-def health_check(dut, batt_thresh=5, temp_threshold=53, cooldown=1):
-    """Function to check health status of a DUT.
-
-    The function checks both battery levels and temperature to avoid DUT
-    powering off during the test.
-
-    Args:
-        dut: AndroidDevice of interest
-        batt_thresh: battery level threshold
-        temp_threshold: temperature threshold
-        cooldown: flag to wait for DUT to cool down when overheating
-    Returns:
-        health_check: boolean confirming device is healthy
-    """
-    health_check = True
-    battery_level = utils.get_battery_level(dut)
-    if battery_level < batt_thresh:
-        logging.warning('Battery level low ({}%)'.format(battery_level))
-        health_check = False
-    else:
-        logging.debug('Battery level = {}%'.format(battery_level))
-
-    temperature = get_dut_temperature(dut)
-    if temperature > temp_threshold:
-        if cooldown:
-            logging.warning(
-                'Waiting for DUT to cooldown. ({} C)'.format(temperature))
-            wait_for_dut_cooldown(dut, target_temp=temp_threshold - 5)
-        else:
-            logging.warning('DUT Overheating ({} C)'.format(temperature))
-            health_check = False
-    else:
-        logging.debug('DUT Temperature = {} C'.format(temperature))
-    return health_check
-
-
-# Wifi Device Utils
-def empty_rssi_result():
-    return collections.OrderedDict([('data', []), ('mean', float('nan')),
-                                    ('stdev', float('nan'))])
-
-
-@nonblocking
-def get_connected_rssi_nb(dut,
-                          num_measurements=1,
-                          polling_frequency=SHORT_SLEEP,
-                          first_measurement_delay=0,
-                          disconnect_warning=True,
-                          ignore_samples=0,
-                          interface='wlan0'):
-    return get_connected_rssi(dut, num_measurements, polling_frequency,
-                              first_measurement_delay, disconnect_warning,
-                              ignore_samples, interface)
-
-
-@detect_wifi_decorator
-def get_connected_rssi(dut,
-                       num_measurements=1,
-                       polling_frequency=SHORT_SLEEP,
-                       first_measurement_delay=0,
-                       disconnect_warning=True,
-                       ignore_samples=0,
-                       interface='wlan0'):
-    """Gets all RSSI values reported for the connected access point/BSSID.
-
-    Args:
-        dut: android device object from which to get RSSI
-        num_measurements: number of scans done, and RSSIs collected
-        polling_frequency: time to wait between RSSI measurements
-        disconnect_warning: boolean controlling disconnection logging messages
-        ignore_samples: number of leading samples to ignore
-    Returns:
-        connected_rssi: dict containing the measurements results for
-        all reported RSSI values (signal_poll, per chain, etc.) and their
-        statistics
-    """
-
-
-@nonblocking
-def get_scan_rssi_nb(dut, tracked_bssids, num_measurements=1):
-    return get_scan_rssi(dut, tracked_bssids, num_measurements)
-
-
-@detect_wifi_decorator
-def get_scan_rssi(dut, tracked_bssids, num_measurements=1):
-    """Gets scan RSSI for specified BSSIDs.
-
-    Args:
-        dut: android device object from which to get RSSI
-        tracked_bssids: array of BSSIDs to gather RSSI data for
-        num_measurements: number of scans done, and RSSIs collected
-    Returns:
-        scan_rssi: dict containing the measurement results as well as the
-        statistics of the scan RSSI for all BSSIDs in tracked_bssids
-    """
-
-
-@detect_wifi_decorator
-def get_sw_signature(dut):
-    """Function that checks the signature for wifi firmware and config files.
-
-    Returns:
-        bdf_signature: signature consisting of last three digits of bdf cksums
-        fw_signature: floating point firmware version, i.e., major.minor
-    """
-
-
-@detect_wifi_decorator
-def get_country_code(dut):
-    """Function that returns the current wifi country code."""
-
-
-@detect_wifi_decorator
-def push_config(dut, config_file):
-    """Function to push Wifi BDF files
-
-    This function checks for existing wifi bdf files and over writes them all,
-    for simplicity, with the bdf file provided in the arguments. The dut is
-    rebooted for the bdf file to take effect
-
-    Args:
-        dut: dut to push bdf file to
-        config_file: path to bdf_file to push
-    """
-
-
-@detect_wifi_decorator
-def start_wifi_logging(dut):
-    """Function to start collecting wifi-related logs"""
-
-
-@detect_wifi_decorator
-def stop_wifi_logging(dut):
-    """Function to start collecting wifi-related logs"""
-
-
-@detect_wifi_decorator
-def push_firmware(dut, firmware_files):
-    """Function to push Wifi firmware files
-
-    Args:
-        dut: dut to push bdf file to
-        firmware_files: path to wlanmdsp.mbn file
-        datamsc_file: path to Data.msc file
-    """
-
-
-@detect_wifi_decorator
-def disable_beamforming(dut):
-    """Function to disable beamforming."""
-
-
-@detect_wifi_decorator
-def set_nss_capability(dut, nss):
-    """Function to set number of spatial streams supported."""
-
-
-@detect_wifi_decorator
-def set_chain_mask(dut, chain_mask):
-    """Function to set DUT chain mask.
-
-    Args:
-        dut: android device
-        chain_mask: desired chain mask in [0, 1, '2x2']
-    """
-
-
-# Link layer stats utilities
-class LinkLayerStats():
-    def __new__(self, dut, llstats_enabled=True):
-        if detect_wifi_platform(dut) == 'qcom':
-            return qcom_utils.LinkLayerStats(dut, llstats_enabled)
-        else:
-            return brcm_utils.LinkLayerStats(dut, llstats_enabled)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py
deleted file mode 100644
index d6c5e39..0000000
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py
+++ /dev/null
@@ -1,361 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import bokeh, bokeh.plotting, bokeh.io
-import collections
-import itertools
-import json
-import math
-
-
-# Plotting Utilities
-class BokehFigure():
-    """Class enabling  simplified Bokeh plotting."""
-
-    COLORS = [
-        'black',
-        'blue',
-        'blueviolet',
-        'brown',
-        'burlywood',
-        'cadetblue',
-        'cornflowerblue',
-        'crimson',
-        'cyan',
-        'darkblue',
-        'darkgreen',
-        'darkmagenta',
-        'darkorange',
-        'darkred',
-        'deepskyblue',
-        'goldenrod',
-        'green',
-        'grey',
-        'indigo',
-        'navy',
-        'olive',
-        'orange',
-        'red',
-        'salmon',
-        'teal',
-        'yellow',
-    ]
-    MARKERS = [
-        'asterisk', 'circle', 'circle_cross', 'circle_x', 'cross', 'diamond',
-        'diamond_cross', 'hex', 'inverted_triangle', 'square', 'square_x',
-        'square_cross', 'triangle', 'x'
-    ]
-
-    TOOLS = ('box_zoom,box_select,pan,crosshair,redo,undo,reset,hover,save')
-
-    def __init__(self,
-                 title=None,
-                 x_label=None,
-                 primary_y_label=None,
-                 secondary_y_label=None,
-                 height=700,
-                 width=1100,
-                 title_size='15pt',
-                 axis_label_size='12pt',
-                 legend_label_size='12pt',
-                 axis_tick_label_size='12pt',
-                 x_axis_type='auto',
-                 sizing_mode='scale_both',
-                 json_file=None):
-        if json_file:
-            self.load_from_json(json_file)
-        else:
-            self.figure_data = []
-            self.fig_property = {
-                'title': title,
-                'x_label': x_label,
-                'primary_y_label': primary_y_label,
-                'secondary_y_label': secondary_y_label,
-                'num_lines': 0,
-                'height': height,
-                'width': width,
-                'title_size': title_size,
-                'axis_label_size': axis_label_size,
-                'legend_label_size': legend_label_size,
-                'axis_tick_label_size': axis_tick_label_size,
-                'x_axis_type': x_axis_type,
-                'sizing_mode': sizing_mode
-            }
-
-    def init_plot(self):
-        self.plot = bokeh.plotting.figure(
-            sizing_mode=self.fig_property['sizing_mode'],
-            plot_width=self.fig_property['width'],
-            plot_height=self.fig_property['height'],
-            title=self.fig_property['title'],
-            tools=self.TOOLS,
-            x_axis_type=self.fig_property['x_axis_type'],
-            output_backend='webgl')
-        tooltips = [
-            ('index', '$index'),
-            ('(x,y)', '($x, $y)'),
-        ]
-        hover_set = []
-        for line in self.figure_data:
-            hover_set.extend(line['hover_text'].keys())
-        hover_set = set(hover_set)
-        for item in hover_set:
-            tooltips.append((item, '@{}'.format(item)))
-        self.plot.hover.tooltips = tooltips
-        self.plot.add_tools(
-            bokeh.models.tools.WheelZoomTool(dimensions='width'))
-        self.plot.add_tools(
-            bokeh.models.tools.WheelZoomTool(dimensions='height'))
-
-    def _filter_line(self, x_data, y_data, hover_text=None):
-        """Function to remove NaN points from bokeh plots."""
-        x_data_filtered = []
-        y_data_filtered = []
-        hover_text_filtered = {}
-        for idx, xy in enumerate(
-                itertools.zip_longest(x_data, y_data, fillvalue=float('nan'))):
-            if not math.isnan(xy[1]):
-                x_data_filtered.append(xy[0])
-                y_data_filtered.append(xy[1])
-                if hover_text:
-                    for key, value in hover_text.items():
-                        hover_text_filtered.setdefault(key, [])
-                        hover_text_filtered[key].append(
-                            value[idx] if len(value) > idx else '')
-        return x_data_filtered, y_data_filtered, hover_text_filtered
-
-    def add_line(self,
-                 x_data,
-                 y_data,
-                 legend,
-                 hover_text=None,
-                 color=None,
-                 width=3,
-                 style='solid',
-                 marker=None,
-                 marker_size=10,
-                 shaded_region=None,
-                 y_axis='default'):
-        """Function to add line to existing BokehFigure.
-
-        Args:
-            x_data: list containing x-axis values for line
-            y_data: list containing y_axis values for line
-            legend: string containing line title
-            hover_text: text to display when hovering over lines
-            color: string describing line color
-            width: integer line width
-            style: string describing line style, e.g, solid or dashed
-            marker: string specifying line marker, e.g., cross
-            shaded region: data describing shaded region to plot
-            y_axis: identifier for y-axis to plot line against
-        """
-        if y_axis not in ['default', 'secondary']:
-            raise ValueError('y_axis must be default or secondary')
-        if color == None:
-            color = self.COLORS[self.fig_property['num_lines'] %
-                                len(self.COLORS)]
-        if style == 'dashed':
-            style = [5, 5]
-        if isinstance(hover_text, list):
-            hover_text = {'info': hover_text}
-        x_data_filter, y_data_filter, hover_text_filter = self._filter_line(
-            x_data, y_data, hover_text)
-        self.figure_data.append({
-            'x_data': x_data_filter,
-            'y_data': y_data_filter,
-            'legend': legend,
-            'hover_text': hover_text_filter,
-            'color': color,
-            'width': width,
-            'style': style,
-            'marker': marker,
-            'marker_size': marker_size,
-            'shaded_region': shaded_region,
-            'y_axis': y_axis
-        })
-        self.fig_property['num_lines'] += 1
-
-    def add_scatter(self,
-                    x_data,
-                    y_data,
-                    legend,
-                    hover_text=None,
-                    color=None,
-                    marker=None,
-                    marker_size=10,
-                    y_axis='default'):
-        """Function to add line to existing BokehFigure.
-
-        Args:
-            x_data: list containing x-axis values for line
-            y_data: list containing y_axis values for line
-            legend: string containing line title
-            hover_text: text to display when hovering over lines
-            color: string describing line color
-            marker: string specifying marker, e.g., cross
-            y_axis: identifier for y-axis to plot line against
-        """
-        if y_axis not in ['default', 'secondary']:
-            raise ValueError('y_axis must be default or secondary')
-        if color == None:
-            color = self.COLORS[self.fig_property['num_lines'] %
-                                len(self.COLORS)]
-        if marker == None:
-            marker = self.MARKERS[self.fig_property['num_lines'] %
-                                  len(self.MARKERS)]
-        self.figure_data.append({
-            'x_data': x_data,
-            'y_data': y_data,
-            'legend': legend,
-            'hover_text': hover_text,
-            'color': color,
-            'width': 0,
-            'style': 'solid',
-            'marker': marker,
-            'marker_size': marker_size,
-            'shaded_region': None,
-            'y_axis': y_axis
-        })
-        self.fig_property['num_lines'] += 1
-
-    def generate_figure(self, output_file=None, save_json=True):
-        """Function to generate and save BokehFigure.
-
-        Args:
-            output_file: string specifying output file path
-        """
-        self.init_plot()
-        two_axes = False
-        for line in self.figure_data:
-            data_dict = {'x': line['x_data'], 'y': line['y_data']}
-            for key, value in line['hover_text'].items():
-                data_dict[key] = value
-            source = bokeh.models.ColumnDataSource(data=data_dict)
-            if line['width'] > 0:
-                self.plot.line(x='x',
-                               y='y',
-                               legend_label=line['legend'],
-                               line_width=line['width'],
-                               color=line['color'],
-                               line_dash=line['style'],
-                               name=line['y_axis'],
-                               y_range_name=line['y_axis'],
-                               source=source)
-            if line['shaded_region']:
-                band_x = line['shaded_region']['x_vector']
-                band_x.extend(line['shaded_region']['x_vector'][::-1])
-                band_y = line['shaded_region']['lower_limit']
-                band_y.extend(line['shaded_region']['upper_limit'][::-1])
-                self.plot.patch(band_x,
-                                band_y,
-                                color='#7570B3',
-                                line_alpha=0.1,
-                                fill_alpha=0.1)
-            if line['marker'] in self.MARKERS:
-                marker_func = getattr(self.plot, line['marker'])
-                marker_func(x='x',
-                            y='y',
-                            size=line['marker_size'],
-                            legend_label=line['legend'],
-                            line_color=line['color'],
-                            fill_color=line['color'],
-                            name=line['y_axis'],
-                            y_range_name=line['y_axis'],
-                            source=source)
-            if line['y_axis'] == 'secondary':
-                two_axes = True
-
-        #x-axis formatting
-        self.plot.xaxis.axis_label = self.fig_property['x_label']
-        self.plot.x_range.range_padding = 0
-        self.plot.xaxis[0].axis_label_text_font_size = self.fig_property[
-            'axis_label_size']
-        self.plot.xaxis.major_label_text_font_size = self.fig_property[
-            'axis_tick_label_size']
-        #y-axis formatting
-        self.plot.yaxis[0].axis_label = self.fig_property['primary_y_label']
-        self.plot.yaxis[0].axis_label_text_font_size = self.fig_property[
-            'axis_label_size']
-        self.plot.yaxis.major_label_text_font_size = self.fig_property[
-            'axis_tick_label_size']
-        self.plot.y_range = bokeh.models.DataRange1d(names=['default'])
-        if two_axes and 'secondary' not in self.plot.extra_y_ranges:
-            self.plot.extra_y_ranges = {
-                'secondary': bokeh.models.DataRange1d(names=['secondary'])
-            }
-            self.plot.add_layout(
-                bokeh.models.LinearAxis(
-                    y_range_name='secondary',
-                    axis_label=self.fig_property['secondary_y_label'],
-                    axis_label_text_font_size=self.
-                    fig_property['axis_label_size']), 'right')
-        # plot formatting
-        self.plot.legend.location = 'top_right'
-        self.plot.legend.click_policy = 'hide'
-        self.plot.title.text_font_size = self.fig_property['title_size']
-        self.plot.legend.label_text_font_size = self.fig_property[
-            'legend_label_size']
-
-        if output_file is not None:
-            self.save_figure(output_file, save_json)
-        return self.plot
-
-    def load_from_json(self, file_path):
-        with open(file_path, 'r') as json_file:
-            fig_dict = json.load(json_file)
-        self.fig_property = fig_dict['fig_property']
-        self.figure_data = fig_dict['figure_data']
-
-    def _save_figure_json(self, output_file):
-        """Function to save a json format of a figure"""
-        figure_dict = collections.OrderedDict(fig_property=self.fig_property,
-                                              figure_data=self.figure_data)
-        output_file = output_file.replace('.html', '_plot_data.json')
-        with open(output_file, 'w') as outfile:
-            json.dump(figure_dict, outfile, indent=4)
-
-    def save_figure(self, output_file, save_json=True):
-        """Function to save BokehFigure.
-
-        Args:
-            output_file: string specifying output file path
-            save_json: flag controlling json outputs
-        """
-        if save_json:
-            self._save_figure_json(output_file)
-        bokeh.io.output_file(output_file)
-        bokeh.io.save(self.plot)
-
-    @staticmethod
-    def save_figures(figure_array, output_file_path, save_json=True):
-        """Function to save list of BokehFigures in one file.
-
-        Args:
-            figure_array: list of BokehFigure object to be plotted
-            output_file: string specifying output file path
-        """
-        for idx, figure in enumerate(figure_array):
-            figure.generate_figure()
-            if save_json:
-                json_file_path = output_file_path.replace(
-                    '.html', '{}-plot_data.json'.format(idx))
-                figure._save_figure_json(json_file_path)
-        plot_array = [figure.plot for figure in figure_array]
-        all_plots = bokeh.layouts.column(children=plot_array,
-                                         sizing_mode='scale_width')
-        bokeh.plotting.output_file(output_file_path)
-        bokeh.plotting.save(all_plots)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py
deleted file mode 100644
index 7544f8d..0000000
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py
+++ /dev/null
@@ -1,578 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import hashlib
-import itertools
-import logging
-import math
-import numpy
-import re
-import statistics
-import time
-
-VERY_SHORT_SLEEP = 0.5
-SHORT_SLEEP = 1
-MED_SLEEP = 6
-DISCONNECTION_MESSAGE_BRCM = 'driver adapter not found'
-RSSI_ERROR_VAL = float('nan')
-RATE_TABLE = {
-    'HT': {
-        1: {
-            20: [7.2, 14.4, 21.7, 28.9, 43.4, 57.8, 65.0, 72.2],
-            40: [15.0, 30.0, 45.0, 60.0, 90.0, 120.0, 135.0, 150.0]
-        },
-        2: {
-            20: [
-                0, 0, 0, 0, 0, 0, 0, 0, 14.4, 28.8, 43.4, 57.8, 86.8, 115.6,
-                130, 144.4
-            ],
-            40: [0, 0, 0, 0, 0, 0, 0, 0, 30, 60, 90, 120, 180, 240, 270, 300]
-        }
-    },
-    'VHT': {
-        1: {
-            20: [
-                7.2, 14.4, 21.7, 28.9, 43.4, 57.8, 65.0, 72.2, 86.7, 96.2,
-                129.0, 143.4
-            ],
-            40: [
-                15.0, 30.0, 45.0, 60.0, 90.0, 120.0, 135.0, 150.0, 180.0,
-                200.0, 258, 286.8
-            ],
-            80: [
-                32.5, 65.0, 97.5, 130.0, 195.0, 260.0, 292.5, 325.0, 390.0,
-                433.3, 540.4, 600.4
-            ],
-            160: [
-                65.0, 130.0, 195.0, 260.0, 390.0, 520.0, 585.0, 650.0, 780.0,
-                1080.8, 1200.8
-            ]
-        },
-        2: {
-            20: [
-                14.4, 28.8, 43.4, 57.8, 86.8, 115.6, 130, 144.4, 173.4, 192.4,
-                258, 286.8
-            ],
-            40: [30, 60, 90, 120, 180, 240, 270, 300, 360, 400, 516, 573.6],
-            80: [
-                65, 130, 195, 260, 390, 520, 585, 650, 780, 866.6, 1080.8,
-                1200.8
-            ],
-            160:
-            [130, 260, 390, 520, 780, 1040, 1170, 1300, 1560, 2161.6, 2401.6]
-        },
-    },
-    'HE': {
-        1: {
-            20: [
-                8.6, 17.2, 25.8, 34.4, 51.6, 68.8, 77.4, 86.0, 103.2, 114.7,
-                129.0, 143.4
-            ],
-            40: [
-                17.2, 34.4, 51.6, 68.8, 103.2, 137.6, 154.8, 172, 206.4, 229.4,
-                258, 286.8
-            ],
-            80: [
-                36.0, 72.1, 108.1, 144.1, 216.2, 288.2, 324.3, 360.3, 432.4,
-                480.4, 540.4, 600.4
-            ],
-            160: [
-                72, 144.2, 216.2, 288.2, 432.4, 576.4, 648.6, 720.6, 864.8,
-                960.8, 1080.8, 1200.8
-            ]
-        },
-        2: {
-            20: [
-                17.2, 34.4, 51.6, 68.8, 103.2, 137.6, 154.8, 172, 206.4, 229.4,
-                258, 286.8
-            ],
-            40: [
-                34.4, 68.8, 103.2, 137.6, 206.4, 275.2, 309.6, 344, 412.8,
-                458.8, 516, 573.6
-            ],
-            80: [
-                72, 144.2, 216.2, 288.2, 432.4, 576.4, 648.6, 720.6, 864.8,
-                960.8, 1080.8, 1200.8
-            ],
-            160: [
-                144, 288.4, 432.4, 576.4, 864.8, 1152.8, 1297.2, 1441.2,
-                1729.6, 1921.6, 2161.6, 2401.6
-            ]
-        },
-    },
-}
-
-
-# Rssi Utilities
-def empty_rssi_result():
-    return collections.OrderedDict([('data', []), ('mean', None),
-                                    ('stdev', None)])
-
-
-def get_connected_rssi(dut,
-                       num_measurements=1,
-                       polling_frequency=SHORT_SLEEP,
-                       first_measurement_delay=0,
-                       disconnect_warning=True,
-                       ignore_samples=0,
-                       interface='wlan0'):
-    # yapf: disable
-    connected_rssi = collections.OrderedDict(
-        [('time_stamp', []),
-         ('bssid', []), ('ssid', []), ('frequency', []),
-         ('signal_poll_rssi', empty_rssi_result()),
-         ('signal_poll_avg_rssi', empty_rssi_result()),
-         ('chain_0_rssi', empty_rssi_result()),
-         ('chain_1_rssi', empty_rssi_result())])
-
-    # yapf: enable
-    previous_bssid = 'disconnected'
-    t0 = time.time()
-    time.sleep(first_measurement_delay)
-    for idx in range(num_measurements):
-        measurement_start_time = time.time()
-        connected_rssi['time_stamp'].append(measurement_start_time - t0)
-        # Get signal poll RSSI
-        try:
-            status_output = dut.adb.shell(
-                'wpa_cli -i {} status'.format(interface))
-        except:
-            status_output = ''
-        match = re.search('bssid=.*', status_output)
-        if match:
-            current_bssid = match.group(0).split('=')[1]
-            connected_rssi['bssid'].append(current_bssid)
-        else:
-            current_bssid = 'disconnected'
-            connected_rssi['bssid'].append(current_bssid)
-            if disconnect_warning and previous_bssid != 'disconnected':
-                logging.warning('WIFI DISCONNECT DETECTED!')
-
-        previous_bssid = current_bssid
-        match = re.search('\s+ssid=.*', status_output)
-        if match:
-            ssid = match.group(0).split('=')[1]
-            connected_rssi['ssid'].append(ssid)
-        else:
-            connected_rssi['ssid'].append('disconnected')
-
-        #TODO: SEARCH MAP ; PICK CENTER CHANNEL
-        match = re.search('\s+freq=.*', status_output)
-        if match:
-            frequency = int(match.group(0).split('=')[1])
-            connected_rssi['frequency'].append(frequency)
-        else:
-            connected_rssi['frequency'].append(RSSI_ERROR_VAL)
-
-        if interface == 'wlan0':
-            try:
-                per_chain_rssi = dut.adb.shell('wl phy_rssi_ant')
-                chain_0_rssi = re.search(
-                    r'rssi\[0\]\s(?P<chain_0_rssi>[0-9\-]*)', per_chain_rssi)
-                if chain_0_rssi:
-                    chain_0_rssi = int(chain_0_rssi.group('chain_0_rssi'))
-                else:
-                    chain_0_rssi = -float('inf')
-                chain_1_rssi = re.search(
-                    r'rssi\[1\]\s(?P<chain_1_rssi>[0-9\-]*)', per_chain_rssi)
-                if chain_1_rssi:
-                    chain_1_rssi = int(chain_1_rssi.group('chain_1_rssi'))
-                else:
-                    chain_1_rssi = -float('inf')
-            except:
-                chain_0_rssi = RSSI_ERROR_VAL
-                chain_1_rssi = RSSI_ERROR_VAL
-            connected_rssi['chain_0_rssi']['data'].append(chain_0_rssi)
-            connected_rssi['chain_1_rssi']['data'].append(chain_1_rssi)
-            combined_rssi = math.pow(10, chain_0_rssi / 10) + math.pow(
-                10, chain_1_rssi / 10)
-            combined_rssi = 10 * math.log10(combined_rssi)
-            connected_rssi['signal_poll_rssi']['data'].append(combined_rssi)
-            connected_rssi['signal_poll_avg_rssi']['data'].append(
-                combined_rssi)
-        else:
-            try:
-                signal_poll_output = dut.adb.shell(
-                    'wpa_cli -i {} signal_poll'.format(interface))
-            except:
-                signal_poll_output = ''
-            match = re.search('RSSI=.*', signal_poll_output)
-            if match:
-                temp_rssi = int(match.group(0).split('=')[1])
-                if temp_rssi == -9999 or temp_rssi == 0:
-                    connected_rssi['signal_poll_rssi']['data'].append(
-                        RSSI_ERROR_VAL)
-                else:
-                    connected_rssi['signal_poll_rssi']['data'].append(
-                        temp_rssi)
-            else:
-                connected_rssi['signal_poll_rssi']['data'].append(
-                    RSSI_ERROR_VAL)
-            connected_rssi['chain_0_rssi']['data'].append(RSSI_ERROR_VAL)
-            connected_rssi['chain_1_rssi']['data'].append(RSSI_ERROR_VAL)
-        measurement_elapsed_time = time.time() - measurement_start_time
-        time.sleep(max(0, polling_frequency - measurement_elapsed_time))
-
-    # Statistics, Statistics
-    for key, val in connected_rssi.copy().items():
-        if 'data' not in val:
-            continue
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
-        if len(filtered_rssi_values) > ignore_samples:
-            filtered_rssi_values = filtered_rssi_values[ignore_samples:]
-        if filtered_rssi_values:
-            connected_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                connected_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
-            else:
-                connected_rssi[key]['stdev'] = 0
-        else:
-            connected_rssi[key]['mean'] = RSSI_ERROR_VAL
-            connected_rssi[key]['stdev'] = RSSI_ERROR_VAL
-
-    return connected_rssi
-
-
-def get_scan_rssi(dut, tracked_bssids, num_measurements=1):
-    scan_rssi = collections.OrderedDict()
-    for bssid in tracked_bssids:
-        scan_rssi[bssid] = empty_rssi_result()
-    for idx in range(num_measurements):
-        scan_output = dut.adb.shell('cmd wifi start-scan')
-        time.sleep(MED_SLEEP)
-        scan_output = dut.adb.shell('cmd wifi list-scan-results')
-        for bssid in tracked_bssids:
-            bssid_result = re.search(bssid + '.*',
-                                     scan_output,
-                                     flags=re.IGNORECASE)
-            if bssid_result:
-                bssid_result = bssid_result.group(0).split()
-                scan_rssi[bssid]['data'].append(int(bssid_result[2]))
-            else:
-                scan_rssi[bssid]['data'].append(RSSI_ERROR_VAL)
-    # Compute mean RSSIs. Only average valid readings.
-    # Output RSSI_ERROR_VAL if no readings found.
-    for key, val in scan_rssi.items():
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
-        if filtered_rssi_values:
-            scan_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                scan_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
-            else:
-                scan_rssi[key]['stdev'] = 0
-        else:
-            scan_rssi[key]['mean'] = RSSI_ERROR_VAL
-            scan_rssi[key]['stdev'] = RSSI_ERROR_VAL
-    return scan_rssi
-
-
-def get_sw_signature(dut):
-    bdf_output = dut.adb.shell('cksum /vendor/firmware/bcmdhd*')
-    logging.debug('BDF Checksum output: {}'.format(bdf_output))
-    bdf_signature = sum(
-        [int(line.split(' ')[0]) for line in bdf_output.splitlines()]) % 1000
-
-    fw_version = dut.adb.shell('getprop vendor.wlan.firmware.version')
-    driver_version = dut.adb.shell('getprop vendor.wlan.driver.version')
-    logging.debug('Firmware version : {}. Driver version: {}'.format(
-        fw_version, driver_version))
-    fw_signature = '{}+{}'.format(fw_version, driver_version)
-    fw_signature = int(hashlib.md5(fw_signature.encode()).hexdigest(),
-                       16) % 1000
-    serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000
-    return {
-        'config_signature': bdf_signature,
-        'fw_signature': fw_signature,
-        'serial_hash': serial_hash
-    }
-
-
-def get_country_code(dut):
-    try:
-        country_code = dut.adb.shell('wl country').split(' ')[0]
-    except:
-        country_code = 'XZ'
-    if country_code == 'XZ':
-        country_code = 'WW'
-    logging.debug('Country code: {}'.format(country_code))
-    return country_code
-
-
-def push_config(dut, config_file):
-    config_files_list = dut.adb.shell('ls /vendor/etc/*.cal').splitlines()
-    for dst_file in config_files_list:
-        dut.push_system_file(config_file, dst_file)
-    dut.reboot()
-
-
-def start_wifi_logging(dut):
-    pass
-
-
-def stop_wifi_logging(dut):
-    pass
-
-
-def push_firmware(dut, firmware_files):
-    """Function to push Wifi firmware files
-
-    Args:
-        dut: dut to push bdf file to
-        firmware_files: path to wlanmdsp.mbn file
-        datamsc_file: path to Data.msc file
-    """
-    for file in firmware_files:
-        dut.push_system_file(file, '/vendor/firmware/')
-    dut.reboot()
-
-
-def disable_beamforming(dut):
-    dut.adb.shell('wl txbf 0')
-
-
-def set_nss_capability(dut, nss):
-    dut.adb.shell('wl he omi -r {} -t {}'.format(nss, nss))
-
-
-def set_chain_mask(dut, chain):
-    if chain == '2x2':
-        chain = 3
-    else:
-        chain = chain + 1
-    # Get current chain mask
-    try:
-        curr_tx_chain = int(dut.adb.shell('wl txchain'))
-        curr_rx_chain = int(dut.adb.shell('wl rxchain'))
-    except:
-        curr_tx_chain = -1
-        curr_rx_chain = -1
-    if curr_tx_chain == chain and curr_rx_chain == chain:
-        return
-    # Set chain mask if needed
-    dut.adb.shell('wl down')
-    time.sleep(VERY_SHORT_SLEEP)
-    dut.adb.shell('wl txchain 0x{}'.format(chain))
-    dut.adb.shell('wl rxchain 0x{}'.format(chain))
-    dut.adb.shell('wl up')
-
-
-class LinkLayerStats():
-
-    LLSTATS_CMD = 'wl dump ampdu; wl counters;'
-    LL_STATS_CLEAR_CMD = 'wl dump_clear ampdu; wl reset_cnts;'
-    BW_REGEX = re.compile(r'Chanspec:.+ (?P<bandwidth>[0-9]+)MHz')
-    MCS_REGEX = re.compile(r'(?P<count>[0-9]+)\((?P<percent>[0-9]+)%\)')
-    RX_REGEX = re.compile(r'RX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)'
-                          '\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)')
-    TX_REGEX = re.compile(r'TX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)'
-                          '\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)')
-    TX_PER_REGEX = re.compile(
-        r'(?P<mode>\S+) PER\s+:\s*(?P<nss1>[0-9, ,(,),%]*)'
-        '\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)')
-    RX_FCS_REGEX = re.compile(
-        r'rxbadfcs (?P<rx_bad_fcs>[0-9]*).+\n.+goodfcs (?P<rx_good_fcs>[0-9]*)'
-    )
-    RX_AGG_REGEX = re.compile(r'rxmpduperampdu (?P<aggregation>[0-9]*)')
-    TX_AGG_REGEX = re.compile(r' mpduperampdu (?P<aggregation>[0-9]*)')
-    TX_AGG_STOP_REGEX = re.compile(
-        r'agg stop reason: tot_agg_tried (?P<agg_tried>[0-9]+) agg_txcancel (?P<agg_canceled>[0-9]+) (?P<agg_stop_reason>.+)'
-    )
-    TX_AGG_STOP_REASON_REGEX = re.compile(
-        r'(?P<reason>\w+) [0-9]+ \((?P<value>[0-9]+%)\)')
-    MCS_ID = collections.namedtuple(
-        'mcs_id', ['mode', 'num_streams', 'bandwidth', 'mcs', 'gi'])
-    MODE_MAP = {'0': '11a/g', '1': '11b', '2': '11n', '3': '11ac'}
-    BW_MAP = {'0': 20, '1': 40, '2': 80}
-
-    def __init__(self, dut, llstats_enabled=True):
-        self.dut = dut
-        self.llstats_enabled = llstats_enabled
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-
-    def update_stats(self):
-        if self.llstats_enabled:
-            try:
-                llstats_output = self.dut.adb.shell(self.LLSTATS_CMD,
-                                                    timeout=1)
-                self.dut.adb.shell_nb(self.LL_STATS_CLEAR_CMD)
-
-                wl_join = self.dut.adb.shell("wl status")
-                self.bandwidth = int(
-                    re.search(self.BW_REGEX, wl_join).group('bandwidth'))
-            except:
-                llstats_output = ''
-        else:
-            llstats_output = ''
-        self._update_stats(llstats_output)
-
-    def reset_stats(self):
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-
-    def _empty_llstats(self):
-        return collections.OrderedDict(mcs_stats=collections.OrderedDict(),
-                                       mpdu_stats=collections.OrderedDict(),
-                                       summary=collections.OrderedDict())
-
-    def _empty_mcs_stat(self):
-        return collections.OrderedDict(txmpdu=0,
-                                       rxmpdu=0,
-                                       mpdu_lost=0,
-                                       retries=0,
-                                       retries_short=0,
-                                       retries_long=0)
-
-    def _mcs_id_to_string(self, mcs_id):
-        mcs_string = '{} Nss{} MCS{} GI{}'.format(mcs_id.mode,
-                                                  mcs_id.num_streams,
-                                                  mcs_id.mcs, mcs_id.gi)
-        return mcs_string
-
-    def _parse_mcs_stats(self, llstats_output):
-        llstats_dict = {}
-        # Look for per-peer stats
-        match = re.search(self.RX_REGEX, llstats_output)
-        if not match:
-            self.reset_stats()
-            return collections.OrderedDict()
-        # Find and process all matches for per stream stats
-        rx_match_iter = re.finditer(self.RX_REGEX, llstats_output)
-        tx_match_iter = re.finditer(self.TX_REGEX, llstats_output)
-        tx_per_match_iter = re.finditer(self.TX_PER_REGEX, llstats_output)
-        for rx_match, tx_match, tx_per_match in zip(rx_match_iter,
-                                                    tx_match_iter,
-                                                    tx_per_match_iter):
-            mode = rx_match.group('mode')
-            mode = 'HT' if mode == 'MCS' else mode
-            for nss in [1, 2]:
-                rx_mcs_iter = re.finditer(self.MCS_REGEX,
-                                          rx_match.group(nss + 1))
-                tx_mcs_iter = re.finditer(self.MCS_REGEX,
-                                          tx_match.group(nss + 1))
-                tx_per_iter = re.finditer(self.MCS_REGEX,
-                                          tx_per_match.group(nss + 1))
-                for mcs, (rx_mcs_stats, tx_mcs_stats,
-                          tx_per_mcs_stats) in enumerate(
-                              itertools.zip_longest(rx_mcs_iter, tx_mcs_iter,
-                                                    tx_per_iter)):
-                    current_mcs = self.MCS_ID(
-                        mode, nss, self.bandwidth,
-                        mcs + int(8 * (mode == 'HT') * (nss - 1)), 0)
-                    current_stats = collections.OrderedDict(
-                        txmpdu=int(tx_mcs_stats.group('count'))
-                        if tx_mcs_stats else 0,
-                        rxmpdu=int(rx_mcs_stats.group('count'))
-                        if rx_mcs_stats else 0,
-                        mpdu_lost=0,
-                        retries=tx_per_mcs_stats.group('count')
-                        if tx_per_mcs_stats else 0,
-                        retries_short=0,
-                        retries_long=0,
-                        mcs_id=current_mcs)
-                    llstats_dict[self._mcs_id_to_string(
-                        current_mcs)] = current_stats
-        return llstats_dict
-
-    def _parse_mpdu_stats(self, llstats_output):
-        rx_agg_match = re.search(self.RX_AGG_REGEX, llstats_output)
-        tx_agg_match = re.search(self.TX_AGG_REGEX, llstats_output)
-        tx_agg_stop_match = re.search(self.TX_AGG_STOP_REGEX, llstats_output)
-        rx_fcs_match = re.search(self.RX_FCS_REGEX, llstats_output)
-
-        if rx_agg_match and tx_agg_match and tx_agg_stop_match and rx_fcs_match:
-            agg_stop_dict = collections.OrderedDict(
-                rx_aggregation=int(rx_agg_match.group('aggregation')),
-                tx_aggregation=int(tx_agg_match.group('aggregation')),
-                tx_agg_tried=int(tx_agg_stop_match.group('agg_tried')),
-                tx_agg_canceled=int(tx_agg_stop_match.group('agg_canceled')),
-                rx_good_fcs=int(rx_fcs_match.group('rx_good_fcs')),
-                rx_bad_fcs=int(rx_fcs_match.group('rx_bad_fcs')),
-                agg_stop_reason=collections.OrderedDict())
-            agg_reason_match = re.finditer(
-                self.TX_AGG_STOP_REASON_REGEX,
-                tx_agg_stop_match.group('agg_stop_reason'))
-            for reason_match in agg_reason_match:
-                agg_stop_dict['agg_stop_reason'][reason_match.group(
-                    'reason')] = reason_match.group('value')
-
-        else:
-            agg_stop_dict = collections.OrderedDict(rx_aggregation=0,
-                                                    tx_aggregation=0,
-                                                    tx_agg_tried=0,
-                                                    tx_agg_canceled=0,
-                                                    rx_good_fcs=0,
-                                                    rx_bad_fcs=0,
-                                                    agg_stop_reason=None)
-        return agg_stop_dict
-
-    def _generate_stats_summary(self, llstats_dict):
-        llstats_summary = collections.OrderedDict(common_tx_mcs=None,
-                                                  common_tx_mcs_count=0,
-                                                  common_tx_mcs_freq=0,
-                                                  common_rx_mcs=None,
-                                                  common_rx_mcs_count=0,
-                                                  common_rx_mcs_freq=0,
-                                                  rx_per=float('nan'))
-        mcs_ids = []
-        tx_mpdu = []
-        rx_mpdu = []
-        phy_rates = []
-        for mcs_str, mcs_stats in llstats_dict['mcs_stats'].items():
-            mcs_id = mcs_stats['mcs_id']
-            mcs_ids.append(mcs_str)
-            tx_mpdu.append(mcs_stats['txmpdu'])
-            rx_mpdu.append(mcs_stats['rxmpdu'])
-            phy_rates.append(RATE_TABLE[mcs_id.mode][mcs_id.num_streams][
-                mcs_id.bandwidth][mcs_id.mcs])
-        if len(tx_mpdu) == 0 or len(rx_mpdu) == 0:
-            return llstats_summary
-        llstats_summary['common_tx_mcs'] = mcs_ids[numpy.argmax(tx_mpdu)]
-        llstats_summary['common_tx_mcs_count'] = numpy.max(tx_mpdu)
-        llstats_summary['common_rx_mcs'] = mcs_ids[numpy.argmax(rx_mpdu)]
-        llstats_summary['common_rx_mcs_count'] = numpy.max(rx_mpdu)
-        if sum(tx_mpdu) and sum(rx_mpdu):
-            llstats_summary['mean_tx_phy_rate'] = numpy.average(
-                phy_rates, weights=tx_mpdu)
-            llstats_summary['mean_rx_phy_rate'] = numpy.average(
-                phy_rates, weights=rx_mpdu)
-            llstats_summary['common_tx_mcs_freq'] = (
-                llstats_summary['common_tx_mcs_count'] / sum(tx_mpdu))
-            llstats_summary['common_rx_mcs_freq'] = (
-                llstats_summary['common_rx_mcs_count'] / sum(rx_mpdu))
-            total_rx_frames = llstats_dict['mpdu_stats'][
-                'rx_good_fcs'] + llstats_dict['mpdu_stats']['rx_bad_fcs']
-            if total_rx_frames:
-                llstats_summary['rx_per'] = (
-                    llstats_dict['mpdu_stats']['rx_bad_fcs'] /
-                    (total_rx_frames)) * 100
-        return llstats_summary
-
-    def _update_stats(self, llstats_output):
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-        self.llstats_incremental['raw_output'] = llstats_output
-        self.llstats_incremental['mcs_stats'] = self._parse_mcs_stats(
-            llstats_output)
-        self.llstats_incremental['mpdu_stats'] = self._parse_mpdu_stats(
-            llstats_output)
-        self.llstats_incremental['summary'] = self._generate_stats_summary(
-            self.llstats_incremental)
-        self.llstats_cumulative['summary'] = self._generate_stats_summary(
-            self.llstats_cumulative)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py
deleted file mode 100644
index 3405260..0000000
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-RTT_REGEX = re.compile(r'^\[(?P<timestamp>\S+)\] .*? time=(?P<rtt>\S+)')
-LOSS_REGEX = re.compile(r'(?P<loss>\S+)% packet loss')
-
-
-class PingResult(object):
-    """An object that contains the results of running ping command.
-
-    Attributes:
-        connected: True if a connection was made. False otherwise.
-        packet_loss_percentage: The total percentage of packets lost.
-        transmission_times: The list of PingTransmissionTimes containing the
-            timestamps gathered for transmitted packets.
-        rtts: An list-like object enumerating all round-trip-times of
-            transmitted packets.
-        timestamps: A list-like object enumerating the beginning timestamps of
-            each packet transmission.
-        ping_interarrivals: A list-like object enumerating the amount of time
-            between the beginning of each subsequent transmission.
-    """
-    def __init__(self, ping_output):
-        self.packet_loss_percentage = 100
-        self.transmission_times = []
-
-        self.rtts = _ListWrap(self.transmission_times, lambda entry: entry.rtt)
-        self.timestamps = _ListWrap(self.transmission_times,
-                                    lambda entry: entry.timestamp)
-        self.ping_interarrivals = _PingInterarrivals(self.transmission_times)
-
-        self.start_time = 0
-        for line in ping_output:
-            if 'loss' in line:
-                match = re.search(LOSS_REGEX, line)
-                self.packet_loss_percentage = float(match.group('loss'))
-            if 'time=' in line:
-                match = re.search(RTT_REGEX, line)
-                if self.start_time == 0:
-                    self.start_time = float(match.group('timestamp'))
-                self.transmission_times.append(
-                    PingTransmissionTimes(
-                        float(match.group('timestamp')) - self.start_time,
-                        float(match.group('rtt'))))
-        self.connected = len(
-            ping_output) > 1 and self.packet_loss_percentage < 100
-
-    def __getitem__(self, item):
-        if item == 'rtt':
-            return self.rtts
-        if item == 'connected':
-            return self.connected
-        if item == 'packet_loss_percentage':
-            return self.packet_loss_percentage
-        raise ValueError('Invalid key. Please use an attribute instead.')
-
-    def as_dict(self):
-        return {
-            'connected': 1 if self.connected else 0,
-            'rtt': list(self.rtts),
-            'time_stamp': list(self.timestamps),
-            'ping_interarrivals': list(self.ping_interarrivals),
-            'packet_loss_percentage': self.packet_loss_percentage
-        }
-
-
-class PingTransmissionTimes(object):
-    """A class that holds the timestamps for a packet sent via the ping command.
-
-    Attributes:
-        rtt: The round trip time for the packet sent.
-        timestamp: The timestamp the packet started its trip.
-    """
-    def __init__(self, timestamp, rtt):
-        self.rtt = rtt
-        self.timestamp = timestamp
-
-
-class _ListWrap(object):
-    """A convenient helper class for treating list iterators as native lists."""
-    def __init__(self, wrapped_list, func):
-        self.__wrapped_list = wrapped_list
-        self.__func = func
-
-    def __getitem__(self, key):
-        return self.__func(self.__wrapped_list[key])
-
-    def __iter__(self):
-        for item in self.__wrapped_list:
-            yield self.__func(item)
-
-    def __len__(self):
-        return len(self.__wrapped_list)
-
-
-class _PingInterarrivals(object):
-    """A helper class for treating ping interarrivals as a native list."""
-    def __init__(self, ping_entries):
-        self.__ping_entries = ping_entries
-
-    def __getitem__(self, key):
-        return (self.__ping_entries[key + 1].timestamp -
-                self.__ping_entries[key].timestamp)
-
-    def __iter__(self):
-        for index in range(len(self.__ping_entries) - 1):
-            yield self[index]
-
-    def __len__(self):
-        return max(0, len(self.__ping_entries) - 1)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py
deleted file mode 100644
index 1383f6a..0000000
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py
+++ /dev/null
@@ -1,467 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import hashlib
-import logging
-import math
-import os
-import re
-import statistics
-import time
-from antlion import asserts
-
-SHORT_SLEEP = 1
-MED_SLEEP = 6
-STATION_DUMP = 'iw {} station dump'
-SCAN = 'wpa_cli scan'
-SCAN_RESULTS = 'wpa_cli scan_results'
-SIGNAL_POLL = 'wpa_cli signal_poll'
-WPA_CLI_STATUS = 'wpa_cli status'
-RSSI_ERROR_VAL = float('nan')
-FW_REGEX = re.compile(r'FW:(?P<firmware>\S+) HW:')
-
-
-# Rssi Utilities
-def empty_rssi_result():
-    return collections.OrderedDict([('data', []), ('mean', None),
-                                    ('stdev', None)])
-
-
-def get_connected_rssi(dut,
-                       num_measurements=1,
-                       polling_frequency=SHORT_SLEEP,
-                       first_measurement_delay=0,
-                       disconnect_warning=True,
-                       ignore_samples=0,
-                       interface='wlan0'):
-    # yapf: disable
-    connected_rssi = collections.OrderedDict(
-        [('time_stamp', []),
-         ('bssid', []), ('ssid', []), ('frequency', []),
-         ('signal_poll_rssi', empty_rssi_result()),
-         ('signal_poll_avg_rssi', empty_rssi_result()),
-         ('chain_0_rssi', empty_rssi_result()),
-         ('chain_1_rssi', empty_rssi_result())])
-    # yapf: enable
-    previous_bssid = 'disconnected'
-    t0 = time.time()
-    time.sleep(first_measurement_delay)
-    for idx in range(num_measurements):
-        measurement_start_time = time.time()
-        connected_rssi['time_stamp'].append(measurement_start_time - t0)
-        # Get signal poll RSSI
-        try:
-            status_output = dut.adb.shell(
-                'wpa_cli -i {} status'.format(interface))
-        except:
-            status_output = ''
-        match = re.search('bssid=.*', status_output)
-        if match:
-            current_bssid = match.group(0).split('=')[1]
-            connected_rssi['bssid'].append(current_bssid)
-        else:
-            current_bssid = 'disconnected'
-            connected_rssi['bssid'].append(current_bssid)
-            if disconnect_warning and previous_bssid != 'disconnected':
-                logging.warning('WIFI DISCONNECT DETECTED!')
-        previous_bssid = current_bssid
-        match = re.search('\s+ssid=.*', status_output)
-        if match:
-            ssid = match.group(0).split('=')[1]
-            connected_rssi['ssid'].append(ssid)
-        else:
-            connected_rssi['ssid'].append('disconnected')
-        try:
-            signal_poll_output = dut.adb.shell(
-                'wpa_cli -i {} signal_poll'.format(interface))
-        except:
-            signal_poll_output = ''
-        match = re.search('FREQUENCY=.*', signal_poll_output)
-        if match:
-            frequency = int(match.group(0).split('=')[1])
-            connected_rssi['frequency'].append(frequency)
-        else:
-            connected_rssi['frequency'].append(RSSI_ERROR_VAL)
-        match = re.search('RSSI=.*', signal_poll_output)
-        if match:
-            temp_rssi = int(match.group(0).split('=')[1])
-            if temp_rssi == -9999 or temp_rssi == 0:
-                connected_rssi['signal_poll_rssi']['data'].append(
-                    RSSI_ERROR_VAL)
-            else:
-                connected_rssi['signal_poll_rssi']['data'].append(temp_rssi)
-        else:
-            connected_rssi['signal_poll_rssi']['data'].append(RSSI_ERROR_VAL)
-        match = re.search('AVG_RSSI=.*', signal_poll_output)
-        if match:
-            connected_rssi['signal_poll_avg_rssi']['data'].append(
-                int(match.group(0).split('=')[1]))
-        else:
-            connected_rssi['signal_poll_avg_rssi']['data'].append(
-                RSSI_ERROR_VAL)
-
-        # Get per chain RSSI
-        try:
-            per_chain_rssi = dut.adb.shell(STATION_DUMP.format(interface))
-        except:
-            per_chain_rssi = ''
-        match = re.search('.*signal avg:.*', per_chain_rssi)
-        if match:
-            per_chain_rssi = per_chain_rssi[per_chain_rssi.find('[') +
-                                            1:per_chain_rssi.find(']')]
-            per_chain_rssi = per_chain_rssi.split(', ')
-            connected_rssi['chain_0_rssi']['data'].append(
-                int(per_chain_rssi[0]))
-            connected_rssi['chain_1_rssi']['data'].append(
-                int(per_chain_rssi[1]))
-        else:
-            connected_rssi['chain_0_rssi']['data'].append(RSSI_ERROR_VAL)
-            connected_rssi['chain_1_rssi']['data'].append(RSSI_ERROR_VAL)
-        measurement_elapsed_time = time.time() - measurement_start_time
-        time.sleep(max(0, polling_frequency - measurement_elapsed_time))
-
-    # Compute mean RSSIs. Only average valid readings.
-    # Output RSSI_ERROR_VAL if no valid connected readings found.
-    for key, val in connected_rssi.copy().items():
-        if 'data' not in val:
-            continue
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
-        if len(filtered_rssi_values) > ignore_samples:
-            filtered_rssi_values = filtered_rssi_values[ignore_samples:]
-        if filtered_rssi_values:
-            connected_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                connected_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
-            else:
-                connected_rssi[key]['stdev'] = 0
-        else:
-            connected_rssi[key]['mean'] = RSSI_ERROR_VAL
-            connected_rssi[key]['stdev'] = RSSI_ERROR_VAL
-    return connected_rssi
-
-
-def get_scan_rssi(dut, tracked_bssids, num_measurements=1):
-    scan_rssi = collections.OrderedDict()
-    for bssid in tracked_bssids:
-        scan_rssi[bssid] = empty_rssi_result()
-    for idx in range(num_measurements):
-        scan_output = dut.adb.shell(SCAN)
-        time.sleep(MED_SLEEP)
-        scan_output = dut.adb.shell(SCAN_RESULTS)
-        for bssid in tracked_bssids:
-            bssid_result = re.search(bssid + '.*',
-                                     scan_output,
-                                     flags=re.IGNORECASE)
-            if bssid_result:
-                bssid_result = bssid_result.group(0).split('\t')
-                scan_rssi[bssid]['data'].append(int(bssid_result[2]))
-            else:
-                scan_rssi[bssid]['data'].append(RSSI_ERROR_VAL)
-    # Compute mean RSSIs. Only average valid readings.
-    # Output RSSI_ERROR_VAL if no readings found.
-    for key, val in scan_rssi.items():
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
-        if filtered_rssi_values:
-            scan_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                scan_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
-            else:
-                scan_rssi[key]['stdev'] = 0
-        else:
-            scan_rssi[key]['mean'] = RSSI_ERROR_VAL
-            scan_rssi[key]['stdev'] = RSSI_ERROR_VAL
-    return scan_rssi
-
-
-def get_sw_signature(dut):
-    bdf_output = dut.adb.shell('cksum /vendor/firmware/bdwlan*')
-    logging.debug('BDF Checksum output: {}'.format(bdf_output))
-    bdf_signature = sum(
-        [int(line.split(' ')[0]) for line in bdf_output.splitlines()]) % 1000
-
-    fw_output = dut.adb.shell('halutil -logger -get fw')
-    logging.debug('Firmware version output: {}'.format(fw_output))
-    fw_version = re.search(FW_REGEX, fw_output).group('firmware')
-    fw_signature = fw_version.split('.')[-3:-1]
-    fw_signature = float('.'.join(fw_signature))
-    serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000
-    return {
-        'config_signature': bdf_signature,
-        'fw_signature': fw_signature,
-        'serial_hash': serial_hash
-    }
-
-
-def get_country_code(dut):
-    country_code = dut.adb.shell('iw reg get | grep country | head -1')
-    country_code = country_code.split(':')[0].split(' ')[1]
-    if country_code == '00':
-        country_code = 'WW'
-    return country_code
-
-
-def push_config(dut, config_file):
-    config_files_list = dut.adb.shell(
-        'ls /vendor/firmware/bdwlan*').splitlines()
-    for dst_file in config_files_list:
-        dut.push_system_file(config_file, dst_file)
-    dut.reboot()
-
-
-def start_wifi_logging(dut):
-    dut.droid.wifiEnableVerboseLogging(1)
-    msg = "Failed to enable WiFi verbose logging."
-    asserts.assert_equal(dut.droid.wifiGetVerboseLoggingLevel(), 1, msg)
-    logging.info('Starting CNSS logs')
-    dut.adb.shell("find /data/vendor/wifi/wlan_logs/ -type f -delete",
-                  ignore_status=True)
-    dut.adb.shell_nb('cnss_diag -f -s')
-
-
-def stop_wifi_logging(dut):
-    logging.info('Stopping CNSS logs')
-    dut.adb.shell('killall cnss_diag')
-    logs = dut.get_file_names("/data/vendor/wifi/wlan_logs/")
-    if logs:
-        dut.log.info("Pulling cnss_diag logs %s", logs)
-        log_path = os.path.join(dut.device_log_path,
-                                "CNSS_DIAG_%s" % dut.serial)
-        os.makedirs(log_path, exist_ok=True)
-        dut.pull_files(logs, log_path)
-
-
-def push_firmware(dut, firmware_files):
-    """Function to push Wifi firmware files
-
-    Args:
-        dut: dut to push bdf file to
-        firmware_files: path to wlanmdsp.mbn file
-        datamsc_file: path to Data.msc file
-    """
-    for file in firmware_files:
-        dut.push_system_file(file, '/vendor/firmware/')
-    dut.reboot()
-
-
-def _set_ini_fields(ini_file_path, ini_field_dict):
-    template_regex = r'^{}=[0-9,.x-]+'
-    with open(ini_file_path, 'r') as f:
-        ini_lines = f.read().splitlines()
-        for idx, line in enumerate(ini_lines):
-            for field_name, field_value in ini_field_dict.items():
-                line_regex = re.compile(template_regex.format(field_name))
-                if re.match(line_regex, line):
-                    ini_lines[idx] = '{}={}'.format(field_name, field_value)
-                    print(ini_lines[idx])
-    with open(ini_file_path, 'w') as f:
-        f.write('\n'.join(ini_lines) + '\n')
-
-
-def _edit_dut_ini(dut, ini_fields):
-    """Function to edit Wifi ini files."""
-    dut_ini_path = '/vendor/firmware/wlan/qca_cld/WCNSS_qcom_cfg.ini'
-    local_ini_path = os.path.expanduser('~/WCNSS_qcom_cfg.ini')
-    dut.pull_files(dut_ini_path, local_ini_path)
-
-    _set_ini_fields(local_ini_path, ini_fields)
-
-    dut.push_system_file(local_ini_path, dut_ini_path)
-    dut.reboot()
-
-
-def set_chain_mask(dut, chain_mask):
-    curr_mask = getattr(dut, 'chain_mask', '2x2')
-    if curr_mask == chain_mask:
-        return
-    dut.chain_mask = chain_mask
-    if chain_mask == '2x2':
-        ini_fields = {
-            'gEnable2x2': 2,
-            'gSetTxChainmask1x1': 1,
-            'gSetRxChainmask1x1': 1,
-            'gDualMacFeatureDisable': 6,
-            'gDot11Mode': 0
-        }
-    else:
-        ini_fields = {
-            'gEnable2x2': 0,
-            'gSetTxChainmask1x1': chain_mask + 1,
-            'gSetRxChainmask1x1': chain_mask + 1,
-            'gDualMacFeatureDisable': 1,
-            'gDot11Mode': 0
-        }
-    _edit_dut_ini(dut, ini_fields)
-
-
-def set_wifi_mode(dut, mode):
-    TX_MODE_DICT = {
-        'Auto': 0,
-        '11n': 4,
-        '11ac': 9,
-        '11abg': 1,
-        '11b': 2,
-        '11': 3,
-        '11g only': 5,
-        '11n only': 6,
-        '11b only': 7,
-        '11ac only': 8
-    }
-
-    ini_fields = {
-        'gEnable2x2': 2,
-        'gSetTxChainmask1x1': 1,
-        'gSetRxChainmask1x1': 1,
-        'gDualMacFeatureDisable': 6,
-        'gDot11Mode': TX_MODE_DICT[mode]
-    }
-    _edit_dut_ini(dut, ini_fields)
-
-
-class LinkLayerStats():
-
-    LLSTATS_CMD = 'cat /d/wlan0/ll_stats'
-    PEER_REGEX = 'LL_STATS_PEER_ALL'
-    MCS_REGEX = re.compile(
-        r'preamble: (?P<mode>\S+), nss: (?P<num_streams>\S+), bw: (?P<bw>\S+), '
-        'mcs: (?P<mcs>\S+), bitrate: (?P<rate>\S+), txmpdu: (?P<txmpdu>\S+), '
-        'rxmpdu: (?P<rxmpdu>\S+), mpdu_lost: (?P<mpdu_lost>\S+), '
-        'retries: (?P<retries>\S+), retries_short: (?P<retries_short>\S+), '
-        'retries_long: (?P<retries_long>\S+)')
-    MCS_ID = collections.namedtuple(
-        'mcs_id', ['mode', 'num_streams', 'bandwidth', 'mcs', 'rate'])
-    MODE_MAP = {'0': '11a/g', '1': '11b', '2': '11n', '3': '11ac'}
-    BW_MAP = {'0': 20, '1': 40, '2': 80}
-
-    def __init__(self, dut, llstats_enabled=True):
-        self.dut = dut
-        self.llstats_enabled = llstats_enabled
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-
-    def update_stats(self):
-        if self.llstats_enabled:
-            try:
-                llstats_output = self.dut.adb.shell(self.LLSTATS_CMD,
-                                                    timeout=0.1)
-            except:
-                llstats_output = ''
-        else:
-            llstats_output = ''
-        self._update_stats(llstats_output)
-
-    def reset_stats(self):
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-
-    def _empty_llstats(self):
-        return collections.OrderedDict(mcs_stats=collections.OrderedDict(),
-                                       summary=collections.OrderedDict())
-
-    def _empty_mcs_stat(self):
-        return collections.OrderedDict(txmpdu=0,
-                                       rxmpdu=0,
-                                       mpdu_lost=0,
-                                       retries=0,
-                                       retries_short=0,
-                                       retries_long=0)
-
-    def _mcs_id_to_string(self, mcs_id):
-        mcs_string = '{} {}MHz Nss{} MCS{} {}Mbps'.format(
-            mcs_id.mode, mcs_id.bandwidth, mcs_id.num_streams, mcs_id.mcs,
-            mcs_id.rate)
-        return mcs_string
-
-    def _parse_mcs_stats(self, llstats_output):
-        llstats_dict = {}
-        # Look for per-peer stats
-        match = re.search(self.PEER_REGEX, llstats_output)
-        if not match:
-            self.reset_stats()
-            return collections.OrderedDict()
-        # Find and process all matches for per stream stats
-        match_iter = re.finditer(self.MCS_REGEX, llstats_output)
-        for match in match_iter:
-            current_mcs = self.MCS_ID(self.MODE_MAP[match.group('mode')],
-                                      int(match.group('num_streams')) + 1,
-                                      self.BW_MAP[match.group('bw')],
-                                      int(match.group('mcs')),
-                                      int(match.group('rate'), 16) / 1000)
-            current_stats = collections.OrderedDict(
-                txmpdu=int(match.group('txmpdu')),
-                rxmpdu=int(match.group('rxmpdu')),
-                mpdu_lost=int(match.group('mpdu_lost')),
-                retries=int(match.group('retries')),
-                retries_short=int(match.group('retries_short')),
-                retries_long=int(match.group('retries_long')))
-            llstats_dict[self._mcs_id_to_string(current_mcs)] = current_stats
-        return llstats_dict
-
-    def _diff_mcs_stats(self, new_stats, old_stats):
-        stats_diff = collections.OrderedDict()
-        for stat_key in new_stats.keys():
-            stats_diff[stat_key] = new_stats[stat_key] - old_stats[stat_key]
-        return stats_diff
-
-    def _generate_stats_summary(self, llstats_dict):
-        llstats_summary = collections.OrderedDict(common_tx_mcs=None,
-                                                  common_tx_mcs_count=0,
-                                                  common_tx_mcs_freq=0,
-                                                  common_rx_mcs=None,
-                                                  common_rx_mcs_count=0,
-                                                  common_rx_mcs_freq=0,
-                                                  rx_per=float('nan'))
-
-        txmpdu_count = 0
-        rxmpdu_count = 0
-        for mcs_id, mcs_stats in llstats_dict['mcs_stats'].items():
-            if mcs_stats['txmpdu'] > llstats_summary['common_tx_mcs_count']:
-                llstats_summary['common_tx_mcs'] = mcs_id
-                llstats_summary['common_tx_mcs_count'] = mcs_stats['txmpdu']
-            if mcs_stats['rxmpdu'] > llstats_summary['common_rx_mcs_count']:
-                llstats_summary['common_rx_mcs'] = mcs_id
-                llstats_summary['common_rx_mcs_count'] = mcs_stats['rxmpdu']
-            txmpdu_count += mcs_stats['txmpdu']
-            rxmpdu_count += mcs_stats['rxmpdu']
-        if txmpdu_count:
-            llstats_summary['common_tx_mcs_freq'] = (
-                llstats_summary['common_tx_mcs_count'] / txmpdu_count)
-        if rxmpdu_count:
-            llstats_summary['common_rx_mcs_freq'] = (
-                llstats_summary['common_rx_mcs_count'] / rxmpdu_count)
-        return llstats_summary
-
-    def _update_stats(self, llstats_output):
-        # Parse stats
-        new_llstats = self._empty_llstats()
-        new_llstats['mcs_stats'] = self._parse_mcs_stats(llstats_output)
-        # Save old stats and set new cumulative stats
-        old_llstats = self.llstats_cumulative.copy()
-        self.llstats_cumulative = new_llstats.copy()
-        # Compute difference between new and old stats
-        self.llstats_incremental = self._empty_llstats()
-        for mcs_id, new_mcs_stats in new_llstats['mcs_stats'].items():
-            old_mcs_stats = old_llstats['mcs_stats'].get(
-                mcs_id, self._empty_mcs_stat())
-            self.llstats_incremental['mcs_stats'][
-                mcs_id] = self._diff_mcs_stats(new_mcs_stats, old_mcs_stats)
-        # Generate llstats summary
-        self.llstats_incremental['summary'] = self._generate_stats_summary(
-            self.llstats_incremental)
-        self.llstats_cumulative['summary'] = self._generate_stats_summary(
-            self.llstats_cumulative)
diff --git a/src/antlion/test_utils/wifi/wifi_power_test_utils.py b/src/antlion/test_utils/wifi/wifi_power_test_utils.py
deleted file mode 100644
index cbd51fc..0000000
--- a/src/antlion/test_utils/wifi/wifi_power_test_utils.py
+++ /dev/null
@@ -1,405 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-from antlion import utils
-from antlion.libs.proc import job
-from antlion.controllers.ap_lib import bridge_interface as bi
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.controllers.adb_lib.error import AdbCommandError
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.controllers.ap_lib import hostapd_ap_preset
-
-# http://www.secdev.org/projects/scapy/
-# On ubuntu, sudo pip3 install scapy
-import scapy.all as scapy
-
-GET_FROM_PHONE = 'get_from_dut'
-GET_FROM_AP = 'get_from_ap'
-ENABLED_MODULATED_DTIM = 'gEnableModulatedDTIM='
-MAX_MODULATED_DTIM = 'gMaxLIModulatedDTIM='
-
-
-def change_dtim(ad, gEnableModulatedDTIM, gMaxLIModulatedDTIM=10):
-    """Function to change the DTIM setting in the phone.
-
-    Args:
-        ad: the target android device, AndroidDevice object
-        gEnableModulatedDTIM: Modulated DTIM, int
-        gMaxLIModulatedDTIM: Maximum modulated DTIM, int
-    """
-    ad.log.info('Sets dtim to {}'.format(gEnableModulatedDTIM))
-
-    # In P21 the dtim setting method changed and an AdbCommandError will take
-    # place to get ini_file_phone. Thus add try/except block for the old method.
-    # If error occurs, use change_dtim_adb method later. Otherwise, first trying
-    # to find the ini file with DTIM settings
-    try:
-        ini_file_phone = ad.adb.shell('ls /vendor/firmware/wlan/*/*.ini')
-
-    except AdbCommandError as e:
-
-        # Gets AdbCommandError, change dtim later with change_dtim_adb merthod.
-        # change_dtim_adb requires that wifi connection is on.
-        ad.log.info('Gets AdbCommandError, change dtim with change_dtim_adb.')
-        change_dtim_adb(ad, gEnableModulatedDTIM)
-        return 0
-
-    ini_file_local = ini_file_phone.split('/')[-1]
-
-    # Pull the file and change the DTIM to desired value
-    ad.adb.pull('{} {}'.format(ini_file_phone, ini_file_local))
-
-    with open(ini_file_local, 'r') as fin:
-        for line in fin:
-            if ENABLED_MODULATED_DTIM in line:
-                gE_old = line.strip('\n')
-                gEDTIM_old = line.strip(ENABLED_MODULATED_DTIM).strip('\n')
-            if MAX_MODULATED_DTIM in line:
-                gM_old = line.strip('\n')
-                gMDTIM_old = line.strip(MAX_MODULATED_DTIM).strip('\n')
-    fin.close()
-    if int(gEDTIM_old) == gEnableModulatedDTIM and int(
-            gMDTIM_old) == gMaxLIModulatedDTIM:
-        ad.log.info('Current DTIM is already the desired value,'
-                    'no need to reset it')
-        return 0
-
-    gE_new = ENABLED_MODULATED_DTIM + str(gEnableModulatedDTIM)
-    gM_new = MAX_MODULATED_DTIM + str(gMaxLIModulatedDTIM)
-
-    sed_gE = 'sed -i \'s/{}/{}/g\' {}'.format(gE_old, gE_new, ini_file_local)
-    sed_gM = 'sed -i \'s/{}/{}/g\' {}'.format(gM_old, gM_new, ini_file_local)
-    job.run(sed_gE)
-    job.run(sed_gM)
-
-    # Push the file to the phone
-    push_file_to_phone(ad, ini_file_local, ini_file_phone)
-    ad.log.info('DTIM changes checked in and rebooting...')
-    ad.reboot()
-    # Wait for auto-wifi feature to start
-    time.sleep(20)
-    ad.adb.shell('dumpsys battery set level 100')
-    ad.log.info('DTIM updated and device back from reboot')
-    return 1
-
-def change_dtim_adb(ad, gEnableModulatedDTIM):
-    """Function to change the DTIM setting in the P21 phone.
-
-        This method should be run after connecting wifi.
-
-    Args:
-        ad: the target android device, AndroidDevice object
-        gEnableModulatedDTIM: Modulated DTIM, int
-    """
-    ad.log.info('Changes DTIM to {} with adb'.format(gEnableModulatedDTIM))
-    ad.adb.root()
-    screen_status = ad.adb.shell('dumpsys nfc | grep Screen')
-    screen_is_on = 'ON_UNLOCKED' in screen_status
-
-    # To read the dtim with 'adb shell wl bcn_li_dtim', the screen should be off
-    if screen_is_on:
-        ad.log.info('The screen is on. Set it to off before change dtim')
-        ad.droid.goToSleepNow()
-        time_limit_seconds = 60
-        _wait_screen_off(ad, time_limit_seconds)
-
-    old_dtim = _read_dtim_adb(ad)
-    ad.log.info('The dtim before change is {}'.format(old_dtim))
-    try:
-        if int(old_dtim) == gEnableModulatedDTIM:
-            ad.log.info('Current DTIM is already the desired value,'
-                        'no need to reset it')
-            if screen_is_on:
-                ad.log.info('Changes the screen to the original on status')
-                ad.droid.wakeUpNow()
-            return
-    except Exception as e:
-        ad.log.info('old_dtim is not available from adb')
-
-    current_dtim = _set_dtim(ad, gEnableModulatedDTIM)
-    ad.log.info(
-        'Old DTIM is {}, current DTIM is {}'.format(old_dtim, current_dtim))
-    if screen_is_on:
-        ad.log.info('Changes the screen to the original on status')
-        ad.droid.wakeUpNow()
-
-def _set_dtim(ad, gEnableModulatedDTIM):
-    out = ad.adb.shell("halutil -dtim_config {}".format(gEnableModulatedDTIM))
-    ad.log.info('set dtim to {}, stdout: {}'.format(
-        gEnableModulatedDTIM, out))
-    return _read_dtim_adb(ad)
-
-def _read_dtim_adb(ad):
-    try:
-        old_dtim = ad.adb.shell('wl bcn_li_dtim')
-        return old_dtim
-    except Exception as e:
-        ad.log.info('When reading dtim get error {}'.format(e))
-        return 'The dtim value is not available from adb'
-
-def _wait_screen_off(ad, time_limit_seconds):
-    while time_limit_seconds > 0:
-        screen_status = ad.adb.shell('dumpsys nfc | grep Screen')
-        if 'OFF_UNLOCKED' in screen_status:
-            ad.log.info('The screen status is {}'.format(screen_status))
-            return
-        time.sleep(1)
-        time_limit_seconds -= 1
-    raise TimeoutError('Timed out while waiting the screen off after {} '
-                       'seconds.'.format(time_limit_seconds))
-
-
-def push_file_to_phone(ad, file_local, file_phone):
-    """Function to push local file to android phone.
-
-    Args:
-        ad: the target android device
-        file_local: the locla file to push
-        file_phone: the file/directory on the phone to be pushed
-    """
-    ad.adb.root()
-    cmd_out = ad.adb.remount()
-    if 'Permission denied' in cmd_out:
-        ad.log.info('Need to disable verity first and reboot')
-        ad.adb.disable_verity()
-        time.sleep(1)
-        ad.reboot()
-        ad.log.info('Verity disabled and device back from reboot')
-        ad.adb.root()
-        ad.adb.remount()
-    time.sleep(1)
-    ad.adb.push('{} {}'.format(file_local, file_phone))
-
-
-def ap_setup(ap, network, bandwidth=80, dtim_period=None):
-    """Set up the whirlwind AP with provided network info.
-
-    Args:
-        ap: access_point object of the AP
-        network: dict with information of the network, including ssid, password
-                 bssid, channel etc.
-        bandwidth: the operation bandwidth for the AP, default 80MHz
-        dtim_period: the dtim period of access point
-    Returns:
-        brconfigs: the bridge interface configs
-    """
-    log = logging.getLogger()
-    bss_settings = []
-    ssid = network[wutils.WifiEnums.SSID_KEY]
-    if "password" in network.keys():
-        password = network["password"]
-        security = hostapd_security.Security(
-            security_mode="wpa", password=password)
-    else:
-        security = hostapd_security.Security(security_mode=None, password=None)
-    channel = network["channel"]
-    config = hostapd_ap_preset.create_ap_preset(
-        channel=channel,
-        ssid=ssid,
-        dtim_period=dtim_period,
-        security=security,
-        bss_settings=bss_settings,
-        vht_bandwidth=bandwidth,
-        profile_name='whirlwind',
-        iface_wlan_2g=ap.wlan_2g,
-        iface_wlan_5g=ap.wlan_5g)
-    config_bridge = ap.generate_bridge_configs(channel)
-    brconfigs = bi.BridgeInterfaceConfigs(config_bridge[0], config_bridge[1],
-                                          config_bridge[2])
-    ap.bridge.startup(brconfigs)
-    ap.start_ap(config)
-    log.info("AP started on channel {} with SSID {}".format(channel, ssid))
-    return brconfigs
-
-
-def run_iperf_client_nonblocking(ad, server_host, extra_args=""):
-    """Start iperf client on the device with nohup.
-
-    Return status as true if iperf client start successfully.
-    And data flow information as results.
-
-    Args:
-        ad: the android device under test
-        server_host: Address of the iperf server.
-        extra_args: A string representing extra arguments for iperf client,
-            e.g. "-i 1 -t 30".
-
-    """
-    log = logging.getLogger()
-    ad.adb.shell_nb("nohup >/dev/null 2>&1 sh -c 'iperf3 -c {} {} &'".format(
-        server_host, extra_args))
-    log.info("IPerf client started")
-
-
-def get_wifi_rssi(ad):
-    """Get the RSSI of the device.
-
-    Args:
-        ad: the android device under test
-    Returns:
-        RSSI: the rssi level of the device
-    """
-    RSSI = ad.droid.wifiGetConnectionInfo()['rssi']
-    return RSSI
-
-
-def get_phone_ip(ad):
-    """Get the WiFi IP address of the phone.
-
-    Args:
-        ad: the android device under test
-    Returns:
-        IP: IP address of the phone for WiFi, as a string
-    """
-    IP = ad.droid.connectivityGetIPv4Addresses('wlan0')[0]
-
-    return IP
-
-
-def get_phone_mac(ad):
-    """Get the WiFi MAC address of the phone.
-
-    Args:
-        ad: the android device under test
-    Returns:
-        mac: MAC address of the phone for WiFi, as a string
-    """
-    mac = ad.droid.wifiGetConnectionInfo()["mac_address"]
-
-    return mac
-
-
-def get_phone_ipv6(ad):
-    """Get the WiFi IPV6 address of the phone.
-
-    Args:
-        ad: the android device under test
-    Returns:
-        IPv6: IPv6 address of the phone for WiFi, as a string
-    """
-    IPv6 = ad.droid.connectivityGetLinkLocalIpv6Address('wlan0')[:-6]
-
-    return IPv6
-
-
-def wait_for_dhcp(interface_name):
-    """Wait the DHCP address assigned to desired interface.
-
-    Getting DHCP address takes time and the wait time isn't constant. Utilizing
-    utils.timeout to keep trying until success
-
-    Args:
-        interface_name: desired interface name
-    Returns:
-        ip: ip address of the desired interface name
-    Raise:
-        TimeoutError: After timeout, if no DHCP assigned, raise
-    """
-    log = logging.getLogger()
-    reset_host_interface(interface_name)
-    start_time = time.time()
-    time_limit_seconds = 60
-    ip = '0.0.0.0'
-    while start_time + time_limit_seconds > time.time():
-        ip = scapy.get_if_addr(interface_name)
-        if ip == '0.0.0.0':
-            time.sleep(1)
-        else:
-            log.info(
-                'DHCP address assigned to %s as %s' % (interface_name, ip))
-            return ip
-    raise TimeoutError('Timed out while getting if_addr after %s seconds.' %
-                       time_limit_seconds)
-
-
-def reset_host_interface(intferface_name):
-    """Reset the host interface.
-
-    Args:
-        intferface_name: the desired interface to reset
-    """
-    log = logging.getLogger()
-    intf_down_cmd = 'ifconfig %s down' % intferface_name
-    intf_up_cmd = 'ifconfig %s up' % intferface_name
-    try:
-        job.run(intf_down_cmd)
-        time.sleep(10)
-        job.run(intf_up_cmd)
-        log.info('{} has been reset'.format(intferface_name))
-    except job.Error:
-        raise Exception('No such interface')
-
-
-def bringdown_host_interface(intferface_name):
-    """Reset the host interface.
-
-    Args:
-        intferface_name: the desired interface to reset
-    """
-    log = logging.getLogger()
-    intf_down_cmd = 'ifconfig %s down' % intferface_name
-    try:
-        job.run(intf_down_cmd)
-        time.sleep(2)
-        log.info('{} has been brought down'.format(intferface_name))
-    except job.Error:
-        raise Exception('No such interface')
-
-
-def create_pkt_config(test_class):
-    """Creates the config for generating multicast packets
-
-    Args:
-        test_class: object with all networking paramters
-
-    Returns:
-        Dictionary with the multicast packet config
-    """
-    addr_type = (scapy.IPV6_ADDR_LINKLOCAL
-                 if test_class.ipv6_src_type == 'LINK_LOCAL' else
-                 scapy.IPV6_ADDR_GLOBAL)
-
-    mac_dst = test_class.mac_dst
-    if GET_FROM_PHONE in test_class.mac_dst:
-        mac_dst = get_phone_mac(test_class.dut)
-
-    ipv4_dst = test_class.ipv4_dst
-    if GET_FROM_PHONE in test_class.ipv4_dst:
-        ipv4_dst = get_phone_ip(test_class.dut)
-
-    ipv6_dst = test_class.ipv6_dst
-    if GET_FROM_PHONE in test_class.ipv6_dst:
-        ipv6_dst = get_phone_ipv6(test_class.dut)
-
-    ipv4_gw = test_class.ipv4_gwt
-    if GET_FROM_AP in test_class.ipv4_gwt:
-        ipv4_gw = test_class.access_point.ssh_settings.hostname
-
-    pkt_gen_config = {
-        'interf': test_class.pkt_sender.interface,
-        'subnet_mask': test_class.sub_mask,
-        'src_mac': test_class.mac_src,
-        'dst_mac': mac_dst,
-        'src_ipv4': test_class.ipv4_src,
-        'dst_ipv4': ipv4_dst,
-        'src_ipv6': test_class.ipv6_src,
-        'src_ipv6_type': addr_type,
-        'dst_ipv6': ipv6_dst,
-        'gw_ipv4': ipv4_gw
-    }
-    return pkt_gen_config
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/__init__.py b/src/antlion/test_utils/wifi/wifi_retail_ap/__init__.py
deleted file mode 100644
index 0046ed6..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/__init__.py
+++ /dev/null
@@ -1,552 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections.abc
-import copy
-import fcntl
-import importlib
-import os
-import selenium
-import splinter
-import time
-from antlion import logger
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-def create(configs):
-    """Factory method for retail AP class.
-
-    Args:
-        configs: list of dicts containing ap settings. ap settings must contain
-        the following: brand, model, ip_address, username and password
-    """
-    SUPPORTED_APS = {
-        ('Netgear', 'R7000'): {
-            'name': 'NetgearR7000AP',
-            'package': 'netgear_r7000'
-        },
-        ('Netgear', 'R7000NA'): {
-            'name': 'NetgearR7000NAAP',
-            'package': 'netgear_r7000'
-        },
-        ('Netgear', 'R7500'): {
-            'name': 'NetgearR7500AP',
-            'package': 'netgear_r7500'
-        },
-        ('Netgear', 'R7500NA'): {
-            'name': 'NetgearR7500NAAP',
-            'package': 'netgear_r7500'
-        },
-        ('Netgear', 'R7800'): {
-            'name': 'NetgearR7800AP',
-            'package': 'netgear_r7800'
-        },
-        ('Netgear', 'R8000'): {
-            'name': 'NetgearR8000AP',
-            'package': 'netgear_r8000'
-        },
-        ('Netgear', 'RAX80'): {
-            'name': 'NetgearRAX80AP',
-            'package': 'netgear_rax80'
-        },
-        ('Netgear', 'RAX120'): {
-            'name': 'NetgearRAX120AP',
-            'package': 'netgear_rax120'
-        },
-        ('Netgear', 'RAX200'): {
-            'name': 'NetgearRAX200AP',
-            'package': 'netgear_rax200'
-        },
-        ('Netgear', 'RAXE500'): {
-            'name': 'NetgearRAXE500AP',
-            'package': 'netgear_raxe500'
-        },
-        ('Brcm', 'Reference'): {
-            'name': 'BrcmRefAP',
-            'package': 'brcm_ref'
-        },
-        ('Google', 'Wifi'): {
-            'name': 'GoogleWifiAP',
-            'package': 'google_wifi'
-        },
-    }
-    objs = []
-    for config in configs:
-        ap_id = (config['brand'], config['model'])
-        if ap_id not in SUPPORTED_APS:
-            raise KeyError('Invalid retail AP brand and model combination.')
-        ap_class_dict = SUPPORTED_APS[ap_id]
-        ap_package = 'acts_contrib.test_utils.wifi.wifi_retail_ap.{}'.format(
-            ap_class_dict['package'])
-        ap_package = importlib.import_module(ap_package)
-        ap_class = getattr(ap_package, ap_class_dict['name'])
-        objs.append(ap_class(config))
-    return objs
-
-
-def destroy(objs):
-    for obj in objs:
-        obj.teardown()
-
-
-class BlockingBrowser(splinter.driver.webdriver.chrome.WebDriver):
-    """Class that implements a blocking browser session on top of selenium.
-
-    The class inherits from and builds upon splinter/selenium's webdriver class
-    and makes sure that only one such webdriver is active on a machine at any
-    single time. The class ensures single session operation using a lock file.
-    The class is to be used within context managers (e.g. with statements) to
-    ensure locks are always properly released.
-    """
-
-    def __init__(self, headless, timeout):
-        """Constructor for BlockingBrowser class.
-
-        Args:
-            headless: boolean to control visible/headless browser operation
-            timeout: maximum time allowed to launch browser
-        """
-        self.log = logger.create_tagged_trace_logger('ChromeDriver')
-        self.chrome_options = splinter.driver.webdriver.chrome.Options()
-        self.chrome_options.add_argument('--no-proxy-server')
-        self.chrome_options.add_argument('--no-sandbox')
-        self.chrome_options.add_argument('--allow-running-insecure-content')
-        self.chrome_options.add_argument('--ignore-certificate-errors')
-        self.chrome_capabilities = selenium.webdriver.common.desired_capabilities.DesiredCapabilities.CHROME.copy(
-        )
-        self.chrome_capabilities['acceptSslCerts'] = True
-        self.chrome_capabilities['acceptInsecureCerts'] = True
-        if headless:
-            self.chrome_options.add_argument('--headless')
-            self.chrome_options.add_argument('--disable-gpu')
-        self.lock_file_path = '/usr/local/bin/chromedriver'
-        self.timeout = timeout
-
-    def __enter__(self):
-        """Entry context manager for BlockingBrowser.
-
-        The enter context manager for BlockingBrowser attempts to lock the
-        browser file. If successful, it launches and returns a chromedriver
-        session. If an exception occurs while starting the browser, the lock
-        file is released.
-        """
-        self.lock_file = open(self.lock_file_path, 'r')
-        start_time = time.time()
-        while time.time() < start_time + self.timeout:
-            try:
-                fcntl.flock(self.lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
-            except BlockingIOError:
-                time.sleep(BROWSER_WAIT_SHORT)
-                continue
-            try:
-                self.driver = selenium.webdriver.Chrome(
-                    options=self.chrome_options,
-                    desired_capabilities=self.chrome_capabilities)
-                self.element_class = splinter.driver.webdriver.WebDriverElement
-                self._cookie_manager = splinter.driver.webdriver.cookie_manager.CookieManager(
-                    self.driver)
-                super(splinter.driver.webdriver.chrome.WebDriver,
-                      self).__init__(2)
-                return super(BlockingBrowser, self).__enter__()
-            except:
-                fcntl.flock(self.lock_file, fcntl.LOCK_UN)
-                self.lock_file.close()
-                raise RuntimeError('Error starting browser. '
-                                   'Releasing lock file.')
-        raise TimeoutError('Could not start chrome browser in time.')
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        """Exit context manager for BlockingBrowser.
-
-        The exit context manager simply calls the parent class exit and
-        releases the lock file.
-        """
-        try:
-            super(BlockingBrowser, self).__exit__(exc_type, exc_value,
-                                                  traceback)
-        except:
-            raise RuntimeError('Failed to quit browser. Releasing lock file.')
-        finally:
-            fcntl.flock(self.lock_file, fcntl.LOCK_UN)
-            self.lock_file.close()
-
-    def restart(self):
-        """Method to restart browser session without releasing lock file."""
-        self.quit()
-        self.__enter__()
-
-    def visit_persistent(self,
-                         url,
-                         page_load_timeout,
-                         num_tries,
-                         backup_url='about:blank',
-                         check_for_element=None):
-        """Method to visit webpages and retry upon failure.
-
-        The function visits a URL and checks that the resulting URL matches
-        the intended URL, i.e. no redirects have happened
-
-        Args:
-            url: the intended url
-            page_load_timeout: timeout for page visits
-            num_tries: number of tries before url is declared unreachable
-            backup_url: url to visit if first url is not reachable. This can be
-            used to simply refresh the browser and try again or to re-login to
-            the AP
-            check_for_element: element id to check for existence on page
-        """
-        self.driver.set_page_load_timeout(page_load_timeout)
-        for idx in range(num_tries):
-            try:
-                self.visit(url)
-            except:
-                self.restart()
-
-            page_reached = self.url.split('/')[-1] == url.split('/')[-1]
-            if check_for_element:
-                time.sleep(BROWSER_WAIT_MED)
-                element = self.find_by_id(check_for_element)
-                if not element:
-                    page_reached = 0
-            if page_reached:
-                break
-            else:
-                try:
-                    self.visit(backup_url)
-                except:
-                    self.restart()
-
-            if idx == num_tries - 1:
-                self.log.error('URL unreachable. Current URL: {}'.format(
-                    self.url))
-                raise RuntimeError('URL unreachable.')
-
-
-class WifiRetailAP(object):
-    """Base class implementation for retail ap.
-
-    Base class provides functions whose implementation is shared by all aps.
-    If some functions such as set_power not supported by ap, checks will raise
-    exceptions.
-    """
-
-    def __init__(self, ap_settings):
-        self.ap_settings = ap_settings.copy()
-        self.log = logger.create_tagged_trace_logger('AccessPoint|{}'.format(
-            self._get_control_ip_address()))
-        # Capabilities variable describing AP capabilities
-        self.capabilities = {
-            'interfaces': [],
-            'channels': {},
-            'modes': {},
-            'default_mode': None
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings.setdefault(interface, {})
-        # Lock AP
-        if self.ap_settings.get('lock_ap', 0):
-            self.lock_timeout = self.ap_settings.get('lock_timeout', 3600)
-            self._lock_ap()
-
-    def teardown(self):
-        """Function to perform destroy operations."""
-        if self.ap_settings.get('lock_ap', 0):
-            self._unlock_ap()
-
-    def reset(self):
-        """Function that resets AP.
-
-        Function implementation is AP dependent and intended to perform any
-        necessary reset operations as part of controller destroy.
-        """
-
-    def read_ap_settings(self):
-        """Function that reads current ap settings.
-
-        Function implementation is AP dependent and thus base class raises exception
-        if function not implemented in child class.
-        """
-        raise NotImplementedError
-
-    def validate_ap_settings(self):
-        """Function to validate ap settings.
-
-        This function compares the actual ap settings read from the web GUI
-        with the assumed settings saved in the AP object. When called after AP
-        configuration, this method helps ensure that our configuration was
-        successful.
-        Note: Calling this function updates the stored ap_settings
-
-        Raises:
-            ValueError: If read AP settings do not match stored settings.
-        """
-        assumed_ap_settings = copy.deepcopy(self.ap_settings)
-        actual_ap_settings = self.read_ap_settings()
-
-        if assumed_ap_settings != actual_ap_settings:
-            self.log.warning(
-                'Discrepancy in AP settings. Some settings may have been overwritten.'
-            )
-
-    def configure_ap(self, **config_flags):
-        """Function that configures ap based on values of ap_settings.
-
-        Function implementation is AP dependent and thus base class raises exception
-        if function not implemented in child class.
-
-        Args:
-            config_flags: optional configuration flags
-        """
-        raise NotImplementedError
-
-    def set_region(self, region):
-        """Function that sets AP region.
-
-        This function sets the region for the AP. Note that this may overwrite
-        channel and bandwidth settings in cases where the new region does not
-        support the current wireless configuration.
-
-        Args:
-            region: string indicating AP region
-        """
-        if region != self.ap_settings['region']:
-            self.log.warning(
-                'Updating region may overwrite wireless settings.')
-        setting_to_update = {'region': region}
-        self.update_ap_settings(setting_to_update)
-
-    def set_radio_on_off(self, network, status):
-        """Function that turns the radio on or off.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            status: boolean indicating on or off (0: off, 1: on)
-        """
-        setting_to_update = {network: {'status': int(status)}}
-        self.update_ap_settings(setting_to_update)
-
-    def set_ssid(self, network, ssid):
-        """Function that sets network SSID.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            ssid: string containing ssid
-        """
-        setting_to_update = {network: {'ssid': str(ssid)}}
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel(self, network, channel):
-        """Function that sets network channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: string or int containing channel
-        """
-        if channel not in self.capabilities['channels'][network]:
-            self.log.error('Ch{} is not supported on {} interface.'.format(
-                channel, network))
-        setting_to_update = {network: {'channel': channel}}
-        self.update_ap_settings(setting_to_update)
-
-    def set_bandwidth(self, network, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        if 'bw' in bandwidth:
-            bandwidth = bandwidth.replace('bw',
-                                          self.capabilities['default_mode'])
-        elif isinstance(bandwidth, int):
-            bandwidth = str(bandwidth) + self.capabilities['default_mode']
-        if bandwidth not in self.capabilities['modes'][network]:
-            self.log.error('{} mode is not supported on {} interface.'.format(
-                bandwidth, network))
-        setting_to_update = {network: {'bandwidth': bandwidth}}
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel_and_bandwidth(self, network, channel, bandwidth):
-        """Function that sets network bandwidth/mode and channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: string containing desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        if 'bw' in bandwidth:
-            bandwidth = bandwidth.replace('bw',
-                                          self.capabilities['default_mode'])
-        elif isinstance(bandwidth, int):
-            bandwidth = str(bandwidth) + self.capabilities['default_mode']
-        if bandwidth not in self.capabilities['modes'][network]:
-            self.log.error('{} mode is not supported on {} interface.'.format(
-                bandwidth, network))
-        if channel not in self.capabilities['channels'][network]:
-            self.log.error('Ch{} is not supported on {} interface.'.format(
-                channel, network))
-        setting_to_update = {
-            network: {
-                'bandwidth': bandwidth,
-                'channel': channel
-            }
-        }
-        self.update_ap_settings(setting_to_update)
-
-    def set_power(self, network, power):
-        """Function that sets network transmit power.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            power: string containing power level, e.g., 25%, 100%
-        """
-        if 'power' not in self.ap_settings[network].keys():
-            self.log.error(
-                'Cannot configure power on {} interface.'.format(network))
-        setting_to_update = {network: {'power': power}}
-        self.update_ap_settings(setting_to_update)
-
-    def set_security(self, network, security_type, *password):
-        """Function that sets network security setting and password.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            security: string containing security setting, e.g., WPA2-PSK
-            password: optional argument containing password
-        """
-        if (len(password) == 1) and (type(password[0]) == str):
-            setting_to_update = {
-                network: {
-                    'security_type': str(security_type),
-                    'password': str(password[0])
-                }
-            }
-        else:
-            setting_to_update = {
-                network: {
-                    'security_type': str(security_type)
-                }
-            }
-        self.update_ap_settings(setting_to_update)
-
-    def set_rate(self):
-        """Function that configures rate used by AP.
-
-        Function implementation is not supported by most APs and thus base
-        class raises exception if function not implemented in child class.
-        """
-        raise NotImplementedError
-
-    def _update_settings_dict(self,
-                              settings,
-                              updates,
-                              updates_requested=False,
-                              status_toggle_flag=False):
-        new_settings = copy.deepcopy(settings)
-        for key, value in updates.items():
-            if key not in new_settings.keys():
-                raise KeyError('{} is an invalid settings key.'.format(key))
-            elif isinstance(value, collections.abc.Mapping):
-                new_settings[
-                    key], updates_requested, status_toggle_flag = self._update_settings_dict(
-                        new_settings.get(key, {}), value, updates_requested,
-                        status_toggle_flag)
-            elif new_settings[key] != value:
-                new_settings[key] = value
-                updates_requested = True
-                if 'status' in key:
-                    status_toggle_flag = True
-        return new_settings, updates_requested, status_toggle_flag
-
-    def update_ap_settings(self, dict_settings={}, **named_settings):
-        """Function to update settings of existing AP.
-
-        Function copies arguments into ap_settings and calls configure_retail_ap
-        to apply them.
-
-        Args:
-            *dict_settings accepts single dictionary of settings to update
-            **named_settings accepts named settings to update
-            Note: dict and named_settings cannot contain the same settings.
-        """
-        settings_to_update = dict(dict_settings, **named_settings)
-        if len(settings_to_update) != len(dict_settings) + len(named_settings):
-            raise KeyError('The following keys were passed twice: {}'.format(
-                (set(dict_settings.keys()).intersection(
-                    set(named_settings.keys())))))
-
-        self.ap_settings, updates_requested, status_toggle_flag = self._update_settings_dict(
-            self.ap_settings, settings_to_update)
-
-        if updates_requested:
-            self.configure_ap(status_toggled=status_toggle_flag)
-
-    def band_lookup_by_channel(self, channel):
-        """Function that gives band name by channel number.
-
-        Args:
-            channel: channel number to lookup
-        Returns:
-            band: name of band which this channel belongs to on this ap, False
-            if not supported
-        """
-        for key, value in self.capabilities['channels'].items():
-            if channel in value:
-                return key
-        return False
-
-    def _get_control_ip_address(self):
-        """Function to get AP's Control Interface IP address."""
-        if 'ssh_config' in self.ap_settings.keys():
-            return self.ap_settings['ssh_config']['host']
-        else:
-            return self.ap_settings['ip_address']
-
-    def _lock_ap(self):
-        """Function to lock the ap while tests are running."""
-        self.lock_file_path = '/tmp/{}_{}_{}.lock'.format(
-            self.ap_settings['brand'], self.ap_settings['model'],
-            self._get_control_ip_address())
-        if not os.path.exists(self.lock_file_path):
-            with open(self.lock_file_path, 'w'):
-                pass
-        self.lock_file = open(self.lock_file_path, 'r')
-        start_time = time.time()
-        self.log.info('Trying to acquire AP lock.')
-        while time.time() < start_time + self.lock_timeout:
-            try:
-                fcntl.flock(self.lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
-            except BlockingIOError:
-                time.sleep(BROWSER_WAIT_SHORT)
-                continue
-            self.log.info('AP lock acquired.')
-            return
-        raise RuntimeError('Could not lock AP in time.')
-
-    def _unlock_ap(self):
-        """Function to unlock the AP when tests are done."""
-        self.log.info('Releasing AP lock.')
-        if hasattr(self, 'lock_file'):
-            try:
-                fcntl.flock(self.lock_file, fcntl.LOCK_UN)
-                self.lock_file.close()
-                self.log.info('Succussfully released AP lock file.')
-            except:
-                raise RuntimeError('Error occurred while unlocking AP.')
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/brcm_ref.py b/src/antlion/test_utils/wifi/wifi_retail_ap/brcm_ref.py
deleted file mode 100644
index d5461f8..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/brcm_ref.py
+++ /dev/null
@@ -1,243 +0,0 @@
-import collections
-import numpy
-import time
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 10
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class BrcmRefAP(WifiRetailAP):
-    """Class that implements Netgear RAX200 AP.
-
-    Since most of the class' implementation is shared with the R7000, this
-    class inherits from NetgearR7000AP and simply redefines config parameters
-    """
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        self.init_gui_data()
-        # Read and update AP settings
-        self.read_ap_settings()
-        self.update_ap_settings(ap_settings)
-
-    def init_gui_data(self):
-        self.config_page = ('{protocol}://{username}:{password}@'
-                            '{ip_address}:{port}/info.html').format(
-                                protocol=self.ap_settings['protocol'],
-                                username=self.ap_settings['admin_username'],
-                                password=self.ap_settings['admin_password'],
-                                ip_address=self.ap_settings['ip_address'],
-                                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'wlrouter/radio.asp').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-
-        self.capabilities = {
-            'interfaces': ['2G_5G', '6G'],
-            'channels': {
-                '2G_5G': [
-                    1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 52, 56,
-                    60, 64, 100, 104, 108, 112, 116, 120, 124, 128, 132, 136,
-                    140, 144, 149, 153, 157, 161, 165
-                ],
-                '6G': ['6g' + str(ch) for ch in numpy.arange(1, 222, 4)]
-            },
-            'modes': {
-                '2G_5G': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ],
-                '6G': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ]
-            },
-            'default_mode': 'HE'
-        }
-        self.ap_settings['region'] = 'United States'
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {
-                'ssid': 'BrcmAP0' if interface == '6G' else 'BrcmAP1',
-                'security_type': 'Open',
-                'password': '1234567890'
-            }
-        self.config_page_fields = collections.OrderedDict({
-            ('2G_5G', 'interface'): ('wl_unit', 1),
-            ('2G_5G', 'band'):
-            'wl_nband',
-            ('2G_5G', 'bandwidth'):
-            'wl_bw_cap',
-            ('2G_5G', 'channel'):
-            'wl_chanspec',
-            ('6G', 'interface'): ('wl_unit', 0),
-            ('6G', 'band'):
-            'wl_nband',
-            ('6G', 'bandwidth'):
-            'wl_bw_cap',
-            ('6G', 'channel'):
-            'wl_chanspec',
-        })
-
-        self.band_mode_values = {'1': '5 GHz', '2': '2.4 GHz', '4': '6 GHz'}
-
-        self.band_values = {'5 GHz': 1, '2.4 GHz': 2, '6 GHz': 4}
-
-        self.bandwidth_mode_values = {
-            '1': 'HE20',
-            '3': 'HE40',
-            '7': 'HE80',
-            '15': 'HE160'
-        }
-
-    def _decode_channel_string(self, channel_string):
-        if channel_string == '0':
-            return 'Auto'
-        if 'u' in channel_string or 'l' in channel_string:
-            channel_string = channel_string[0:-1]
-        elif len(channel_string.split('/')) > 1:
-            channel_string = channel_string.split('/')[0]
-        if '6g' in channel_string:
-            return channel_string
-        else:
-            return int(channel_string)
-
-    def _get_channel_str(self, interface, channel, bandwidth):
-        bandwidth = int(''.join([x for x in bandwidth if x.isdigit()]))
-        if bandwidth == 20:
-            channel_str = str(channel)
-        elif bandwidth in [80, 160]:
-            channel_str = str(channel) + '/' + str(bandwidth)
-        elif interface == '6G' and bandwidth == 40:
-            channel_str = str(channel) + '/' + str(bandwidth)
-        elif interface == '2G_5G' and bandwidth == 40:
-            lower_lookup = [
-                36, 44, 52, 60, 100, 108, 116, 124, 132, 140, 149, 157
-            ]
-            if int(channel) in lower_lookup:
-                channel_str = str(channel) + 'l'
-            else:
-                channel_str = str(channel) + 'u'
-        return channel_str
-
-    def read_ap_settings(self):
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_nologin,
-                                     BROWSER_WAIT_MED, 10, self.config_page)
-
-            for key in self.config_page_fields.keys():
-                if 'interface' in key:
-                    config_item = browser.find_by_name(
-                        self.config_page_fields[key][0]).first
-                    config_item.select(self.config_page_fields[key][1])
-                    time.sleep(BROWSER_WAIT_SHORT)
-                else:
-                    config_item = browser.find_by_name(
-                        self.config_page_fields[key]).first
-                    if 'band' in key:
-                        self.ap_settings[key[0]][
-                            key[1]] = self.band_mode_values[config_item.value]
-                    elif 'bandwidth' in key:
-                        self.ap_settings[key[0]][key[
-                            1]] = self.bandwidth_mode_values[config_item.value]
-                    elif 'channel' in key:
-                        self.ap_settings[key[0]][
-                            key[1]] = self._decode_channel_string(
-                                config_item.value)
-                    else:
-                        self.ap_settings[key[0]][key[1]] = config_item.value
-
-    def update_ap_settings(self, dict_settings={}, **named_settings):
-        """Function to update settings of existing AP.
-
-        Function copies arguments into ap_settings and calls configure_ap
-        to apply them.
-
-        Args:
-            dict_settings: single dictionary of settings to update
-            **named_settings: named settings to update
-            Note: dict and named_settings cannot contain the same settings.
-        """
-
-        settings_to_update = dict(dict_settings, **named_settings)
-        if len(settings_to_update) != len(dict_settings) + len(named_settings):
-            raise KeyError('The following keys were passed twice: {}'.format(
-                (set(dict_settings.keys()).intersection(
-                    set(named_settings.keys())))))
-
-        updating_6G = '6G' in settings_to_update.keys()
-        updating_2G_5G = '2G_5G' in settings_to_update.keys()
-
-        if updating_2G_5G:
-            if 'channel' in settings_to_update['2G_5G']:
-                band = '2.4 GHz' if int(
-                    settings_to_update['2G_5G']['channel']) < 13 else '5 GHz'
-                if band == '2.4 GHz':
-                    settings_to_update['2G_5G']['bandwidth'] = 'HE20'
-                settings_to_update['2G_5G']['band'] = band
-        self.ap_settings, updates_requested, status_toggle_flag = self._update_settings_dict(
-            self.ap_settings, settings_to_update)
-        if updates_requested:
-            self.configure_ap(updating_2G_5G, updating_6G)
-
-    def configure_ap(self, updating_2G_5G, updating_6G):
-
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-
-            interfaces_to_update = []
-            if updating_2G_5G:
-                interfaces_to_update.append('2G_5G')
-            if updating_6G:
-                interfaces_to_update.append('6G')
-            for interface in interfaces_to_update:
-                # Visit URL
-                browser.visit_persistent(self.config_page, BROWSER_WAIT_MED,
-                                         10)
-                browser.visit_persistent(self.config_page_nologin,
-                                         BROWSER_WAIT_MED, 10,
-                                         self.config_page)
-
-                config_item = browser.find_by_name(
-                    self.config_page_fields[(interface, 'interface')][0]).first
-                config_item.select(self.config_page_fields[(interface,
-                                                            'interface')][1])
-                time.sleep(BROWSER_WAIT_SHORT)
-
-                for key, value in self.config_page_fields.items():
-                    if 'interface' in key or interface not in key:
-                        continue
-                    config_item = browser.find_by_name(
-                        self.config_page_fields[key]).first
-                    if 'band' in key:
-                        config_item.select(
-                            self.band_values[self.ap_settings[key[0]][key[1]]])
-                    elif 'bandwidth' in key:
-                        config_item.select_by_text(
-                            str(self.ap_settings[key[0]][key[1]])[2:] + ' MHz')
-                    elif 'channel' in key:
-                        channel_str = self._get_channel_str(
-                            interface, self.ap_settings[interface][key[1]],
-                            self.ap_settings[interface]['bandwidth'])
-                        config_item.select_by_text(channel_str)
-                    else:
-                        self.ap_settings[key[0]][key[1]] = config_item.value
-                    time.sleep(BROWSER_WAIT_SHORT)
-                # Apply
-                config_item = browser.find_by_name('action')
-                config_item.first.click()
-                time.sleep(BROWSER_WAIT_MED)
-                config_item = browser.find_by_name('action')
-                time.sleep(BROWSER_WAIT_SHORT)
-                config_item.first.click()
-                time.sleep(BROWSER_WAIT_LONG)
-                browser.visit_persistent(self.config_page, BROWSER_WAIT_LONG,
-                                     10)
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/google_wifi.py b/src/antlion/test_utils/wifi/wifi_retail_ap/google_wifi.py
deleted file mode 100644
index ee2a821..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/google_wifi.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers import access_point
-from antlion.controllers.ap_lib import bridge_interface
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-
-
-class GoogleWifiAP(WifiRetailAP):
-    """ Class that implements Google Wifi AP.
-
-    This class is a work in progress
-    """
-
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        # Initialize AP
-        if self.ap_settings['2G']['status'] and self.ap_settings['5G_1'][
-                'status']:
-            raise ValueError('Error initializing Google Wifi AP. '
-                             'Only one interface can be enabled at a time.')
-
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [
-                    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116,
-                    120, 124, 128, 132, 136, 140, 149, 153, 157, 161, 165
-                ]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40'],
-                '5G_1': ['VHT20', 'VHT40', 'VHT80']
-            },
-            'default_mode': 'VHT'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings.setdefault(interface, {})
-
-        self.BW_MODE_MAP = {
-            'legacy': 20,
-            'VHT20': 20,
-            'VHT40': 40,
-            'VHT80': 80
-        }
-        self.default_settings = {
-            'region': 'United States',
-            'brand': 'Google',
-            'model': 'Wifi',
-            'hostapd_profile': 'whirlwind',
-            '2G': {
-                'status': 0,
-                'ssid': 'GoogleWifi_2G',
-                'channel': 11,
-                'bandwidth': 'VHT20',
-                'power': 'auto',
-                'mode': None,
-                'num_streams': None,
-                'rate': 'auto',
-                'short_gi': 0,
-                'security_type': 'Open',
-                'password': 'password',
-                'subnet': '192.168.1.0/24'
-            },
-            '5G_1': {
-                'status': 0,
-                'ssid': 'GoogleWifi_2G',
-                'channel': 11,
-                'bandwidth': 'VHT20',
-                'power': 'auto',
-                'mode': None,
-                'num_streams': None,
-                'rate': 'auto',
-                'short_gi': 0,
-                'security_type': 'Open',
-                'password': 'password',
-                'subnet': '192.168.9.0/24'
-            }
-        }
-        for setting in self.default_settings.keys():
-            if setting in self.capabilities['interfaces']:
-                continue
-            elif setting not in self.ap_settings:
-                self.log.debug(
-                    '{0} {1} not found during init. Setting {0} = {1}'.format(
-                        setting, self.default_settings[setting]))
-                self.ap_settings[setting] = self.default_settings[setting]
-
-        for interface in self.capabilities['interfaces']:
-            for setting in self.default_settings[interface].keys():
-                if setting not in self.ap_settings[interface]:
-                    self.log.debug(
-                        '{0} {1} not found during init. Setting {1} = {2}'.
-                        format(interface, setting,
-                               self.default_settings[interface][setting]))
-                    self.ap_settings[interface][
-                        setting] = self.default_settings[interface][setting]
-        init_settings = self.ap_settings.copy()
-        init_settings['ap_subnet'] = {
-            '2g': self.ap_settings['2G']['subnet'],
-            '5g': self.ap_settings['5G_1']['subnet']
-        }
-        self.access_point = access_point.AccessPoint(init_settings)
-        self.configure_ap()
-
-    def read_ap_settings(self):
-        """Function that reads current ap settings."""
-        return self.ap_settings.copy()
-
-    def update_ap_settings(self, dict_settings={}, **named_settings):
-        """Function to update settings of existing AP.
-
-        Function copies arguments into ap_settings and calls configure_ap
-        to apply them.
-
-        Args:
-            dict_settings: single dictionary of settings to update
-            **named_settings: named settings to update
-            Note: dict and named_settings cannot contain the same settings.
-        """
-        settings_to_update = dict(dict_settings, **named_settings)
-        if len(settings_to_update) != len(dict_settings) + len(named_settings):
-            raise KeyError('The following keys were passed twice: {}'.format(
-                (set(dict_settings.keys()).intersection(
-                    set(named_settings.keys())))))
-
-        updating_2G = '2G' in settings_to_update.keys()
-        updating_5G_1 = '5G_1' in settings_to_update.keys()
-        if updating_2G and updating_5G_1:
-            raise ValueError(
-                'Error updating Google WiFi AP. '
-                'One interface can be activated and updated at a time')
-        elif updating_2G:
-            # If updating an interface and not explicitly setting its status,
-            # it is assumed that the interface is to be ENABLED and updated
-            if 'status' not in settings_to_update['2G']:
-                settings_to_update['2G']['status'] = 1
-                settings_to_update['5G_1'] = {'status': 0}
-        elif updating_5G_1:
-            if 'status' not in settings_to_update['5G_1']:
-                settings_to_update['2G'] = {'status': 0}
-                settings_to_update['5G_1']['status'] = 1
-        self.ap_settings, updates_requested, status_toggle_flag = self._update_settings_dict(
-            self.ap_settings, settings_to_update)
-        if updates_requested:
-            self.configure_ap()
-
-    def configure_ap(self):
-        """Function to configure Google Wifi."""
-        self.log.info('Stopping Google Wifi interfaces.')
-        print(self.ap_settings)
-        self.access_point.stop_all_aps()
-
-        if self.ap_settings['2G']['status'] == 1:
-            interface = '2G'
-            self.log.info('Bringing up 2.4 GHz interface.')
-        elif self.ap_settings['5G_1']['status'] == 1:
-            interface = '5G_1'
-            self.log.info('Bringing up 5 GHz interface.')
-        else:
-            return
-
-        bss_settings = []
-        ssid = self.ap_settings[interface]['ssid']
-        security_mode = self.ap_settings[interface]['security_type'].lower()
-        if 'wpa' in security_mode:
-            password = self.ap_settings[interface]['password']
-            security = hostapd_security.Security(security_mode=security_mode,
-                                                 password=password)
-        else:
-            security = hostapd_security.Security(security_mode=None,
-                                                 password=None)
-        channel = int(self.ap_settings[interface]['channel'])
-        bandwidth = self.BW_MODE_MAP[self.ap_settings[interface]['bandwidth']]
-        config = hostapd_ap_preset.create_ap_preset(
-            channel=channel,
-            ssid=ssid,
-            security=security,
-            bss_settings=bss_settings,
-            vht_bandwidth=bandwidth,
-            profile_name=self.ap_settings['hostapd_profile'],
-            iface_wlan_2g=self.access_point.wlan_2g,
-            iface_wlan_5g=self.access_point.wlan_5g)
-        config_bridge = self.access_point.generate_bridge_configs(channel)
-        brconfigs = bridge_interface.BridgeInterfaceConfigs(
-            config_bridge[0], 'lan0', config_bridge[2])
-        self.access_point.bridge.startup(brconfigs)
-        self.access_point.start_ap(config)
-        self.set_power(interface, self.ap_settings[interface]['power'])
-        self.set_rate(interface,
-                      mode=self.ap_settings[interface]['mode'],
-                      num_streams=self.ap_settings[interface]['num_streams'],
-                      rate=self.ap_settings[interface]['rate'],
-                      short_gi=self.ap_settings[interface]['short_gi'])
-        self.log.info('AP started on channel {} with SSID {}'.format(
-            channel, ssid))
-
-    def set_power(self, interface, power):
-        """Function that sets interface transmit power.
-
-        Args:
-            interface: string containing interface identifier (2G, 5G_1)
-            power: power level in dBm
-        """
-        if power == 'auto':
-            power_string = 'auto'
-        else:
-            if not float(power).is_integer():
-                self.log.info(
-                    'Power in dBm must be an integer. Setting to {}'.format(
-                        int(power)))
-            power = int(power)
-            power_string = 'fixed {}'.format(int(power) * 100)
-
-        if '2G' in interface:
-            interface_long = self.access_point.wlan_2g
-            self.ap_settings[interface]['power'] = power
-        elif '5G_1' in interface:
-            interface_long = self.access_point.wlan_5g
-            self.ap_settings[interface]['power'] = power
-        self.access_point.ssh.run('iw dev {} set txpower {}'.format(
-            interface_long, power_string))
-
-    def set_rate(self,
-                 interface,
-                 mode=None,
-                 num_streams=None,
-                 rate='auto',
-                 short_gi=0):
-        """Function that sets rate.
-
-        Args:
-            interface: string containing interface identifier (2G, 5G_1)
-            mode: string indicating the WiFi standard to use
-            num_streams: number of MIMO streams. used only for VHT
-            rate: data rate of MCS index to use
-            short_gi: boolean controlling the use of short guard interval
-        """
-        if '2G' in interface:
-            interface_long = self.access_point.wlan_2g
-            interface_short = '2.4'
-        elif '5G_1' in interface:
-            interface_long = self.access_point.wlan_5g
-            interface_short = '5'
-        self.ap_settings[interface]['mode'] = mode
-        self.ap_settings[interface]['num_streams'] = num_streams
-        self.ap_settings[interface]['rate'] = rate
-        self.ap_settings[interface]['short_gi'] = short_gi
-
-        if rate == 'auto':
-            cmd_string = 'iw dev {0} set bitrates'.format(interface_long)
-        elif 'legacy' in mode.lower():
-            cmd_string = 'iw dev {0} set bitrates legacy-{1} {2} ht-mcs-{1} vht-mcs-{1}'.format(
-                interface_long, interface_short, rate)
-        elif 'vht' in mode.lower():
-            cmd_string = 'iw dev {0} set bitrates legacy-{1} ht-mcs-{1} vht-mcs-{1} {2}:{3}'.format(
-                interface_long, interface_short, num_streams, rate)
-            if short_gi:
-                cmd_string = cmd_string + ' sgi-{}'.format(interface_short)
-        elif 'ht' in mode.lower():
-            cmd_string = 'iw dev {0} set bitrates legacy-{1} ht-mcs-{1} {2} vht-mcs-{1}'.format(
-                interface_long, interface_short, rate)
-            if short_gi:
-                cmd_string = cmd_string + ' sgi-{}'.format(interface_short)
-        self.access_point.ssh.run(cmd_string)
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7000.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7000.py
deleted file mode 100644
index 3f73aa6..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7000.py
+++ /dev/null
@@ -1,282 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class NetgearR7000AP(WifiRetailAP):
-    """Class that implements Netgear R7000 AP."""
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        self.init_gui_data()
-        # Read and update AP settings
-        self.read_ap_settings()
-        self.update_ap_settings(ap_settings)
-
-    def init_gui_data(self):
-        """Function to initialize data used while interacting with web GUI"""
-        self.config_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_wireless_dual_band_r10.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'WLG_wireless_dual_band_r10.htm').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_adv_dual_band2.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [
-                    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116,
-                    120, 124, 128, 132, 136, 140, 149, 153, 157, 161, 165
-                ]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40'],
-                '5G_1': ['VHT20', 'VHT40', 'VHT80']
-            },
-            'default_mode': 'VHT'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.region_map = {
-            '1': 'Africa',
-            '2': 'Asia',
-            '3': 'Australia',
-            '4': 'Canada',
-            '5': 'Europe',
-            '6': 'Israel',
-            '7': 'Japan',
-            '8': 'Korea',
-            '9': 'Mexico',
-            '10': 'South America',
-            '11': 'United States',
-            '12': 'Middle East(Algeria/Syria/Yemen)',
-            '14': 'Russia',
-            '16': 'China',
-            '17': 'India',
-            '18': 'Malaysia',
-            '19': 'Middle East(Iran/Labanon/Qatar)',
-            '20': 'Middle East(Turkey/Egypt/Tunisia/Kuwait)',
-            '21': 'Middle East(Saudi Arabia)',
-            '22': 'Middle East(United Arab Emirates)',
-            '23': 'Singapore',
-            '24': 'Taiwan'
-        }
-        self.config_page_fields = {
-            'region': 'WRegion',
-            ('2G', 'status'): 'enable_ap',
-            ('5G_1', 'status'): 'enable_ap_an',
-            ('2G', 'ssid'): 'ssid',
-            ('5G_1', 'ssid'): 'ssid_an',
-            ('2G', 'channel'): 'w_channel',
-            ('5G_1', 'channel'): 'w_channel_an',
-            ('2G', 'bandwidth'): 'opmode',
-            ('5G_1', 'bandwidth'): 'opmode_an',
-            ('2G', 'power'): 'enable_tpc',
-            ('5G_1', 'power'): 'enable_tpc_an',
-            ('2G', 'security_type'): 'security_type',
-            ('5G_1', 'security_type'): 'security_type_an',
-            ('2G', 'password'): 'passphrase',
-            ('5G_1', 'password'): 'passphrase_an'
-        }
-        self.bw_mode_values = {
-            'g and b': '11g',
-            '145Mbps': 'VHT20',
-            '300Mbps': 'VHT40',
-            'HT80': 'VHT80'
-        }
-        self.power_mode_values = {
-            '1': '100%',
-            '2': '75%',
-            '3': '50%',
-            '4': '25%'
-        }
-        self.bw_mode_text = {
-            '11g': 'Up to 54 Mbps',
-            'VHT20': 'Up to 289 Mbps',
-            'VHT40': 'Up to 600 Mbps',
-            'VHT80': 'Up to 1300 Mbps'
-        }
-
-    def read_ap_settings(self):
-        """Function to read ap settings."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    browser.visit_persistent(self.config_page_advanced,
-                                             BROWSER_WAIT_MED, 10)
-                    config_item = browser.find_by_name(value)
-                    self.ap_settings[key[0]][key[1]] = int(
-                        config_item.first.checked)
-                    browser.visit_persistent(self.config_page,
-                                             BROWSER_WAIT_MED, 10)
-                else:
-                    config_item = browser.find_by_name(value)
-                    if 'bandwidth' in key:
-                        self.ap_settings[key[0]][key[1]] = self.bw_mode_values[
-                            config_item.first.value]
-                    elif 'power' in key:
-                        self.ap_settings[key[0]][
-                            key[1]] = self.power_mode_values[
-                                config_item.first.value]
-                    elif 'region' in key:
-                        self.ap_settings['region'] = self.region_map[
-                            config_item.first.value]
-                    elif 'security_type' in key:
-                        for item in config_item:
-                            if item.checked:
-                                self.ap_settings[key[0]][key[1]] = item.value
-                    else:
-                        config_item = browser.find_by_name(value)
-                        self.ap_settings[key[0]][
-                            key[1]] = config_item.first.value
-        return self.ap_settings.copy()
-
-    def configure_ap(self, **config_flags):
-        """Function to configure ap wireless settings."""
-        # Turn radios on or off
-        if config_flags['status_toggled']:
-            self.configure_radio_on_off()
-        # Configure radios
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_nologin,
-                                     BROWSER_WAIT_MED, 10, self.config_page)
-
-            # Update region, and power/bandwidth for each network
-            config_item = browser.find_by_name(
-                self.config_page_fields['region']).first
-            config_item.select_by_text(self.ap_settings['region'])
-            for key, value in self.config_page_fields.items():
-                if 'power' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.select_by_text(
-                        self.ap_settings[key[0]][key[1]])
-                elif 'bandwidth' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select_by_text(self.bw_mode_text[
-                            self.ap_settings[key[0]][key[1]]])
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select bandwidth. Keeping AP default.')
-
-            # Update security settings (passwords updated only if applicable)
-            for key, value in self.config_page_fields.items():
-                if 'security_type' in key:
-                    browser.choose(value, self.ap_settings[key[0]][key[1]])
-                    if self.ap_settings[key[0]][key[1]] == 'WPA2-PSK':
-                        config_item = browser.find_by_name(
-                            self.config_page_fields[(key[0],
-                                                     'password')]).first
-                        config_item.fill(self.ap_settings[key[0]]['password'])
-
-            # Update SSID and channel for each network
-            # NOTE: Update ordering done as such as workaround for R8000
-            # wherein channel and SSID get overwritten when some other
-            # variables are changed. However, region does have to be set before
-            # channel in all cases.
-            for key, value in self.config_page_fields.items():
-                if 'ssid' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.fill(self.ap_settings[key[0]][key[1]])
-                elif 'channel' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select(self.ap_settings[key[0]][key[1]])
-                        time.sleep(BROWSER_WAIT_SHORT)
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select channel. Keeping AP default.')
-                    try:
-                        alert = browser.get_alert()
-                        alert.accept()
-                    except:
-                        pass
-
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_SHORT)
-            try:
-                alert = browser.get_alert()
-                alert.accept()
-                time.sleep(BROWSER_WAIT_SHORT)
-            except:
-                time.sleep(BROWSER_WAIT_SHORT)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
-
-    def configure_radio_on_off(self):
-        """Helper configuration function to turn radios on/off."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_advanced,
-                                     BROWSER_WAIT_MED, 10)
-
-            # Turn radios on or off
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    config_item = browser.find_by_name(value).first
-                    if self.ap_settings[key[0]][key[1]]:
-                        config_item.check()
-                    else:
-                        config_item.uncheck()
-
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_EXTRA_LONG)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
-
-
-class NetgearR7000NAAP(NetgearR7000AP):
-    """Class that implements Netgear R7000 NA AP."""
-    def init_gui_data(self):
-        """Function to initialize data used while interacting with web GUI"""
-        super().init_gui_data()
-        self.region_map['11'] = 'North America'
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7500.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7500.py
deleted file mode 100644
index 4c2729e..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7500.py
+++ /dev/null
@@ -1,335 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import selenium
-import time
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class NetgearR7500AP(WifiRetailAP):
-    """Class that implements Netgear R7500 AP."""
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        self.init_gui_data()
-        # Read and update AP settings
-        self.read_ap_settings()
-        self.update_ap_settings(ap_settings)
-
-    def init_gui_data(self):
-        """Function to initialize data used while interacting with web GUI"""
-        self.config_page = ('{protocol}://{username}:{password}@'
-                            '{ip_address}:{port}/index.htm').format(
-                                protocol=self.ap_settings['protocol'],
-                                username=self.ap_settings['admin_username'],
-                                password=self.ap_settings['admin_password'],
-                                ip_address=self.ap_settings['ip_address'],
-                                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/adv_index.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [
-                    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116,
-                    120, 124, 128, 132, 136, 140, 149, 153, 157, 161, 165
-                ]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40'],
-                '5G_1': ['VHT20', 'VHT40', 'VHT80']
-            },
-            'default_mode': 'VHT'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.config_page_fields = {
-            'region': 'WRegion',
-            ('2G', 'status'): 'enable_ap',
-            ('5G_1', 'status'): 'enable_ap_an',
-            ('2G', 'ssid'): 'ssid',
-            ('5G_1', 'ssid'): 'ssid_an',
-            ('2G', 'channel'): 'w_channel',
-            ('5G_1', 'channel'): 'w_channel_an',
-            ('2G', 'bandwidth'): 'opmode',
-            ('5G_1', 'bandwidth'): 'opmode_an',
-            ('2G', 'security_type'): 'security_type',
-            ('5G_1', 'security_type'): 'security_type_an',
-            ('2G', 'password'): 'passphrase',
-            ('5G_1', 'password'): 'passphrase_an'
-        }
-        self.region_map = {
-            '0': 'Africa',
-            '1': 'Asia',
-            '2': 'Australia',
-            '3': 'Canada',
-            '4': 'Europe',
-            '5': 'Israel',
-            '6': 'Japan',
-            '7': 'Korea',
-            '8': 'Mexico',
-            '9': 'South America',
-            '10': 'United States',
-            '11': 'China',
-            '12': 'India',
-            '13': 'Malaysia',
-            '14': 'Middle East(Algeria/Syria/Yemen)',
-            '15': 'Middle East(Iran/Labanon/Qatar)',
-            '16': 'Middle East(Turkey/Egypt/Tunisia/Kuwait)',
-            '17': 'Middle East(Saudi Arabia)',
-            '18': 'Middle East(United Arab Emirates)',
-            '19': 'Russia',
-            '20': 'Singapore',
-            '21': 'Taiwan'
-        }
-        self.bw_mode_text = {
-            '2G': {
-                '11g': 'Up to 54 Mbps',
-                'VHT20': 'Up to 289 Mbps',
-                'VHT40': 'Up to 600 Mbps'
-            },
-            '5G_1': {
-                'VHT20': 'Up to 347 Mbps',
-                'VHT40': 'Up to 800 Mbps',
-                'VHT80': 'Up to 1733 Mbps'
-            }
-        }
-        self.bw_mode_values = {
-            '1': '11g',
-            '2': 'VHT20',
-            '3': 'VHT40',
-            '7': 'VHT20',
-            '8': 'VHT40',
-            '9': 'VHT80'
-        }
-        self.security_mode_values = {
-            '2G': {
-                'Disable': 'security_disable',
-                'WPA2-PSK': 'security_wpa2'
-            },
-            '5G_1': {
-                'Disable': 'security_an_disable',
-                'WPA2-PSK': 'security_an_wpa2'
-            }
-        }
-
-    def read_ap_settings(self):
-        """Function to read ap wireless settings."""
-        # Get radio status (on/off)
-        self.read_radio_on_off()
-        # Get radio configuration. Note that if both radios are off, the below
-        # code will result in an error
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='wireless')
-            wireless_button = browser.find_by_id('wireless').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                for key, value in self.config_page_fields.items():
-                    if 'bandwidth' in key:
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = self.bw_mode_values[
-                            config_item.value]
-                    elif 'region' in key:
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings['region'] = self.region_map[
-                            config_item.value]
-                    elif 'password' in key:
-                        try:
-                            config_item = iframe.find_by_name(value).first
-                            self.ap_settings[key[0]][
-                                key[1]] = config_item.value
-                            self.ap_settings[
-                                key[0]]['security_type'] = 'WPA2-PSK'
-                        except:
-                            self.ap_settings[key[0]][
-                                key[1]] = 'defaultpassword'
-                            self.ap_settings[
-                                key[0]]['security_type'] = 'Disable'
-                    elif ('channel' in key) or ('ssid' in key):
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = config_item.value
-                    else:
-                        pass
-        return self.ap_settings.copy()
-
-    def configure_ap(self, **config_flags):
-        """Function to configure ap wireless settings."""
-        # Turn radios on or off
-        if config_flags['status_toggled']:
-            self.configure_radio_on_off()
-        # Configure radios
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='wireless')
-            wireless_button = browser.find_by_id('wireless').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                # Update AP region. Must be done before channel setting
-                config_item = iframe.find_by_name(
-                    self.config_page_fields['region']).first
-                config_item.select_by_text(self.ap_settings['region'])
-                # Update wireless settings for each network
-                for key, value in self.config_page_fields.items():
-                    if 'ssid' in key:
-                        config_item = iframe.find_by_name(value).first
-                        config_item.fill(self.ap_settings[key[0]][key[1]])
-                    elif 'channel' in key:
-                        channel = self.ap_settings[key[0]][key[1]]
-                        if int(channel) < 10:
-                            channel_string = '0' + str(channel)
-                        elif int(channel) > 48 and int(channel) < 149:
-                            channel_string = str(channel) + 'DFS'
-                        else:
-                            channel_string = str(channel)
-                        config_item = iframe.find_by_name(value).first
-                        try:
-                            config_item.select_by_text(channel_string)
-                        except AttributeError:
-                            self.log.warning(
-                                'Cannot select channel. Keeping AP default.')
-                    elif 'bandwidth' in key:
-                        config_item = iframe.find_by_name(value).first
-                        try:
-                            config_item.select_by_text(
-                                str(self.bw_mode_text[key[0]][self.ap_settings[
-                                    key[0]][key[1]]]))
-                        except AttributeError:
-                            self.log.warning(
-                                'Cannot select bandwidth. Keeping AP default.')
-                # Update passwords for WPA2-PSK protected networks
-                # (Must be done after security type is selected)
-                for key, value in self.config_page_fields.items():
-                    if 'security_type' in key:
-                        security_option = browser.driver.find_element_by_id(
-                            self.security_mode_values[key[0]][self.ap_settings[
-                                key[0]][key[1]]])
-                        action = selenium.webdriver.common.action_chains.ActionChains(
-                            browser.driver)
-                        action.move_to_element(
-                            security_option).click().perform()
-                        if self.ap_settings[key[0]][key[1]] == 'WPA2-PSK':
-                            config_item = iframe.find_by_name(
-                                self.config_page_fields[(key[0],
-                                                         'password')]).first
-                            config_item.fill(
-                                self.ap_settings[key[0]]['password'])
-
-                apply_button = iframe.find_by_name('Apply')
-                apply_button[0].click()
-                time.sleep(BROWSER_WAIT_SHORT)
-                try:
-                    alert = browser.get_alert()
-                    alert.accept()
-                except:
-                    pass
-                time.sleep(BROWSER_WAIT_SHORT)
-                try:
-                    alert = browser.get_alert()
-                    alert.accept()
-                except:
-                    pass
-                time.sleep(BROWSER_WAIT_SHORT)
-            time.sleep(BROWSER_WAIT_EXTRA_LONG)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
-
-    def configure_radio_on_off(self):
-        """Helper configuration function to turn radios on/off."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_advanced,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='advanced_bt')
-            advanced_button = browser.find_by_id('advanced_bt').first
-            advanced_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-            wireless_button = browser.find_by_id('wladv').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                # Turn radios on or off
-                for key, value in self.config_page_fields.items():
-                    if 'status' in key:
-                        config_item = iframe.find_by_name(value).first
-                        if self.ap_settings[key[0]][key[1]]:
-                            config_item.check()
-                        else:
-                            config_item.uncheck()
-
-                time.sleep(BROWSER_WAIT_SHORT)
-                browser.find_by_name('Apply').first.click()
-                time.sleep(BROWSER_WAIT_EXTRA_LONG)
-                browser.visit_persistent(self.config_page,
-                                         BROWSER_WAIT_EXTRA_LONG, 10)
-
-    def read_radio_on_off(self):
-        """Helper configuration function to read radio status."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_advanced,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='advanced_bt')
-            advanced_button = browser.find_by_id('advanced_bt').first
-            advanced_button.click()
-            time.sleep(BROWSER_WAIT_SHORT)
-            wireless_button = browser.find_by_id('wladv').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                # Turn radios on or off
-                for key, value in self.config_page_fields.items():
-                    if 'status' in key:
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = int(
-                            config_item.checked)
-
-
-class NetgearR7500NAAP(NetgearR7500AP):
-    """Class that implements Netgear R7500 NA AP."""
-    def init_gui_data(self):
-        """Function to initialize data used while interacting with web GUI"""
-        super().init_gui_data()
-        self.region_map['10'] = 'North America'
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7800.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7800.py
deleted file mode 100644
index 7528b9c..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r7800.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.wifi.wifi_retail_ap import NetgearR7500AP
-
-
-class NetgearR7800AP(NetgearR7500AP):
-    """Class that implements Netgear R7800 AP.
-
-    Since most of the class' implementation is shared with the R7500, this
-    class inherits from NetgearR7500AP and simply redefines config parameters
-    """
-    def init_gui_data(self):
-        super().init_gui_data()
-        # Overwrite minor differences from R7500 AP
-        self.bw_mode_text_2g['VHT20'] = 'Up to 347 Mbps'
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r8000.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r8000.py
deleted file mode 100644
index 81b96b8..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_r8000.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.wifi.wifi_retail_ap import NetgearR7000AP
-
-
-class NetgearR8000AP(NetgearR7000AP):
-    """Class that implements Netgear R8000 AP.
-
-    Since most of the class' implementation is shared with the R7000, this
-    class inherits from NetgearR7000AP and simply redefines config parameters
-    """
-    def init_gui_data(self):
-        super().init_gui_data()
-        # Overwrite minor differences from R7000 AP
-        self.config_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_wireless_dual_band_r8000.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'WLG_wireless_dual_band_r8000.htm').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_adv_dual_band2_r8000.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1', '5G_2'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [36, 40, 44, 48],
-                '5G_2': [149, 153, 157, 161, 165]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40'],
-                '5G_1': ['VHT20', 'VHT40', 'VHT80'],
-                '5G_2': ['VHT20', 'VHT40', 'VHT80']
-            },
-            'default_mode': 'VHT'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.config_page_fields = {
-            'region': 'WRegion',
-            ('2G', 'status'): 'enable_ap',
-            ('5G_1', 'status'): 'enable_ap_an',
-            ('5G_2', 'status'): 'enable_ap_an_2',
-            ('2G', 'ssid'): 'ssid',
-            ('5G_1', 'ssid'): 'ssid_an',
-            ('5G_2', 'ssid'): 'ssid_an_2',
-            ('2G', 'channel'): 'w_channel',
-            ('5G_1', 'channel'): 'w_channel_an',
-            ('5G_2', 'channel'): 'w_channel_an_2',
-            ('2G', 'bandwidth'): 'opmode',
-            ('5G_1', 'bandwidth'): 'opmode_an',
-            ('5G_2', 'bandwidth'): 'opmode_an_2',
-            ('2G', 'security_type'): 'security_type',
-            ('5G_1', 'security_type'): 'security_type_an',
-            ('5G_2', 'security_type'): 'security_type_an_2',
-            ('2G', 'password'): 'passphrase',
-            ('5G_1', 'password'): 'passphrase_an',
-            ('5G_2', 'password'): 'passphrase_an_2'
-        }
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax120.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax120.py
deleted file mode 100644
index 36ed18d..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax120.py
+++ /dev/null
@@ -1,408 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import selenium
-import time
-from antlion.test_utils.wifi.wifi_retail_ap.netgear_r7500 import NetgearR7500AP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class NetgearRAX120AP(NetgearR7500AP):
-    """Class that implements Netgear RAX120 AP.
-
-    Since most of the class' implementation is shared with the R7500, this
-    class inherits from NetgearR7500AP and simply redefines config parameters
-    """
-    def init_gui_data(self):
-        """Function to initialize data used while interacting with web GUI"""
-        self.config_page = ('{protocol}://{username}:{password}@'
-                            '{ip_address}:{port}/index.htm').format(
-                                protocol=self.ap_settings['protocol'],
-                                username=self.ap_settings['admin_username'],
-                                password=self.ap_settings['admin_password'],
-                                ip_address=self.ap_settings['ip_address'],
-                                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/adv_index.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [
-                    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116,
-                    120, 124, 128, 132, 136, 140, 149, 153, 157, 161, 165
-                ]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40', 'HE20', 'HE40'],
-                '5G_1': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ]
-            },
-            'default_mode': 'HE'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.config_page_fields = {
-            'region': 'WRegion',
-            'enable_ax': 'enable_ax_chec',
-            ('2G', 'status'): 'enable_ap',
-            ('5G_1', 'status'): 'enable_ap_an',
-            ('2G', 'ssid'): 'ssid',
-            ('5G_1', 'ssid'): 'ssid_an',
-            ('2G', 'channel'): 'w_channel',
-            ('5G_1', 'channel'): 'w_channel_an',
-            ('2G', 'bandwidth'): 'opmode',
-            ('5G_1', 'bandwidth'): 'opmode_an',
-            ('2G', 'security_type'): 'security_type',
-            ('5G_1', 'security_type'): 'security_type_an',
-            ('2G', 'password'): 'passphrase',
-            ('5G_1', 'password'): 'passphrase_an'
-        }
-        self.region_map = {
-            '0': 'Africa',
-            '1': 'Asia',
-            '2': 'Australia',
-            '3': 'Canada',
-            '4': 'Europe',
-            '5': 'Israel',
-            '6': 'Japan',
-            '7': 'Korea',
-            '8': 'Mexico',
-            '9': 'South America',
-            '10': 'United States',
-            '11': 'China',
-            '12': 'India',
-            '13': 'Malaysia',
-            '14': 'Middle East(Algeria/Syria/Yemen)',
-            '15': 'Middle East(Iran/Labanon/Qatar)',
-            '16': 'Middle East(Egypt/Tunisia/Kuwait)',
-            '17': 'Middle East(Turkey)',
-            '18': 'Middle East(Saudi Arabia/United Arab Emirates)',
-            '19': 'Russia',
-            '20': 'Singapore',
-            '21': 'Taiwan',
-            'Australia': 'Australia',
-            'Europe': 'Europe',
-            'Korea': 'Korea',
-            'Singapore': 'Singapore',
-            'Hong Kong': 'Hong Kong',
-            'United States': 'United States',
-        }
-        self.bw_mode_text = {
-            '2G': {
-                '11g': 'Up to 54 Mbps (11g)',
-                'HE20': 'Up to 573.5 Mbps (11ax, HT20, 1024-QAM)',
-                'HE40': 'Up to 1147 Mbps (11ax, HT40, 1024-QAM)',
-                'VHT20': 'Up to 481 Mbps (11ng, HT20, 1024-QAM)',
-                'VHT40': 'Up to 1000 Mbps (11ng, HT40, 1024-QAM)'
-            },
-            '5G_1': {
-                'HE20': 'Up to 1147 Mbps (11ax, HT20, 1024-QAM)',
-                'HE40': 'Up to 2294 Mbps (11ax, HT40, 1024-QAM)',
-                'HE80': 'Up to 4803 Mbps (80MHz) (11ax, HT80, 1024-QAM)',
-                'HE160': 'Up to 4803 Mbps (160MHz) (11ax, HT160, 1024-QAM)',
-                'VHT20': 'Up to 962 Mbps (11ac, HT20, 1024-QAM)',
-                'VHT40': 'Up to 2000 Mbps (11ac, HT40, 1024-QAM)',
-                'VHT80': 'Up to 4333 Mbps (80MHz) (11ac, HT80, 1024-QAM)',
-                'VHT160': 'Up to 4333 Mbps (160MHz) (11ac, HT160, 1024-QAM)'
-            }
-        }
-        self.bw_mode_values = {
-            # first key is a boolean indicating if 11ax is enabled
-            0: {
-                '1': '11g',
-                '2': 'VHT20',
-                '3': 'VHT40',
-                '7': 'VHT20',
-                '8': 'VHT40',
-                '9': 'VHT80',
-                '10': 'VHT160'
-            },
-            1: {
-                '1': '11g',
-                '2': 'HE20',
-                '3': 'HE40',
-                '7': 'HE20',
-                '8': 'HE40',
-                '9': 'HE80',
-                '10': 'HE160',
-                '54': '11g',
-                '573.5': 'HE20',
-                '1146': 'HE40',
-                '1147': 'HE20',
-                '2294': 'HE40',
-                '4803-HT80': 'HE80',
-                '4803-HT160': 'HE160'
-            }
-        }
-        self.security_mode_values = {
-            '2G': {
-                'Disable': 'security_disable',
-                'WPA2-PSK': 'security_wpa2'
-            },
-            '5G_1': {
-                'Disable': 'security_an_disable',
-                'WPA2-PSK': 'security_an_wpa2'
-            }
-        }
-
-    def _set_channel_and_bandwidth(self,
-                                   network,
-                                   channel=None,
-                                   bandwidth=None):
-        """Helper function that sets network bandwidth and channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = {network: {}}
-        if channel:
-            if channel not in self.capabilities['channels'][network]:
-                self.log.error('Ch{} is not supported on {} interface.'.format(
-                    channel, network))
-            setting_to_update[network]['channel'] = channel
-
-        if bandwidth is None:
-            return setting_to_update
-
-        if 'bw' in bandwidth:
-            bandwidth = bandwidth.replace('bw',
-                                          self.capabilities['default_mode'])
-        if bandwidth not in self.capabilities['modes'][network]:
-            self.log.error('{} mode is not supported on {} interface.'.format(
-                bandwidth, network))
-        setting_to_update[network]['bandwidth'] = str(bandwidth)
-        setting_to_update['enable_ax'] = int('HE' in bandwidth)
-        # Check if other interfaces need to be changed too
-        requested_mode = 'HE' if 'HE' in bandwidth else 'VHT'
-        for other_network in self.capabilities['interfaces']:
-            if other_network == network:
-                continue
-            other_mode = 'HE' if 'HE' in self.ap_settings[other_network][
-                'bandwidth'] else 'VHT'
-            other_bw = ''.join([
-                x for x in self.ap_settings[other_network]['bandwidth']
-                if x.isdigit()
-            ])
-            if other_mode != requested_mode:
-                updated_mode = '{}{}'.format(requested_mode, other_bw)
-                self.log.warning('All networks must be VHT or HE. '
-                                 'Updating {} to {}'.format(
-                                     other_network, updated_mode))
-                setting_to_update.setdefault(other_network, {})
-                setting_to_update[other_network]['bandwidth'] = updated_mode
-        return setting_to_update
-
-    def set_bandwidth(self, network, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel(self, network, channel):
-        """Function that sets network channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: string or int containing channel
-        """
-        setting_to_update = self._set_channel_and_bandwidth(network,
-                                                            channel=channel)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel_and_bandwidth(self, network, channel, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, channel=channel, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def read_ap_settings(self):
-        """Function to read ap wireless settings."""
-        # Get radio status (on/off)
-        self.read_radio_on_off()
-        # Get radio configuration. Note that if both radios are off, the below
-        # code will result in an error
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='wireless')
-            wireless_button = browser.find_by_id('wireless').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                # read if 11ax is enabled first
-                config_item = iframe.find_by_name('enable_ax').first
-                self.ap_settings['enable_ax'] = int(config_item.checked)
-                # read rest of configuration
-                for key, value in self.config_page_fields.items():
-                    if 'bandwidth' in key:
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = self.bw_mode_values[
-                            self.ap_settings['enable_ax']][config_item.value]
-                    elif 'region' in key:
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings['region'] = self.region_map[
-                            config_item.value]
-                    elif 'password' in key:
-                        try:
-                            config_item = iframe.find_by_name(value).first
-                            self.ap_settings[key[0]][
-                                key[1]] = config_item.value
-                            self.ap_settings[
-                                key[0]]['security_type'] = 'WPA2-PSK'
-                        except:
-                            self.ap_settings[key[0]][
-                                key[1]] = 'defaultpassword'
-                            self.ap_settings[
-                                key[0]]['security_type'] = 'Disable'
-                    elif ('ssid' in key):
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = config_item.value
-                    elif ('channel' in key):
-                        config_item = iframe.find_by_name(value).first
-                        self.ap_settings[key[0]][key[1]] = int(
-                            config_item.value)
-        return self.ap_settings.copy()
-
-    def configure_ap(self, **config_flags):
-        """Function to configure ap wireless settings."""
-        # Turn radios on or off
-        if config_flags['status_toggled']:
-            self.configure_radio_on_off()
-        # Configure radios
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            browser.visit_persistent(self.config_page,
-                                     BROWSER_WAIT_MED,
-                                     10,
-                                     check_for_element='wireless')
-            wireless_button = browser.find_by_id('wireless').first
-            wireless_button.click()
-            time.sleep(BROWSER_WAIT_MED)
-
-            with browser.get_iframe('formframe') as iframe:
-                # Create action chain
-                action = selenium.webdriver.common.action_chains.ActionChains(
-                    browser.driver)
-                # Configure 11ax on or off
-                curr_ax_enabled = int(
-                    iframe.find_by_name('enable_ax').first.checked)
-                if self.ap_settings['enable_ax'] != curr_ax_enabled:
-                    ax_checkbox = browser.driver.find_element_by_id(
-                        'enable_ax_chec')
-                    action.move_to_element(ax_checkbox).click().perform()
-                # Update AP region. Must be done before channel setting
-                try:
-                    config_item = iframe.find_by_name(
-                        self.config_page_fields['region']).first
-                    config_item.select_by_text(self.ap_settings['region'])
-                except:
-                    self.log.warning('Could not set AP region to {}.'.format(
-                        self.ap_settings['region']))
-                # Update wireless settings for each network
-                for key, value in self.config_page_fields.items():
-                    if 'ssid' in key:
-                        config_item = iframe.find_by_name(value).first
-                        config_item.fill(self.ap_settings[key[0]][key[1]])
-                    elif 'channel' in key:
-                        channel = self.ap_settings[key[0]][key[1]]
-                        if int(channel) < 10:
-                            channel_string = '0' + str(channel)
-                        elif int(channel) > 48 and int(channel) < 149:
-                            channel_string = str(channel) + 'DFS'
-                        else:
-                            channel_string = str(channel)
-                        config_item = iframe.find_by_name(value).first
-                        try:
-                            config_item.select_by_text(channel_string)
-                        except AttributeError:
-                            self.log.warning(
-                                'Cannot select channel. Keeping AP default.')
-                    elif 'bandwidth' in key:
-                        config_item = iframe.find_by_name(value).first
-                        try:
-                            config_item.select_by_text(
-                                str(self.bw_mode_text[key[0]][self.ap_settings[
-                                    key[0]][key[1]]]))
-                        except AttributeError:
-                            self.log.warning(
-                                'Cannot select bandwidth. Keeping AP default.')
-                # Update passwords for WPA2-PSK protected networks
-                # (Must be done after security type is selected)
-                for key, value in self.config_page_fields.items():
-                    if 'security_type' in key:
-                        security_option = browser.driver.find_element_by_id(
-                            self.security_mode_values[key[0]][self.ap_settings[
-                                key[0]][key[1]]])
-                        action = selenium.webdriver.common.action_chains.ActionChains(
-                            browser.driver)
-                        action.move_to_element(
-                            security_option).click().perform()
-                        if self.ap_settings[key[0]][key[1]] == 'WPA2-PSK':
-                            config_item = iframe.find_by_name(
-                                self.config_page_fields[(key[0],
-                                                         'password')]).first
-                            config_item.fill(
-                                self.ap_settings[key[0]]['password'])
-
-                apply_button = iframe.find_by_name('Apply')
-                apply_button[0].click()
-                time.sleep(BROWSER_WAIT_SHORT)
-                try:
-                    alert = browser.get_alert()
-                    alert.accept()
-                except:
-                    pass
-                time.sleep(BROWSER_WAIT_SHORT)
-                try:
-                    alert = browser.get_alert()
-                    alert.accept()
-                except:
-                    pass
-                time.sleep(BROWSER_WAIT_SHORT)
-            time.sleep(BROWSER_WAIT_EXTRA_LONG)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax200.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax200.py
deleted file mode 100644
index 0034bf3..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax200.py
+++ /dev/null
@@ -1,415 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import time
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class NetgearRAX200AP(WifiRetailAP):
-    """Class that implements Netgear RAX200 AP.
-
-    Since most of the class' implementation is shared with the R7000, this
-    class inherits from NetgearR7000AP and simply redefines config parameters
-    """
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        self.init_gui_data()
-        # Read and update AP settings
-        self.read_ap_settings()
-        self.update_ap_settings(ap_settings)
-
-    def init_gui_data(self):
-        self.config_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_wireless_tri_band.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'WLG_wireless_tri_band.htm').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_adv_tri_band2.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1', '5G_2'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [36, 40, 44, 48, 52, 56, 60, 64],
-                '5G_2': [
-                    100, 104, 108, 112, 116, 120, 124, 128, 132, 136, 140, 144,
-                    149, 153, 157, 161, 165
-                ]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40', 'HE20', 'HE40'],
-                '5G_1': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ],
-                '5G_2': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ]
-            },
-            'default_mode': 'HE'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.region_map = {
-            '3': 'Australia',
-            '4': 'Canada',
-            '5': 'Europe',
-            '7': 'Japan',
-            '8': 'Korea',
-            '11': 'North America',
-            '16': 'China',
-            '17': 'India',
-            '21': 'Middle East(Saudi Arabia/United Arab Emirates)',
-            '23': 'Singapore',
-            '25': 'Hong Kong',
-            '26': 'Vietnam'
-        }
-
-        self.bw_mode_text = {
-            '2G': {
-                '11g': 'Up to 54 Mbps',
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'VHT20': 'Up to 433 Mbps',
-                'VHT40': 'Up to 1000 Mbps'
-            },
-            '5G_1': {
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'HE80': 'Up to 2400 Mbps',
-                'HE160': 'Up to 4800 Mbps',
-                'VHT20': 'Up to 433 Mbps',
-                'VHT40': 'Up to 1000 Mbps',
-                'VHT80': 'Up to 2165 Mbps',
-                'VHT160': 'Up to 4330 Mbps'
-            },
-            '5G_2': {
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'HE80': 'Up to 2400 Mbps',
-                'HE160': 'Up to 4800 Mbps',
-                'VHT20': 'Up to 433 Mbps',
-                'VHT40': 'Up to 1000 Mbps',
-                'VHT80': 'Up to 2165 Mbps',
-                'VHT160': 'Up to 4330 Mbps'
-            }
-        }
-        self.bw_mode_values = {
-            # first key is a boolean indicating if 11ax is enabled
-            0: {
-                'g and b': '11g',
-                '145Mbps': 'VHT20',
-                '300Mbps': 'VHT40',
-                'HT80': 'VHT80',
-                'HT160': 'VHT160'
-            },
-            1: {
-                'g and b': '11g',
-                '145Mbps': 'HE20',
-                '300Mbps': 'HE40',
-                'HT80': 'HE80',
-                'HT160': 'HE160'
-            }
-        }
-
-        # Config ordering intentional to avoid GUI bugs
-        self.config_page_fields = collections.OrderedDict([
-            ('region', 'WRegion'), ('enable_ax', 'enable_he'),
-            (('2G', 'status'), 'enable_ap'),
-            (('5G_1', 'status'), 'enable_ap_an'),
-            (('5G_2', 'status'), 'enable_ap_an_2'), (('2G', 'ssid'), 'ssid'),
-            (('5G_1', 'ssid'), 'ssid_an'), (('5G_2', 'ssid'), 'ssid_an_2'),
-            (('2G', 'channel'), 'w_channel'),
-            (('5G_1', 'channel'), 'w_channel_an'),
-            (('5G_2', 'channel'), 'w_channel_an_2'),
-            (('2G', 'bandwidth'), 'opmode'),
-            (('5G_1', 'bandwidth'), 'opmode_an'),
-            (('5G_2', 'bandwidth'), 'opmode_an_2'),
-            (('2G', 'power'), 'enable_tpc'),
-            (('5G_1', 'power'), 'enable_tpc_an'),
-            (('5G_2', 'power'), 'enable_tpc_an_2'),
-            (('5G_2', 'security_type'), 'security_type_an_2'),
-            (('5G_1', 'security_type'), 'security_type_an'),
-            (('2G', 'security_type'), 'security_type'),
-            (('2G', 'password'), 'passphrase'),
-            (('5G_1', 'password'), 'passphrase_an'),
-            (('5G_2', 'password'), 'passphrase_an_2')
-        ])
-
-        self.power_mode_values = {
-            '1': '100%',
-            '2': '75%',
-            '3': '50%',
-            '4': '25%'
-        }
-
-    def _set_channel_and_bandwidth(self,
-                                   network,
-                                   channel=None,
-                                   bandwidth=None):
-        """Helper function that sets network bandwidth and channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = {network: {}}
-        if channel:
-            if channel not in self.capabilities['channels'][network]:
-                self.log.error('Ch{} is not supported on {} interface.'.format(
-                    channel, network))
-            setting_to_update[network]['channel'] = channel
-
-        if bandwidth is None:
-            return setting_to_update
-
-        if 'bw' in bandwidth:
-            bandwidth = bandwidth.replace('bw',
-                                          self.capabilities['default_mode'])
-        if bandwidth not in self.capabilities['modes'][network]:
-            self.log.error('{} mode is not supported on {} interface.'.format(
-                bandwidth, network))
-        setting_to_update[network]['bandwidth'] = str(bandwidth)
-        setting_to_update['enable_ax'] = int('HE' in bandwidth)
-        # Check if other interfaces need to be changed too
-        requested_mode = 'HE' if 'HE' in bandwidth else 'VHT'
-        for other_network in self.capabilities['interfaces']:
-            if other_network == network:
-                continue
-            other_mode = 'HE' if 'HE' in self.ap_settings[other_network][
-                'bandwidth'] else 'VHT'
-            other_bw = ''.join([
-                x for x in self.ap_settings[other_network]['bandwidth']
-                if x.isdigit()
-            ])
-            if other_mode != requested_mode:
-                updated_mode = '{}{}'.format(requested_mode, other_bw)
-                self.log.warning('All networks must be VHT or HE. '
-                                 'Updating {} to {}'.format(
-                                     other_network, updated_mode))
-                setting_to_update.setdefault(other_network, {})
-                setting_to_update[other_network]['bandwidth'] = updated_mode
-        return setting_to_update
-
-    def set_bandwidth(self, network, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel(self, network, channel):
-        """Function that sets network channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: string or int containing channel
-        """
-        setting_to_update = self._set_channel_and_bandwidth(network,
-                                                            channel=channel)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel_and_bandwidth(self, network, channel, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, channel=channel, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def read_ap_settings(self):
-        """Function to read ap settings."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    browser.visit_persistent(self.config_page_advanced,
-                                             BROWSER_WAIT_MED, 10)
-                    config_item = browser.find_by_name(value)
-                    self.ap_settings[key[0]][key[1]] = int(
-                        config_item.first.checked)
-                    browser.visit_persistent(self.config_page,
-                                             BROWSER_WAIT_MED, 10)
-                else:
-                    config_item = browser.find_by_name(value)
-                    if 'enable_ax' in key:
-                        self.ap_settings[key] = int(config_item.first.checked)
-                    elif 'bandwidth' in key:
-                        self.ap_settings[key[0]][key[1]] = self.bw_mode_values[
-                            self.ap_settings['enable_ax']][
-                                config_item.first.value]
-                    elif 'power' in key:
-                        self.ap_settings[key[0]][
-                            key[1]] = self.power_mode_values[
-                                config_item.first.value]
-                    elif 'region' in key:
-                        self.ap_settings['region'] = self.region_map[
-                            config_item.first.value]
-                    elif 'security_type' in key:
-                        for item in config_item:
-                            if item.checked:
-                                self.ap_settings[key[0]][key[1]] = item.value
-                    elif 'channel' in key:
-                        config_item = browser.find_by_name(value)
-                        self.ap_settings[key[0]][key[1]] = int(
-                            config_item.first.value)
-                    else:
-                        config_item = browser.find_by_name(value)
-                        self.ap_settings[key[0]][
-                            key[1]] = config_item.first.value
-        return self.ap_settings.copy()
-
-    def configure_ap(self, **config_flags):
-        """Function to configure ap wireless settings."""
-        # Turn radios on or off
-        if config_flags['status_toggled']:
-            self.configure_radio_on_off()
-        # Configure radios
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_nologin,
-                                     BROWSER_WAIT_MED, 10, self.config_page)
-
-            # Update region, and power/bandwidth for each network
-            try:
-                config_item = browser.find_by_name(
-                    self.config_page_fields['region']).first
-                config_item.select_by_text(self.ap_settings['region'])
-            except:
-                self.log.warning('Cannot change region.')
-            for key, value in self.config_page_fields.items():
-                if 'enable_ax' in key:
-                    config_item = browser.find_by_name(value).first
-                    if self.ap_settings['enable_ax']:
-                        config_item.check()
-                    else:
-                        config_item.uncheck()
-                if 'power' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.select_by_text(
-                        self.ap_settings[key[0]][key[1]])
-                elif 'bandwidth' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select_by_text(self.bw_mode_text[key[0]][
-                            self.ap_settings[key[0]][key[1]]])
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select bandwidth. Keeping AP default.')
-
-            # Update security settings (passwords updated only if applicable)
-            for key, value in self.config_page_fields.items():
-                if 'security_type' in key:
-                    browser.choose(value, self.ap_settings[key[0]][key[1]])
-                    if 'WPA' in self.ap_settings[key[0]][key[1]]:
-                        config_item = browser.find_by_name(
-                            self.config_page_fields[(key[0],
-                                                     'password')]).first
-                        config_item.fill(self.ap_settings[key[0]]['password'])
-
-            for key, value in self.config_page_fields.items():
-                if 'ssid' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.fill(self.ap_settings[key[0]][key[1]])
-                elif 'channel' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select(self.ap_settings[key[0]][key[1]])
-                        time.sleep(BROWSER_WAIT_SHORT)
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select channel. Keeping AP default.')
-                    try:
-                        for idx in range(0, 2):
-                            alert = browser.get_alert()
-                            alert.accept()
-                            time.sleep(BROWSER_WAIT_SHORT)
-                    except:
-                        pass
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_SHORT)
-            try:
-                alert = browser.get_alert()
-                alert.accept()
-                time.sleep(BROWSER_WAIT_SHORT)
-            except:
-                time.sleep(BROWSER_WAIT_SHORT)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
-
-    def configure_radio_on_off(self):
-        """Helper configuration function to turn radios on/off."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_advanced,
-                                     BROWSER_WAIT_MED, 10)
-
-            # Turn radios on or off
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    config_item = browser.find_by_name(value).first
-                    if self.ap_settings[key[0]][key[1]]:
-                        config_item.check()
-                    else:
-                        config_item.uncheck()
-
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_EXTRA_LONG)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax80.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax80.py
deleted file mode 100644
index 6c99a3b..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_rax80.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.wifi.wifi_retail_ap import NetgearR7000AP
-
-
-class NetgearRAX80AP(NetgearR7000AP):
-    """Class that implements Netgear RAX AP.
-
-    Since most of the class' implementation is shared with the R7000, this
-    class inherits from NetgearR7000AP and simply redefines config parameters
-    """
-    def init_gui_data(self):
-        super().init_gui_data()
-        # Overwrite minor differences from R7000 AP
-        self.config_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_wireless_dual_band_r10.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'WLG_wireless_dual_band_r10.htm').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_adv_dual_band2.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1', '5G_2'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [36, 40, 44, 48],
-                '5G_2': [149, 153, 157, 161, 165]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40'],
-                '5G_1': ['VHT20', 'VHT40', 'VHT80'],
-                '5G_2': ['VHT20', 'VHT40', 'VHT80']
-            },
-            'default_mode': 'VHT'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.bw_mode_values = {
-            'g and b': '11g',
-            '145Mbps': 'VHT20',
-            '300Mbps': 'VHT40',
-            'HT80': 'VHT80',
-            'HT160': 'VHT160'
-        }
-        self.bw_mode_text = {
-            '11g': 'Up to 54 Mbps',
-            'VHT20': 'Up to 600 Mbps',
-            'VHT40': 'Up to 1200 Mbps',
-            'VHT80': 'Up to 2400 Mbps',
-            'VHT160': 'Up to 4800 Mbps'
-        }
diff --git a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_raxe500.py b/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_raxe500.py
deleted file mode 100644
index 73aeaec..0000000
--- a/src/antlion/test_utils/wifi/wifi_retail_ap/netgear_raxe500.py
+++ /dev/null
@@ -1,442 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import numpy
-import re
-import time
-from antlion.test_utils.wifi.wifi_retail_ap import WifiRetailAP
-from antlion.test_utils.wifi.wifi_retail_ap import BlockingBrowser
-
-BROWSER_WAIT_SHORT = 1
-BROWSER_WAIT_MED = 3
-BROWSER_WAIT_LONG = 30
-BROWSER_WAIT_EXTRA_LONG = 60
-
-
-class NetgearRAXE500AP(WifiRetailAP):
-    """Class that implements Netgear RAXE500 AP.
-
-    Since most of the class' implementation is shared with the R7000, this
-    class inherits from NetgearR7000AP and simply redefines config parameters
-    """
-    def __init__(self, ap_settings):
-        super().__init__(ap_settings)
-        self.init_gui_data()
-        # Read and update AP settings
-        self.read_ap_firmware()
-        self.read_ap_settings()
-        self.update_ap_settings(ap_settings)
-
-    def init_gui_data(self):
-        self.config_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_wireless_tri_band.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_nologin = (
-            '{protocol}://{ip_address}:{port}/'
-            'WLG_wireless_tri_band.htm').format(
-                protocol=self.ap_settings['protocol'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.config_page_advanced = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/WLG_adv_tri_band2.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.firmware_page = (
-            '{protocol}://{username}:{password}@'
-            '{ip_address}:{port}/ADVANCED_home2_tri_band.htm').format(
-                protocol=self.ap_settings['protocol'],
-                username=self.ap_settings['admin_username'],
-                password=self.ap_settings['admin_password'],
-                ip_address=self.ap_settings['ip_address'],
-                port=self.ap_settings['port'])
-        self.capabilities = {
-            'interfaces': ['2G', '5G_1', '6G'],
-            'channels': {
-                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
-                '5G_1': [
-                    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116,
-                    120, 124, 128, 132, 136, 140, 144, 149, 153, 157, 161, 165
-                ],
-                '6G': ['6g' + str(ch) for ch in numpy.arange(37, 222, 16)]
-            },
-            'modes': {
-                '2G': ['VHT20', 'VHT40', 'HE20', 'HE40'],
-                '5G_1': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ],
-                '6G': [
-                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
-                    'HE80', 'HE160'
-                ]
-            },
-            'default_mode': 'HE'
-        }
-        for interface in self.capabilities['interfaces']:
-            self.ap_settings[interface] = {}
-
-        self.region_map = {
-            '3': 'Australia',
-            '4': 'Canada',
-            '5': 'Europe',
-            '7': 'Japan',
-            '8': 'Korea',
-            '11': 'North America',
-            '16': 'China',
-            '17': 'India',
-            '21': 'Middle East(Saudi Arabia/United Arab Emirates)',
-            '23': 'Singapore',
-            '25': 'Hong Kong',
-            '26': 'Vietnam'
-        }
-
-        self.bw_mode_text = {
-            '2G': {
-                'g and b': 'Up to 54 Mbps',
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'VHT20': 'Up to 433 Mbps',
-                'VHT40': 'Up to 1000 Mbps'
-            },
-            '5G_1': {
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'HE80': 'Up to 2400 Mbps',
-                'HE160': 'Up to 4800 Mbps',
-                'VHT20': 'Up to 433 Mbps',
-                'VHT40': 'Up to 1000 Mbps',
-                'VHT80': 'Up to 2165 Mbps',
-                'VHT160': 'Up to 4330 Mbps'
-            },
-            '6G': {
-                'HE20': 'Up to 600 Mbps',
-                'HE40': 'Up to 1200 Mbps',
-                'HE80': 'Up to 2400 Mbps',
-                'HE160': 'Up to 4800 Mbps',
-                'VHT20': 'Up to 600 Mbps',
-                'VHT40': 'Up to 1200 Mbps',
-                'VHT80': 'Up to 2400 Mbps',
-                'VHT160': 'Up to 4800 Mbps'
-            }
-        }
-        self.bw_mode_values = {
-            # first key is a boolean indicating if 11ax is enabled
-            0: {
-                'g and b': '11g',
-                'HT20': 'VHT20',
-                'HT40': 'VHT40',
-                'HT80': 'VHT80',
-                'HT160': 'VHT160'
-            },
-            1: {
-                'g and b': '11g',
-                'HT20': 'HE20',
-                'HT40': 'HE40',
-                'HT80': 'HE80',
-                'HT160': 'HE160'
-            }
-        }
-
-        # Config ordering intentional to avoid GUI bugs
-        self.config_page_fields = collections.OrderedDict([
-            ('region', 'WRegion'), ('enable_ax', 'enable_he'),
-            (('2G', 'status'), 'enable_ap'),
-            (('5G_1', 'status'), 'enable_ap_an'),
-            (('6G', 'status'), 'enable_ap_an_2'), (('2G', 'ssid'), 'ssid'),
-            (('5G_1', 'ssid'), 'ssid_an'), (('6G', 'ssid'), 'ssid_an_2'),
-            (('2G', 'channel'), 'w_channel'),
-            (('5G_1', 'channel'), 'w_channel_an'),
-            (('6G', 'channel'), 'w_channel_an_2'),
-            (('2G', 'bandwidth'), 'opmode'),
-            (('5G_1', 'bandwidth'), 'opmode_an'),
-            (('6G', 'bandwidth'), 'opmode_an_2'),
-            (('2G', 'power'), 'enable_tpc'),
-            (('5G_1', 'power'), 'enable_tpc_an'),
-            (('6G', 'security_type'), 'security_type_an_2'),
-            (('5G_1', 'security_type'), 'security_type_an'),
-            (('2G', 'security_type'), 'security_type'),
-            (('2G', 'password'), 'passphrase'),
-            (('5G_1', 'password'), 'passphrase_an'),
-            (('6G', 'password'), 'passphrase_an_2')
-        ])
-
-        self.power_mode_values = {
-            '1': '100%',
-            '2': '75%',
-            '3': '50%',
-            '4': '25%'
-        }
-
-    def _set_channel_and_bandwidth(self,
-                                   network,
-                                   channel=None,
-                                   bandwidth=None):
-        """Helper function that sets network bandwidth and channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-
-        setting_to_update = {network: {}}
-        if channel:
-            if channel not in self.capabilities['channels'][network]:
-                self.log.error('Ch{} is not supported on {} interface.'.format(
-                    channel, network))
-            if isinstance(channel, str) and '6g' in channel:
-                channel = int(channel[2:])
-            setting_to_update[network]['channel'] = channel
-
-        if bandwidth is None:
-            return setting_to_update
-
-        if 'bw' in bandwidth:
-            bandwidth = bandwidth.replace('bw',
-                                          self.capabilities['default_mode'])
-        if bandwidth not in self.capabilities['modes'][network]:
-            self.log.error('{} mode is not supported on {} interface.'.format(
-                bandwidth, network))
-        setting_to_update[network]['bandwidth'] = str(bandwidth)
-        setting_to_update['enable_ax'] = int('HE' in bandwidth)
-        # Check if other interfaces need to be changed too
-        requested_mode = 'HE' if 'HE' in bandwidth else 'VHT'
-        for other_network in self.capabilities['interfaces']:
-            if other_network == network:
-                continue
-            other_mode = 'HE' if 'HE' in self.ap_settings[other_network][
-                'bandwidth'] else 'VHT'
-            other_bw = ''.join([
-                x for x in self.ap_settings[other_network]['bandwidth']
-                if x.isdigit()
-            ])
-            if other_mode != requested_mode:
-                updated_mode = '{}{}'.format(requested_mode, other_bw)
-                self.log.warning('All networks must be VHT or HE. '
-                                 'Updating {} to {}'.format(
-                                     other_network, updated_mode))
-                setting_to_update.setdefault(other_network, {})
-                setting_to_update[other_network]['bandwidth'] = updated_mode
-        return setting_to_update
-
-    def set_bandwidth(self, network, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel(self, network, channel):
-        """Function that sets network channel.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: string or int containing channel
-        """
-        setting_to_update = self._set_channel_and_bandwidth(network,
-                                                            channel=channel)
-        self.update_ap_settings(setting_to_update)
-
-    def set_channel_and_bandwidth(self, network, channel, bandwidth):
-        """Function that sets network bandwidth/mode.
-
-        Args:
-            network: string containing network identifier (2G, 5G_1, 5G_2)
-            channel: desired channel
-            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
-        """
-        setting_to_update = self._set_channel_and_bandwidth(
-            network, channel=channel, bandwidth=bandwidth)
-        self.update_ap_settings(setting_to_update)
-
-    def read_ap_firmware(self):
-        """Function to read ap settings."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-
-            # Visit URL
-            browser.visit_persistent(self.firmware_page, BROWSER_WAIT_MED, 10)
-            firmware_regex = re.compile(
-                r'Firmware Version[\s\S]+V(?P<version>[0-9._]+)')
-            firmware_version = re.search(firmware_regex, browser.html)
-            if firmware_version:
-                self.ap_settings['firmware_version'] = firmware_version.group(
-                    'version')
-            else:
-                self.ap_settings['firmware_version'] = -1
-
-    def read_ap_settings(self):
-        """Function to read ap settings."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    browser.visit_persistent(self.config_page_advanced,
-                                             BROWSER_WAIT_MED, 10)
-                    config_item = browser.find_by_name(value)
-                    self.ap_settings[key[0]][key[1]] = int(
-                        config_item.first.checked)
-                    browser.visit_persistent(self.config_page,
-                                             BROWSER_WAIT_MED, 10)
-                else:
-                    config_item = browser.find_by_name(value)
-                    if 'enable_ax' in key:
-                        self.ap_settings[key] = int(config_item.first.checked)
-                    elif 'bandwidth' in key:
-                        self.ap_settings[key[0]][key[1]] = self.bw_mode_values[
-                            self.ap_settings['enable_ax']][
-                                config_item.first.value]
-                    elif 'power' in key:
-                        self.ap_settings[key[0]][
-                            key[1]] = self.power_mode_values[
-                                config_item.first.value]
-                    elif 'region' in key:
-                        self.ap_settings['region'] = self.region_map[
-                            config_item.first.value]
-                    elif 'security_type' in key:
-                        for item in config_item:
-                            if item.checked:
-                                self.ap_settings[key[0]][key[1]] = item.value
-                    elif 'channel' in key:
-                        config_item = browser.find_by_name(value)
-                        self.ap_settings[key[0]][key[1]] = int(
-                            config_item.first.value)
-                    else:
-                        config_item = browser.find_by_name(value)
-                        self.ap_settings[key[0]][
-                            key[1]] = config_item.first.value
-        return self.ap_settings.copy()
-
-    def configure_ap(self, **config_flags):
-        """Function to configure ap wireless settings."""
-        # Turn radios on or off
-        if config_flags['status_toggled']:
-            self.configure_radio_on_off()
-        # Configure radios
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_nologin,
-                                     BROWSER_WAIT_MED, 10, self.config_page)
-
-            # Update region, and power/bandwidth for each network
-            try:
-                config_item = browser.find_by_name(
-                    self.config_page_fields['region']).first
-                config_item.select_by_text(self.ap_settings['region'])
-            except:
-                self.log.warning('Cannot change region.')
-            for key, value in self.config_page_fields.items():
-                if 'enable_ax' in key:
-                    config_item = browser.find_by_name(value).first
-                    if self.ap_settings['enable_ax']:
-                        config_item.check()
-                    else:
-                        config_item.uncheck()
-                if 'power' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.select_by_text(
-                        self.ap_settings[key[0]][key[1]])
-                elif 'bandwidth' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select_by_text(self.bw_mode_text[key[0]][
-                            self.ap_settings[key[0]][key[1]]])
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select bandwidth. Keeping AP default.')
-
-            # Update security settings (passwords updated only if applicable)
-            for key, value in self.config_page_fields.items():
-                if 'security_type' in key:
-                    browser.choose(value, self.ap_settings[key[0]][key[1]])
-                    if 'WPA' in self.ap_settings[key[0]][key[1]]:
-                        config_item = browser.find_by_name(
-                            self.config_page_fields[(key[0],
-                                                     'password')]).first
-                        config_item.fill(self.ap_settings[key[0]]['password'])
-
-            for key, value in self.config_page_fields.items():
-                if 'ssid' in key:
-                    config_item = browser.find_by_name(value).first
-                    config_item.fill(self.ap_settings[key[0]][key[1]])
-                elif 'channel' in key:
-                    config_item = browser.find_by_name(value).first
-                    try:
-                        config_item.select(self.ap_settings[key[0]][key[1]])
-                        time.sleep(BROWSER_WAIT_SHORT)
-                    except AttributeError:
-                        self.log.warning(
-                            'Cannot select channel. Keeping AP default.')
-                    try:
-                        alert = browser.get_alert()
-                        alert.accept()
-                    except:
-                        pass
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_SHORT)
-            try:
-                alert = browser.get_alert()
-                alert.accept()
-                time.sleep(BROWSER_WAIT_SHORT)
-            except:
-                time.sleep(BROWSER_WAIT_SHORT)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
-
-    def configure_radio_on_off(self):
-        """Helper configuration function to turn radios on/off."""
-        with BlockingBrowser(self.ap_settings['headless_browser'],
-                             900) as browser:
-            # Visit URL
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
-            browser.visit_persistent(self.config_page_advanced,
-                                     BROWSER_WAIT_MED, 10)
-
-            # Turn radios on or off
-            for key, value in self.config_page_fields.items():
-                if 'status' in key:
-                    config_item = browser.find_by_name(value).first
-                    if self.ap_settings[key[0]][key[1]]:
-                        config_item.check()
-                    else:
-                        config_item.uncheck()
-
-            time.sleep(BROWSER_WAIT_SHORT)
-            browser.find_by_name('Apply').first.click()
-            time.sleep(BROWSER_WAIT_EXTRA_LONG)
-            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
-                                     10)
diff --git a/src/antlion/test_utils/wifi/wifi_test_utils.py b/src/antlion/test_utils/wifi/wifi_test_utils.py
deleted file mode 100755
index 3a6105e..0000000
--- a/src/antlion/test_utils/wifi/wifi_test_utils.py
+++ /dev/null
@@ -1,2835 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import re
-import shutil
-import time
-
-from collections import namedtuple
-from enum import IntEnum
-from queue import Empty
-
-from tenacity import retry, stop_after_attempt, wait_fixed
-
-from antlion import asserts
-from antlion import context
-from antlion import signals
-from antlion import utils
-from antlion.controllers import attenuator
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib.hostapd_constants import BAND_2G
-from antlion.controllers.ap_lib.hostapd_constants import BAND_5G
-from antlion.test_utils.net import connectivity_const as cconsts
-from antlion.test_utils.wifi import wifi_constants
-from antlion.test_utils.wifi.aware import aware_test_utils as autils
-
-# Default timeout used for reboot, toggle WiFi and Airplane mode,
-# for the system to settle down after the operation.
-DEFAULT_TIMEOUT = 10
-# Number of seconds to wait for events that are supposed to happen quickly.
-# Like onSuccess for start background scan and confirmation on wifi state
-# change.
-SHORT_TIMEOUT = 30
-ROAMING_TIMEOUT = 30
-WIFI_CONNECTION_TIMEOUT_DEFAULT = 30
-DEFAULT_SCAN_TRIES = 3
-DEFAULT_CONNECT_TRIES = 3
-# Speed of light in m/s.
-SPEED_OF_LIGHT = 299792458
-
-DEFAULT_PING_ADDR = "https://www.google.com/robots.txt"
-
-CNSS_DIAG_CONFIG_PATH = "/data/vendor/wifi/cnss_diag/"
-CNSS_DIAG_CONFIG_FILE = "cnss_diag.conf"
-
-ROAMING_ATTN = {
-    "AP1_on_AP2_off": [0, 0, 95, 95],
-    "AP1_off_AP2_on": [95, 95, 0, 0],
-    "default": [0, 0, 0, 0]
-}
-
-
-class WifiEnums():
-
-    SSID_KEY = "SSID"  # Used for Wifi & SoftAp
-    SSID_PATTERN_KEY = "ssidPattern"
-    NETID_KEY = "network_id"
-    BSSID_KEY = "BSSID"  # Used for Wifi & SoftAp
-    BSSID_PATTERN_KEY = "bssidPattern"
-    PWD_KEY = "password"  # Used for Wifi & SoftAp
-    frequency_key = "frequency"
-    HIDDEN_KEY = "hiddenSSID"  # Used for Wifi & SoftAp
-    IS_APP_INTERACTION_REQUIRED = "isAppInteractionRequired"
-    IS_USER_INTERACTION_REQUIRED = "isUserInteractionRequired"
-    IS_SUGGESTION_METERED = "isMetered"
-    PRIORITY = "priority"
-    SECURITY = "security"  # Used for Wifi & SoftAp
-
-    # Used for SoftAp
-    AP_BAND_KEY = "apBand"
-    AP_CHANNEL_KEY = "apChannel"
-    AP_BANDS_KEY = "apBands"
-    AP_CHANNEL_FREQUENCYS_KEY = "apChannelFrequencies"
-    AP_MAC_RANDOMIZATION_SETTING_KEY = "MacRandomizationSetting"
-    AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY = "BridgedModeOpportunisticShutdownEnabled"
-    AP_IEEE80211AX_ENABLED_KEY = "Ieee80211axEnabled"
-    AP_MAXCLIENTS_KEY = "MaxNumberOfClients"
-    AP_SHUTDOWNTIMEOUT_KEY = "ShutdownTimeoutMillis"
-    AP_SHUTDOWNTIMEOUTENABLE_KEY = "AutoShutdownEnabled"
-    AP_CLIENTCONTROL_KEY = "ClientControlByUserEnabled"
-    AP_ALLOWEDLIST_KEY = "AllowedClientList"
-    AP_BLOCKEDLIST_KEY = "BlockedClientList"
-
-    WIFI_CONFIG_SOFTAP_BAND_2G = 1
-    WIFI_CONFIG_SOFTAP_BAND_5G = 2
-    WIFI_CONFIG_SOFTAP_BAND_2G_5G = 3
-    WIFI_CONFIG_SOFTAP_BAND_6G = 4
-    WIFI_CONFIG_SOFTAP_BAND_2G_6G = 5
-    WIFI_CONFIG_SOFTAP_BAND_5G_6G = 6
-    WIFI_CONFIG_SOFTAP_BAND_ANY = 7
-
-    # DO NOT USE IT for new test case! Replaced by WIFI_CONFIG_SOFTAP_BAND_
-    WIFI_CONFIG_APBAND_2G = WIFI_CONFIG_SOFTAP_BAND_2G
-    WIFI_CONFIG_APBAND_5G = WIFI_CONFIG_SOFTAP_BAND_5G
-    WIFI_CONFIG_APBAND_AUTO = WIFI_CONFIG_SOFTAP_BAND_2G_5G
-
-    WIFI_CONFIG_APBAND_2G_OLD = 0
-    WIFI_CONFIG_APBAND_5G_OLD = 1
-    WIFI_CONFIG_APBAND_AUTO_OLD = -1
-
-    WIFI_WPS_INFO_PBC = 0
-    WIFI_WPS_INFO_DISPLAY = 1
-    WIFI_WPS_INFO_KEYPAD = 2
-    WIFI_WPS_INFO_LABEL = 3
-    WIFI_WPS_INFO_INVALID = 4
-
-    class SoftApSecurityType():
-        OPEN = "NONE"
-        WPA2 = "WPA2_PSK"
-        WPA3_SAE_TRANSITION = "WPA3_SAE_TRANSITION"
-        WPA3_SAE = "WPA3_SAE"
-
-    class CountryCode():
-        AUSTRALIA = "AU"
-        CHINA = "CN"
-        GERMANY = "DE"
-        JAPAN = "JP"
-        UK = "GB"
-        US = "US"
-        UNKNOWN = "UNKNOWN"
-
-    # Start of Macros for EAP
-    # EAP types
-    class Eap(IntEnum):
-        NONE = -1
-        PEAP = 0
-        TLS = 1
-        TTLS = 2
-        PWD = 3
-        SIM = 4
-        AKA = 5
-        AKA_PRIME = 6
-        UNAUTH_TLS = 7
-
-    # EAP Phase2 types
-    class EapPhase2(IntEnum):
-        NONE = 0
-        PAP = 1
-        MSCHAP = 2
-        MSCHAPV2 = 3
-        GTC = 4
-
-    class Enterprise:
-        # Enterprise Config Macros
-        EMPTY_VALUE = "NULL"
-        EAP = "eap"
-        PHASE2 = "phase2"
-        IDENTITY = "identity"
-        ANON_IDENTITY = "anonymous_identity"
-        PASSWORD = "password"
-        SUBJECT_MATCH = "subject_match"
-        ALTSUBJECT_MATCH = "altsubject_match"
-        DOM_SUFFIX_MATCH = "domain_suffix_match"
-        CLIENT_CERT = "client_cert"
-        CA_CERT = "ca_cert"
-        ENGINE = "engine"
-        ENGINE_ID = "engine_id"
-        PRIVATE_KEY_ID = "key_id"
-        REALM = "realm"
-        PLMN = "plmn"
-        FQDN = "FQDN"
-        FRIENDLY_NAME = "providerFriendlyName"
-        ROAMING_IDS = "roamingConsortiumIds"
-        OCSP = "ocsp"
-
-    # End of Macros for EAP
-
-    # Macros for wifi p2p.
-    WIFI_P2P_SERVICE_TYPE_ALL = 0
-    WIFI_P2P_SERVICE_TYPE_BONJOUR = 1
-    WIFI_P2P_SERVICE_TYPE_UPNP = 2
-    WIFI_P2P_SERVICE_TYPE_VENDOR_SPECIFIC = 255
-
-    class ScanResult:
-        CHANNEL_WIDTH_20MHZ = 0
-        CHANNEL_WIDTH_40MHZ = 1
-        CHANNEL_WIDTH_80MHZ = 2
-        CHANNEL_WIDTH_160MHZ = 3
-        CHANNEL_WIDTH_80MHZ_PLUS_MHZ = 4
-
-    # Macros for wifi rtt.
-    class RttType(IntEnum):
-        TYPE_ONE_SIDED = 1
-        TYPE_TWO_SIDED = 2
-
-    class RttPeerType(IntEnum):
-        PEER_TYPE_AP = 1
-        PEER_TYPE_STA = 2  # Requires NAN.
-        PEER_P2P_GO = 3
-        PEER_P2P_CLIENT = 4
-        PEER_NAN = 5
-
-    class RttPreamble(IntEnum):
-        PREAMBLE_LEGACY = 0x01
-        PREAMBLE_HT = 0x02
-        PREAMBLE_VHT = 0x04
-
-    class RttBW(IntEnum):
-        BW_5_SUPPORT = 0x01
-        BW_10_SUPPORT = 0x02
-        BW_20_SUPPORT = 0x04
-        BW_40_SUPPORT = 0x08
-        BW_80_SUPPORT = 0x10
-        BW_160_SUPPORT = 0x20
-
-    class Rtt(IntEnum):
-        STATUS_SUCCESS = 0
-        STATUS_FAILURE = 1
-        STATUS_FAIL_NO_RSP = 2
-        STATUS_FAIL_REJECTED = 3
-        STATUS_FAIL_NOT_SCHEDULED_YET = 4
-        STATUS_FAIL_TM_TIMEOUT = 5
-        STATUS_FAIL_AP_ON_DIFF_CHANNEL = 6
-        STATUS_FAIL_NO_CAPABILITY = 7
-        STATUS_ABORTED = 8
-        STATUS_FAIL_INVALID_TS = 9
-        STATUS_FAIL_PROTOCOL = 10
-        STATUS_FAIL_SCHEDULE = 11
-        STATUS_FAIL_BUSY_TRY_LATER = 12
-        STATUS_INVALID_REQ = 13
-        STATUS_NO_WIFI = 14
-        STATUS_FAIL_FTM_PARAM_OVERRIDE = 15
-
-        REASON_UNSPECIFIED = -1
-        REASON_NOT_AVAILABLE = -2
-        REASON_INVALID_LISTENER = -3
-        REASON_INVALID_REQUEST = -4
-
-    class RttParam:
-        device_type = "deviceType"
-        request_type = "requestType"
-        BSSID = "bssid"
-        channel_width = "channelWidth"
-        frequency = "frequency"
-        center_freq0 = "centerFreq0"
-        center_freq1 = "centerFreq1"
-        number_burst = "numberBurst"
-        interval = "interval"
-        num_samples_per_burst = "numSamplesPerBurst"
-        num_retries_per_measurement_frame = "numRetriesPerMeasurementFrame"
-        num_retries_per_FTMR = "numRetriesPerFTMR"
-        lci_request = "LCIRequest"
-        lcr_request = "LCRRequest"
-        burst_timeout = "burstTimeout"
-        preamble = "preamble"
-        bandwidth = "bandwidth"
-        margin = "margin"
-
-    RTT_MARGIN_OF_ERROR = {
-        RttBW.BW_80_SUPPORT: 2,
-        RttBW.BW_40_SUPPORT: 5,
-        RttBW.BW_20_SUPPORT: 5
-    }
-
-    # Macros as specified in the WifiScanner code.
-    WIFI_BAND_UNSPECIFIED = 0  # not specified
-    WIFI_BAND_24_GHZ = 1  # 2.4 GHz band
-    WIFI_BAND_5_GHZ = 2  # 5 GHz band without DFS channels
-    WIFI_BAND_5_GHZ_DFS_ONLY = 4  # 5 GHz band with DFS channels
-    WIFI_BAND_5_GHZ_WITH_DFS = 6  # 5 GHz band with DFS channels
-    WIFI_BAND_BOTH = 3  # both bands without DFS channels
-    WIFI_BAND_BOTH_WITH_DFS = 7  # both bands with DFS channels
-
-    REPORT_EVENT_AFTER_BUFFER_FULL = 0
-    REPORT_EVENT_AFTER_EACH_SCAN = 1
-    REPORT_EVENT_FULL_SCAN_RESULT = 2
-
-    SCAN_TYPE_LOW_LATENCY = 0
-    SCAN_TYPE_LOW_POWER = 1
-    SCAN_TYPE_HIGH_ACCURACY = 2
-
-    # US Wifi frequencies
-    ALL_2G_FREQUENCIES = [
-        2412, 2417, 2422, 2427, 2432, 2437, 2442, 2447, 2452, 2457, 2462
-    ]
-    DFS_5G_FREQUENCIES = [
-        5260, 5280, 5300, 5320, 5500, 5520, 5540, 5560, 5580, 5600, 5620, 5640,
-        5660, 5680, 5700, 5720
-    ]
-    NONE_DFS_5G_FREQUENCIES = [
-        5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825
-    ]
-    ALL_5G_FREQUENCIES = DFS_5G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES
-
-    band_to_frequencies = {
-        WIFI_BAND_24_GHZ: ALL_2G_FREQUENCIES,
-        WIFI_BAND_5_GHZ: NONE_DFS_5G_FREQUENCIES,
-        WIFI_BAND_5_GHZ_DFS_ONLY: DFS_5G_FREQUENCIES,
-        WIFI_BAND_5_GHZ_WITH_DFS: ALL_5G_FREQUENCIES,
-        WIFI_BAND_BOTH: ALL_2G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES,
-        WIFI_BAND_BOTH_WITH_DFS: ALL_5G_FREQUENCIES + ALL_2G_FREQUENCIES
-    }
-
-    # TODO: add all of the band mapping.
-    softap_band_frequencies = {
-        WIFI_CONFIG_SOFTAP_BAND_2G: ALL_2G_FREQUENCIES,
-        WIFI_CONFIG_SOFTAP_BAND_5G: ALL_5G_FREQUENCIES
-    }
-
-    # All Wifi frequencies to channels lookup.
-    freq_to_channel = {
-        2412: 1,
-        2417: 2,
-        2422: 3,
-        2427: 4,
-        2432: 5,
-        2437: 6,
-        2442: 7,
-        2447: 8,
-        2452: 9,
-        2457: 10,
-        2462: 11,
-        2467: 12,
-        2472: 13,
-        2484: 14,
-        4915: 183,
-        4920: 184,
-        4925: 185,
-        4935: 187,
-        4940: 188,
-        4945: 189,
-        4960: 192,
-        4980: 196,
-        5035: 7,
-        5040: 8,
-        5045: 9,
-        5055: 11,
-        5060: 12,
-        5080: 16,
-        5170: 34,
-        5180: 36,
-        5190: 38,
-        5200: 40,
-        5210: 42,
-        5220: 44,
-        5230: 46,
-        5240: 48,
-        5260: 52,
-        5280: 56,
-        5300: 60,
-        5320: 64,
-        5500: 100,
-        5520: 104,
-        5540: 108,
-        5560: 112,
-        5580: 116,
-        5600: 120,
-        5620: 124,
-        5640: 128,
-        5660: 132,
-        5680: 136,
-        5700: 140,
-        5745: 149,
-        5765: 153,
-        5785: 157,
-        5795: 159,
-        5805: 161,
-        5825: 165,
-    }
-
-    # All Wifi channels to frequencies lookup.
-    channel_2G_to_freq = {
-        1: 2412,
-        2: 2417,
-        3: 2422,
-        4: 2427,
-        5: 2432,
-        6: 2437,
-        7: 2442,
-        8: 2447,
-        9: 2452,
-        10: 2457,
-        11: 2462,
-        12: 2467,
-        13: 2472,
-        14: 2484
-    }
-
-    channel_5G_to_freq = {
-        183: 4915,
-        184: 4920,
-        185: 4925,
-        187: 4935,
-        188: 4940,
-        189: 4945,
-        192: 4960,
-        196: 4980,
-        7: 5035,
-        8: 5040,
-        9: 5045,
-        11: 5055,
-        12: 5060,
-        16: 5080,
-        34: 5170,
-        36: 5180,
-        38: 5190,
-        40: 5200,
-        42: 5210,
-        44: 5220,
-        46: 5230,
-        48: 5240,
-        50: 5250,
-        52: 5260,
-        56: 5280,
-        60: 5300,
-        64: 5320,
-        100: 5500,
-        104: 5520,
-        108: 5540,
-        112: 5560,
-        116: 5580,
-        120: 5600,
-        124: 5620,
-        128: 5640,
-        132: 5660,
-        136: 5680,
-        140: 5700,
-        149: 5745,
-        151: 5755,
-        153: 5765,
-        155: 5775,
-        157: 5785,
-        159: 5795,
-        161: 5805,
-        165: 5825
-    }
-
-    channel_6G_to_freq = {4 * x + 1: 5955 + 20 * x for x in range(59)}
-
-    channel_to_freq = {
-        '2G': channel_2G_to_freq,
-        '5G': channel_5G_to_freq,
-        '6G': channel_6G_to_freq
-    }
-
-
-class WifiChannelBase:
-    ALL_2G_FREQUENCIES = []
-    DFS_5G_FREQUENCIES = []
-    NONE_DFS_5G_FREQUENCIES = []
-    ALL_5G_FREQUENCIES = DFS_5G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES
-    MIX_CHANNEL_SCAN = []
-
-    def band_to_freq(self, band):
-        _band_to_frequencies = {
-            WifiEnums.WIFI_BAND_24_GHZ:
-            self.ALL_2G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_5_GHZ:
-            self.NONE_DFS_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_5_GHZ_DFS_ONLY:
-            self.DFS_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_5_GHZ_WITH_DFS:
-            self.ALL_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_BOTH:
-            self.ALL_2G_FREQUENCIES + self.NONE_DFS_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_BOTH_WITH_DFS:
-            self.ALL_5G_FREQUENCIES + self.ALL_2G_FREQUENCIES
-        }
-        return _band_to_frequencies[band]
-
-
-class WifiChannelUS(WifiChannelBase):
-    # US Wifi frequencies
-    ALL_2G_FREQUENCIES = [
-        2412, 2417, 2422, 2427, 2432, 2437, 2442, 2447, 2452, 2457, 2462
-    ]
-    NONE_DFS_5G_FREQUENCIES = [
-        5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825
-    ]
-    MIX_CHANNEL_SCAN = [
-        2412, 2437, 2462, 5180, 5200, 5280, 5260, 5300, 5500, 5320, 5520, 5560,
-        5700, 5745, 5805
-    ]
-
-    def __init__(self, model=None, support_addition_channel=[]):
-        if model in support_addition_channel:
-            self.ALL_2G_FREQUENCIES = [
-                2412, 2417, 2422, 2427, 2432, 2437, 2442, 2447, 2452, 2457,
-                2462, 2467, 2472
-                ]
-        self.DFS_5G_FREQUENCIES = [
-            5260, 5280, 5300, 5320, 5500, 5520, 5540, 5560, 5580, 5600, 5620,
-            5640, 5660, 5680, 5700, 5720
-            ]
-        self.ALL_5G_FREQUENCIES = self.DFS_5G_FREQUENCIES + self.NONE_DFS_5G_FREQUENCIES
-
-
-class WifiReferenceNetworks:
-    """ Class to parse and return networks of different band and
-        auth type from reference_networks
-    """
-    def __init__(self, obj):
-        self.reference_networks = obj
-        self.WIFI_2G = "2g"
-        self.WIFI_5G = "5g"
-
-        self.secure_networks_2g = []
-        self.secure_networks_5g = []
-        self.open_networks_2g = []
-        self.open_networks_5g = []
-        self._parse_networks()
-
-    def _parse_networks(self):
-        for network in self.reference_networks:
-            for key in network:
-                if key == self.WIFI_2G:
-                    if "password" in network[key]:
-                        self.secure_networks_2g.append(network[key])
-                    else:
-                        self.open_networks_2g.append(network[key])
-                else:
-                    if "password" in network[key]:
-                        self.secure_networks_5g.append(network[key])
-                    else:
-                        self.open_networks_5g.append(network[key])
-
-    def return_2g_secure_networks(self):
-        return self.secure_networks_2g
-
-    def return_5g_secure_networks(self):
-        return self.secure_networks_5g
-
-    def return_2g_open_networks(self):
-        return self.open_networks_2g
-
-    def return_5g_open_networks(self):
-        return self.open_networks_5g
-
-    def return_secure_networks(self):
-        return self.secure_networks_2g + self.secure_networks_5g
-
-    def return_open_networks(self):
-        return self.open_networks_2g + self.open_networks_5g
-
-
-def _assert_on_fail_handler(func, assert_on_fail, *args, **kwargs):
-    """Wrapper function that handles the bahevior of assert_on_fail.
-
-    When assert_on_fail is True, let all test signals through, which can
-    terminate test cases directly. When assert_on_fail is False, the wrapper
-    raises no test signals and reports operation status by returning True or
-    False.
-
-    Args:
-        func: The function to wrap. This function reports operation status by
-              raising test signals.
-        assert_on_fail: A boolean that specifies if the output of the wrapper
-                        is test signal based or return value based.
-        args: Positional args for func.
-        kwargs: Name args for func.
-
-    Returns:
-        If assert_on_fail is True, returns True/False to signal operation
-        status, otherwise return nothing.
-    """
-    try:
-        func(*args, **kwargs)
-        if not assert_on_fail:
-            return True
-    except signals.TestSignal:
-        if assert_on_fail:
-            raise
-        return False
-
-
-def assert_network_in_list(target, network_list):
-    """Makes sure a specified target Wi-Fi network exists in a list of Wi-Fi
-    networks.
-
-    Args:
-        target: A dict representing a Wi-Fi network.
-                E.g. {WifiEnums.SSID_KEY: "SomeNetwork"}
-        network_list: A list of dicts, each representing a Wi-Fi network.
-    """
-    match_results = match_networks(target, network_list)
-    asserts.assert_true(
-        match_results, "Target network %s, does not exist in network list %s" %
-        (target, network_list))
-
-
-def match_networks(target_params, networks):
-    """Finds the WiFi networks that match a given set of parameters in a list
-    of WiFi networks.
-
-    To be considered a match, the network should contain every key-value pair
-    of target_params
-
-    Args:
-        target_params: A dict with 1 or more key-value pairs representing a Wi-Fi network.
-                       E.g { 'SSID': 'wh_ap1_5g', 'BSSID': '30:b5:c2:33:e4:47' }
-        networks: A list of dict objects representing WiFi networks.
-
-    Returns:
-        The networks that match the target parameters.
-    """
-    results = []
-    asserts.assert_true(target_params,
-                        "Expected networks object 'target_params' is empty")
-    for n in networks:
-        add_network = 1
-        for k, v in target_params.items():
-            if k not in n:
-                add_network = 0
-                break
-            if n[k] != v:
-                add_network = 0
-                break
-        if add_network:
-            results.append(n)
-    return results
-
-
-def wait_for_wifi_state(ad, state, assert_on_fail=True):
-    """Waits for the device to transition to the specified wifi state
-
-    Args:
-        ad: An AndroidDevice object.
-        state: Wifi state to wait for.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        If assert_on_fail is False, function returns True if the device transitions
-        to the specified state, False otherwise. If assert_on_fail is True, no return value.
-    """
-    return _assert_on_fail_handler(_wait_for_wifi_state,
-                                   assert_on_fail,
-                                   ad,
-                                   state=state)
-
-
-def _wait_for_wifi_state(ad, state):
-    """Toggles the state of wifi.
-
-    TestFailure signals are raised when something goes wrong.
-
-    Args:
-        ad: An AndroidDevice object.
-        state: Wifi state to wait for.
-    """
-    if state == ad.droid.wifiCheckState():
-        # Check if the state is already achieved, so we don't wait for the
-        # state change event by mistake.
-        return
-    ad.droid.wifiStartTrackingStateChange()
-    fail_msg = "Device did not transition to Wi-Fi state to %s on %s." % (
-        state, ad.serial)
-    try:
-        ad.ed.wait_for_event(wifi_constants.WIFI_STATE_CHANGED,
-                             lambda x: x["data"]["enabled"] == state,
-                             SHORT_TIMEOUT)
-    except Empty:
-        asserts.assert_equal(state, ad.droid.wifiCheckState(), fail_msg)
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def wifi_toggle_state(ad, new_state=None, assert_on_fail=True):
-    """Toggles the state of wifi.
-
-    Args:
-        ad: An AndroidDevice object.
-        new_state: Wifi state to set to. If None, opposite of the current state.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        If assert_on_fail is False, function returns True if the toggle was
-        successful, False otherwise. If assert_on_fail is True, no return value.
-    """
-    return _assert_on_fail_handler(_wifi_toggle_state,
-                                   assert_on_fail,
-                                   ad,
-                                   new_state=new_state)
-
-
-def _wifi_toggle_state(ad, new_state=None):
-    """Toggles the state of wifi.
-
-    TestFailure signals are raised when something goes wrong.
-
-    Args:
-        ad: An AndroidDevice object.
-        new_state: The state to set Wi-Fi to. If None, opposite of the current
-                   state will be set.
-    """
-    if new_state is None:
-        new_state = not ad.droid.wifiCheckState()
-    elif new_state == ad.droid.wifiCheckState():
-        # Check if the new_state is already achieved, so we don't wait for the
-        # state change event by mistake.
-        return
-    ad.droid.wifiStartTrackingStateChange()
-    ad.log.info("Setting Wi-Fi state to %s.", new_state)
-    ad.ed.clear_all_events()
-    # Setting wifi state.
-    ad.droid.wifiToggleState(new_state)
-    time.sleep(2)
-    fail_msg = "Failed to set Wi-Fi state to %s on %s." % (new_state,
-                                                           ad.serial)
-    try:
-        ad.ed.wait_for_event(wifi_constants.WIFI_STATE_CHANGED,
-                             lambda x: x["data"]["enabled"] == new_state,
-                             SHORT_TIMEOUT)
-    except Empty:
-        asserts.assert_equal(new_state, ad.droid.wifiCheckState(), fail_msg)
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def reset_wifi(ad):
-    """Clears all saved Wi-Fi networks on a device.
-
-    This will turn Wi-Fi on.
-
-    Args:
-        ad: An AndroidDevice object.
-
-    """
-    networks = ad.droid.wifiGetConfiguredNetworks()
-    if not networks:
-        return
-    removed = []
-    for n in networks:
-        if n['networkId'] not in removed:
-            ad.droid.wifiForgetNetwork(n['networkId'])
-            removed.append(n['networkId'])
-        else:
-            continue
-        try:
-            event = ad.ed.pop_event(wifi_constants.WIFI_FORGET_NW_SUCCESS,
-                                    SHORT_TIMEOUT)
-        except Empty:
-            logging.warning("Could not confirm the removal of network %s.", n)
-    # Check again to see if there's any network left.
-    asserts.assert_true(
-        not ad.droid.wifiGetConfiguredNetworks(),
-        "Failed to remove these configured Wi-Fi networks: %s" % networks)
-
-
-
-def toggle_airplane_mode_on_and_off(ad):
-    """Turn ON and OFF Airplane mode.
-
-    ad: An AndroidDevice object.
-    Returns: Assert if turning on/off Airplane mode fails.
-
-    """
-    ad.log.debug("Toggling Airplane mode ON.")
-    asserts.assert_true(utils.force_airplane_mode(ad, True),
-                        "Can not turn on airplane mode on: %s" % ad.serial)
-    time.sleep(DEFAULT_TIMEOUT)
-    ad.log.debug("Toggling Airplane mode OFF.")
-    asserts.assert_true(utils.force_airplane_mode(ad, False),
-                        "Can not turn on airplane mode on: %s" % ad.serial)
-    time.sleep(DEFAULT_TIMEOUT)
-
-
-def toggle_wifi_off_and_on(ad):
-    """Turn OFF and ON WiFi.
-
-    ad: An AndroidDevice object.
-    Returns: Assert if turning off/on WiFi fails.
-
-    """
-    ad.log.debug("Toggling wifi OFF.")
-    wifi_toggle_state(ad, False)
-    time.sleep(DEFAULT_TIMEOUT)
-    ad.log.debug("Toggling wifi ON.")
-    wifi_toggle_state(ad, True)
-    time.sleep(DEFAULT_TIMEOUT)
-
-
-def wifi_forget_network(ad, net_ssid):
-    """Remove configured Wifi network on an android device.
-
-    Args:
-        ad: android_device object for forget network.
-        net_ssid: ssid of network to be forget
-
-    """
-    networks = ad.droid.wifiGetConfiguredNetworks()
-    if not networks:
-        return
-    removed = []
-    for n in networks:
-        if net_ssid in n[WifiEnums.SSID_KEY] and n['networkId'] not in removed:
-            ad.droid.wifiForgetNetwork(n['networkId'])
-            removed.append(n['networkId'])
-            try:
-                event = ad.ed.pop_event(wifi_constants.WIFI_FORGET_NW_SUCCESS,
-                                        SHORT_TIMEOUT)
-            except Empty:
-                asserts.fail("Failed to remove network %s." % n)
-            break
-
-
-def wifi_test_device_init(ad, country_code=WifiEnums.CountryCode.US):
-    """Initializes an android device for wifi testing.
-
-    0. Make sure SL4A connection is established on the android device.
-    1. Disable location service's WiFi scan.
-    2. Turn WiFi on.
-    3. Clear all saved networks.
-    4. Set country code to US.
-    5. Enable WiFi verbose logging.
-    6. Sync device time with computer time.
-    7. Turn off cellular data.
-    8. Turn off ambient display.
-    """
-    utils.require_sl4a((ad, ))
-    ad.droid.wifiScannerToggleAlwaysAvailable(False)
-    msg = "Failed to turn off location service's scan."
-    asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)
-    wifi_toggle_state(ad, True)
-    reset_wifi(ad)
-    ad.droid.wifiEnableVerboseLogging(1)
-    msg = "Failed to enable WiFi verbose logging."
-    asserts.assert_equal(ad.droid.wifiGetVerboseLoggingLevel(), 1, msg)
-    # We don't verify the following settings since they are not critical.
-    # Set wpa_supplicant log level to EXCESSIVE.
-    output = ad.adb.shell(
-        "wpa_cli -i wlan0 -p -g@android:wpa_wlan0 IFNAME="
-        "wlan0 log_level EXCESSIVE",
-        ignore_status=True)
-    ad.log.info("wpa_supplicant log change status: %s", output)
-    utils.sync_device_time(ad)
-    ad.droid.telephonyToggleDataConnection(False)
-    set_wifi_country_code(ad, country_code)
-    utils.set_ambient_display(ad, False)
-
-
-def set_wifi_country_code(ad, country_code):
-    """Sets the wifi country code on the device.
-
-    Args:
-        ad: An AndroidDevice object.
-        country_code: 2 letter ISO country code
-
-    Raises:
-        An RpcException if unable to set the country code.
-    """
-    try:
-        ad.adb.shell("cmd wifi force-country-code enabled %s" % country_code)
-    except Exception as e:
-        ad.droid.wifiSetCountryCode(WifiEnums.CountryCode.US)
-
-
-def start_wifi_connection_scan(ad):
-    """Starts a wifi connection scan and wait for results to become available.
-
-    Args:
-        ad: An AndroidDevice object.
-    """
-    ad.ed.clear_all_events()
-    ad.droid.wifiStartScan()
-    try:
-        ad.ed.pop_event("WifiManagerScanResultsAvailable", 60)
-    except Empty:
-        asserts.fail("Wi-Fi results did not become available within 60s.")
-
-
-def start_wifi_connection_scan_and_return_status(ad):
-    """
-    Starts a wifi connection scan and wait for results to become available
-    or a scan failure to be reported.
-
-    Args:
-        ad: An AndroidDevice object.
-    Returns:
-        True: if scan succeeded & results are available
-        False: if scan failed
-    """
-    ad.ed.clear_all_events()
-    ad.droid.wifiStartScan()
-    try:
-        events = ad.ed.pop_events("WifiManagerScan(ResultsAvailable|Failure)",
-                                  60)
-    except Empty:
-        asserts.fail(
-            "Wi-Fi scan results/failure did not become available within 60s.")
-    # If there are multiple matches, we check for atleast one success.
-    for event in events:
-        if event["name"] == "WifiManagerScanResultsAvailable":
-            return True
-        elif event["name"] == "WifiManagerScanFailure":
-            ad.log.debug("Scan failure received")
-    return False
-
-
-def start_wifi_connection_scan_and_check_for_network(ad,
-                                                     network_ssid,
-                                                     max_tries=3):
-    """
-    Start connectivity scans & checks if the |network_ssid| is seen in
-    scan results. The method performs a max of |max_tries| connectivity scans
-    to find the network.
-
-    Args:
-        ad: An AndroidDevice object.
-        network_ssid: SSID of the network we are looking for.
-        max_tries: Number of scans to try.
-    Returns:
-        True: if network_ssid is found in scan results.
-        False: if network_ssid is not found in scan results.
-    """
-    start_time = time.time()
-    for num_tries in range(max_tries):
-        if start_wifi_connection_scan_and_return_status(ad):
-            scan_results = ad.droid.wifiGetScanResults()
-            match_results = match_networks({WifiEnums.SSID_KEY: network_ssid},
-                                           scan_results)
-            if len(match_results) > 0:
-                ad.log.debug("Found network in %s seconds." %
-                             (time.time() - start_time))
-                return True
-    ad.log.debug("Did not find network in %s seconds." %
-                 (time.time() - start_time))
-    return False
-
-
-def start_wifi_connection_scan_and_ensure_network_found(
-        ad, network_ssid, max_tries=3):
-    """
-    Start connectivity scans & ensure the |network_ssid| is seen in
-    scan results. The method performs a max of |max_tries| connectivity scans
-    to find the network.
-    This method asserts on failure!
-
-    Args:
-        ad: An AndroidDevice object.
-        network_ssid: SSID of the network we are looking for.
-        max_tries: Number of scans to try.
-    """
-    ad.log.info("Starting scans to ensure %s is present", network_ssid)
-    assert_msg = "Failed to find " + network_ssid + " in scan results" \
-        " after " + str(max_tries) + " tries"
-    asserts.assert_true(
-        start_wifi_connection_scan_and_check_for_network(
-            ad, network_ssid, max_tries), assert_msg)
-
-
-def start_wifi_connection_scan_and_ensure_network_not_found(
-        ad, network_ssid, max_tries=3):
-    """
-    Start connectivity scans & ensure the |network_ssid| is not seen in
-    scan results. The method performs a max of |max_tries| connectivity scans
-    to find the network.
-    This method asserts on failure!
-
-    Args:
-        ad: An AndroidDevice object.
-        network_ssid: SSID of the network we are looking for.
-        max_tries: Number of scans to try.
-    """
-    ad.log.info("Starting scans to ensure %s is not present", network_ssid)
-    assert_msg = "Found " + network_ssid + " in scan results" \
-        " after " + str(max_tries) + " tries"
-    asserts.assert_false(
-        start_wifi_connection_scan_and_check_for_network(
-            ad, network_ssid, max_tries), assert_msg)
-
-
-def start_wifi_background_scan(ad, scan_setting):
-    """Starts wifi background scan.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        scan_setting: A dict representing the settings of the scan.
-
-    Returns:
-        If scan was started successfully, event data of success event is returned.
-    """
-    idx = ad.droid.wifiScannerStartBackgroundScan(scan_setting)
-    event = ad.ed.pop_event("WifiScannerScan{}onSuccess".format(idx),
-                            SHORT_TIMEOUT)
-    return event['data']
-
-
-def save_wifi_soft_ap_config(ad,
-                             wifi_config,
-                             band=None,
-                             hidden=None,
-                             security=None,
-                             password=None,
-                             channel=None,
-                             max_clients=None,
-                             shutdown_timeout_enable=None,
-                             shutdown_timeout_millis=None,
-                             client_control_enable=None,
-                             allowedList=None,
-                             blockedList=None,
-                             bands=None,
-                             channel_frequencys=None,
-                             mac_randomization_setting=None,
-                             bridged_opportunistic_shutdown_enabled=None,
-                             ieee80211ax_enabled=None):
-    """ Save a soft ap configuration and verified
-    Args:
-        ad: android_device to set soft ap configuration.
-        wifi_config: a soft ap configuration object, at least include SSID.
-        band: specifies the band for the soft ap.
-        hidden: specifies the soft ap need to broadcast its SSID or not.
-        security: specifies the security type for the soft ap.
-        password: specifies the password for the soft ap.
-        channel: specifies the channel for the soft ap.
-        max_clients: specifies the maximum connected client number.
-        shutdown_timeout_enable: specifies the auto shut down enable or not.
-        shutdown_timeout_millis: specifies the shut down timeout value.
-        client_control_enable: specifies the client control enable or not.
-        allowedList: specifies allowed clients list.
-        blockedList: specifies blocked clients list.
-        bands: specifies the band list for the soft ap.
-        channel_frequencys: specifies the channel frequency list for soft ap.
-        mac_randomization_setting: specifies the mac randomization setting.
-        bridged_opportunistic_shutdown_enabled: specifies the opportunistic
-                shutdown enable or not.
-        ieee80211ax_enabled: specifies the ieee80211ax enable or not.
-    """
-    if security and password:
-        wifi_config[WifiEnums.SECURITY] = security
-        wifi_config[WifiEnums.PWD_KEY] = password
-    if hidden is not None:
-        wifi_config[WifiEnums.HIDDEN_KEY] = hidden
-    if max_clients is not None:
-        wifi_config[WifiEnums.AP_MAXCLIENTS_KEY] = max_clients
-    if shutdown_timeout_enable is not None:
-        wifi_config[
-            WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY] = shutdown_timeout_enable
-    if shutdown_timeout_millis is not None:
-        wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY] = shutdown_timeout_millis
-    if client_control_enable is not None:
-        wifi_config[WifiEnums.AP_CLIENTCONTROL_KEY] = client_control_enable
-    if allowedList is not None:
-        wifi_config[WifiEnums.AP_ALLOWEDLIST_KEY] = allowedList
-    if blockedList is not None:
-        wifi_config[WifiEnums.AP_BLOCKEDLIST_KEY] = blockedList
-    if mac_randomization_setting is not None:
-        wifi_config[WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY
-                ] = mac_randomization_setting
-    if bridged_opportunistic_shutdown_enabled is not None:
-        wifi_config[WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY
-                ] = bridged_opportunistic_shutdown_enabled
-    if ieee80211ax_enabled is not None:
-       wifi_config[WifiEnums.AP_IEEE80211AX_ENABLED_KEY]= ieee80211ax_enabled
-    if channel_frequencys is not None:
-        wifi_config[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY] = channel_frequencys
-    elif bands is not None:
-        wifi_config[WifiEnums.AP_BANDS_KEY] = bands
-    elif band is not None:
-        if channel is not None:
-            wifi_config[WifiEnums.AP_BAND_KEY] = band
-            wifi_config[WifiEnums.AP_CHANNEL_KEY] = channel
-        else:
-             wifi_config[WifiEnums.AP_BAND_KEY] = band
-
-    if WifiEnums.AP_CHANNEL_KEY in wifi_config and wifi_config[
-            WifiEnums.AP_CHANNEL_KEY] == 0:
-        del wifi_config[WifiEnums.AP_CHANNEL_KEY]
-
-    if WifiEnums.SECURITY in wifi_config and wifi_config[
-            WifiEnums.SECURITY] == WifiEnums.SoftApSecurityType.OPEN:
-        del wifi_config[WifiEnums.SECURITY]
-        del wifi_config[WifiEnums.PWD_KEY]
-
-    asserts.assert_true(ad.droid.wifiSetWifiApConfiguration(wifi_config),
-                        "Failed to set WifiAp Configuration")
-
-    wifi_ap = ad.droid.wifiGetApConfiguration()
-    asserts.assert_true(
-        wifi_ap[WifiEnums.SSID_KEY] == wifi_config[WifiEnums.SSID_KEY],
-        "Hotspot SSID doesn't match")
-    if WifiEnums.SECURITY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.SECURITY] == wifi_config[WifiEnums.SECURITY],
-            "Hotspot Security doesn't match")
-    if WifiEnums.PWD_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.PWD_KEY] == wifi_config[WifiEnums.PWD_KEY],
-            "Hotspot Password doesn't match")
-
-    if WifiEnums.HIDDEN_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.HIDDEN_KEY] == wifi_config[WifiEnums.HIDDEN_KEY],
-            "Hotspot hidden setting doesn't match")
-
-    if WifiEnums.AP_CHANNEL_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_CHANNEL_KEY] == wifi_config[
-                WifiEnums.AP_CHANNEL_KEY], "Hotspot Channel doesn't match")
-    if WifiEnums.AP_MAXCLIENTS_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_MAXCLIENTS_KEY] == wifi_config[
-                WifiEnums.AP_MAXCLIENTS_KEY],
-            "Hotspot Max Clients doesn't match")
-    if WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY] == wifi_config[
-                WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY],
-            "Hotspot ShutDown feature flag doesn't match")
-    if WifiEnums.AP_SHUTDOWNTIMEOUT_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY] == wifi_config[
-                WifiEnums.AP_SHUTDOWNTIMEOUT_KEY],
-            "Hotspot ShutDown timeout setting doesn't match")
-    if WifiEnums.AP_CLIENTCONTROL_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_CLIENTCONTROL_KEY] == wifi_config[
-                WifiEnums.AP_CLIENTCONTROL_KEY],
-            "Hotspot Client control flag doesn't match")
-    if WifiEnums.AP_ALLOWEDLIST_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_ALLOWEDLIST_KEY] == wifi_config[
-                WifiEnums.AP_ALLOWEDLIST_KEY],
-            "Hotspot Allowed List doesn't match")
-    if WifiEnums.AP_BLOCKEDLIST_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_BLOCKEDLIST_KEY] == wifi_config[
-                WifiEnums.AP_BLOCKEDLIST_KEY],
-            "Hotspot Blocked List doesn't match")
-
-    if WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY] == wifi_config[
-                  WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY],
-            "Hotspot Mac randomization setting doesn't match")
-
-    if WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY] == wifi_config[
-                  WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY],
-            "Hotspot bridged shutdown enable setting doesn't match")
-
-    if WifiEnums.AP_IEEE80211AX_ENABLED_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_IEEE80211AX_ENABLED_KEY] == wifi_config[
-                  WifiEnums.AP_IEEE80211AX_ENABLED_KEY],
-            "Hotspot 80211 AX enable setting doesn't match")
-
-    if WifiEnums.AP_CHANNEL_FREQUENCYS_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY] == wifi_config[
-                  WifiEnums.AP_CHANNEL_FREQUENCYS_KEY],
-            "Hotspot channels setting doesn't match")
-
-def toggle_wifi_and_wait_for_reconnection(ad,
-                                          network,
-                                          num_of_tries=1,
-                                          assert_on_fail=True):
-    """Toggle wifi state and then wait for Android device to reconnect to
-    the provided wifi network.
-
-    This expects the device to be already connected to the provided network.
-
-    Logic steps are
-     1. Ensure that we're connected to the network.
-     2. Turn wifi off.
-     3. Wait for 10 seconds.
-     4. Turn wifi on.
-     5. Wait for the "connected" event, then confirm the connected ssid is the
-        one requested.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to await connection. The
-                 dictionary must have the key "SSID".
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        If assert_on_fail is False, function returns True if the toggle was
-        successful, False otherwise. If assert_on_fail is True, no return value.
-    """
-    return _assert_on_fail_handler(_toggle_wifi_and_wait_for_reconnection,
-                                   assert_on_fail,
-                                   ad,
-                                   network,
-                                   num_of_tries=num_of_tries)
-
-
-def _toggle_wifi_and_wait_for_reconnection(ad, network, num_of_tries=3):
-    """Toggle wifi state and then wait for Android device to reconnect to
-    the provided wifi network.
-
-    This expects the device to be already connected to the provided network.
-
-    Logic steps are
-     1. Ensure that we're connected to the network.
-     2. Turn wifi off.
-     3. Wait for 10 seconds.
-     4. Turn wifi on.
-     5. Wait for the "connected" event, then confirm the connected ssid is the
-        one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to await connection. The
-                 dictionary must have the key "SSID".
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-    """
-    expected_ssid = network[WifiEnums.SSID_KEY]
-    # First ensure that we're already connected to the provided network.
-    verify_con = {WifiEnums.SSID_KEY: expected_ssid}
-    verify_wifi_connection_info(ad, verify_con)
-    # Now toggle wifi state and wait for the connection event.
-    wifi_toggle_state(ad, False)
-    time.sleep(10)
-    wifi_toggle_state(ad, True)
-    ad.droid.wifiStartTrackingStateChange()
-    try:
-        connect_result = None
-        for i in range(num_of_tries):
-            try:
-                connect_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED,
-                                                 30)
-                break
-            except Empty:
-                pass
-        asserts.assert_true(
-            connect_result, "Failed to connect to Wi-Fi network %s on %s" %
-            (network, ad.serial))
-        logging.debug("Connection result on %s: %s.", ad.serial,
-                      connect_result)
-        actual_ssid = connect_result['data'][WifiEnums.SSID_KEY]
-        asserts.assert_equal(
-            actual_ssid, expected_ssid, "Connected to the wrong network on %s."
-            "Expected %s, but got %s." %
-            (ad.serial, expected_ssid, actual_ssid))
-        logging.info("Connected to Wi-Fi network %s on %s", actual_ssid,
-                     ad.serial)
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def wait_for_connect(ad,
-                     expected_ssid=None,
-                     expected_id=None,
-                     tries=2,
-                     assert_on_fail=True):
-    """Wait for a connect event.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: An Android device object.
-        expected_ssid: SSID of the network to connect to.
-        expected_id: Network Id of the network to connect to.
-        tries: An integer that is the number of times to try before failing.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        Returns a value only if assert_on_fail is false.
-        Returns True if the connection was successful, False otherwise.
-    """
-    return _assert_on_fail_handler(_wait_for_connect, assert_on_fail, ad,
-                                   expected_ssid, expected_id, tries)
-
-
-def _wait_for_connect(ad, expected_ssid=None, expected_id=None, tries=2):
-    """Wait for a connect event.
-
-    Args:
-        ad: An Android device object.
-        expected_ssid: SSID of the network to connect to.
-        expected_id: Network Id of the network to connect to.
-        tries: An integer that is the number of times to try before failing.
-    """
-    ad.droid.wifiStartTrackingStateChange()
-    try:
-        connect_result = _wait_for_connect_event(ad,
-                                                 ssid=expected_ssid,
-                                                 id=expected_id,
-                                                 tries=tries)
-        asserts.assert_true(
-            connect_result,
-            "Failed to connect to Wi-Fi network %s" % expected_ssid)
-        ad.log.debug("Wi-Fi connection result: %s.", connect_result)
-        actual_ssid = connect_result['data'][WifiEnums.SSID_KEY]
-        if expected_ssid:
-            asserts.assert_equal(actual_ssid, expected_ssid,
-                                 "Connected to the wrong network")
-        actual_id = connect_result['data'][WifiEnums.NETID_KEY]
-        if expected_id:
-            asserts.assert_equal(actual_id, expected_id,
-                                 "Connected to the wrong network")
-        ad.log.info("Connected to Wi-Fi network %s.", actual_ssid)
-    except Empty:
-        asserts.fail("Failed to start connection process to %s" %
-                     expected_ssid)
-    except Exception as error:
-        ad.log.error("Failed to connect to %s with error %s", expected_ssid,
-                     error)
-        raise signals.TestFailure("Failed to connect to %s network" %
-                                  expected_ssid)
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def _wait_for_connect_event(ad, ssid=None, id=None, tries=1):
-    """Wait for a connect event on queue and pop when available.
-
-    Args:
-        ad: An Android device object.
-        ssid: SSID of the network to connect to.
-        id: Network Id of the network to connect to.
-        tries: An integer that is the number of times to try before failing.
-
-    Returns:
-        A dict with details of the connection data, which looks like this:
-        {
-         'time': 1485460337798,
-         'name': 'WifiNetworkConnected',
-         'data': {
-                  'rssi': -27,
-                  'is_24ghz': True,
-                  'mac_address': '02:00:00:00:00:00',
-                  'network_id': 1,
-                  'BSSID': '30:b5:c2:33:d3:fc',
-                  'ip_address': 117483712,
-                  'link_speed': 54,
-                  'supplicant_state': 'completed',
-                  'hidden_ssid': False,
-                  'SSID': 'wh_ap1_2g',
-                  'is_5ghz': False}
-        }
-
-    """
-    conn_result = None
-
-    # If ssid and network id is None, just wait for any connect event.
-    if id is None and ssid is None:
-        for i in range(tries):
-            try:
-                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED,
-                                              30)
-                break
-            except Empty:
-                pass
-    else:
-        # If ssid or network id is specified, wait for specific connect event.
-        for i in range(tries):
-            try:
-                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED,
-                                              30)
-                if id and conn_result['data'][WifiEnums.NETID_KEY] == id:
-                    break
-                elif ssid and conn_result['data'][WifiEnums.SSID_KEY] == ssid:
-                    break
-            except Empty:
-                pass
-
-    return conn_result
-
-
-def wait_for_disconnect(ad, timeout=10):
-    """Wait for a disconnect event within the specified timeout.
-
-    Args:
-        ad: Android device object.
-        timeout: Timeout in seconds.
-
-    """
-    try:
-        ad.droid.wifiStartTrackingStateChange()
-        event = ad.ed.pop_event("WifiNetworkDisconnected", timeout)
-    except Empty:
-        raise signals.TestFailure("Device did not disconnect from the network")
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def ensure_no_disconnect(ad, duration=10):
-    """Ensure that there is no disconnect for the specified duration.
-
-    Args:
-        ad: Android device object.
-        duration: Duration in seconds.
-
-    """
-    try:
-        ad.droid.wifiStartTrackingStateChange()
-        event = ad.ed.pop_event("WifiNetworkDisconnected", duration)
-        raise signals.TestFailure("Device disconnected from the network")
-    except Empty:
-        pass
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def connect_to_wifi_network(ad, network, assert_on_fail=True,
-                            check_connectivity=True, hidden=False,
-                            num_of_scan_tries=DEFAULT_SCAN_TRIES,
-                            num_of_connect_tries=DEFAULT_CONNECT_TRIES):
-    """Connection logic for open and psk wifi networks.
-
-    Args:
-        ad: AndroidDevice to use for connection
-        network: network info of the network to connect to
-        assert_on_fail: If true, errors from wifi_connect will raise
-                        test failure signals.
-        hidden: Is the Wifi network hidden.
-        num_of_scan_tries: The number of times to try scan
-                           interface before declaring failure.
-        num_of_connect_tries: The number of times to try
-                              connect wifi before declaring failure.
-    """
-    if hidden:
-        start_wifi_connection_scan_and_ensure_network_not_found(
-            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries)
-    else:
-        start_wifi_connection_scan_and_ensure_network_found(
-            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries)
-    wifi_connect(ad,
-                 network,
-                 num_of_tries=num_of_connect_tries,
-                 assert_on_fail=assert_on_fail,
-                 check_connectivity=check_connectivity)
-
-
-def connect_to_wifi_network_with_id(ad, network_id, network_ssid):
-    """Connect to the given network using network id and verify SSID.
-
-    Args:
-        network_id: int Network Id of the network.
-        network_ssid: string SSID of the network.
-
-    Returns: True if connect using network id was successful;
-             False otherwise.
-
-    """
-    start_wifi_connection_scan_and_ensure_network_found(ad, network_ssid)
-    wifi_connect_by_id(ad, network_id)
-    connect_data = ad.droid.wifiGetConnectionInfo()
-    connect_ssid = connect_data[WifiEnums.SSID_KEY]
-    ad.log.debug("Expected SSID = %s Connected SSID = %s" %
-                 (network_ssid, connect_ssid))
-    if connect_ssid != network_ssid:
-        return False
-    return True
-
-
-def wifi_connect(ad,
-                 network,
-                 num_of_tries=1,
-                 assert_on_fail=True,
-                 check_connectivity=True):
-    """Connect an Android device to a wifi network.
-
-    Initiate connection to a wifi network, wait for the "connected" event, then
-    confirm the connected ssid is the one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to connect to. The
-                 dictionary must have the key "SSID".
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        Returns a value only if assert_on_fail is false.
-        Returns True if the connection was successful, False otherwise.
-    """
-    return _assert_on_fail_handler(_wifi_connect,
-                                   assert_on_fail,
-                                   ad,
-                                   network,
-                                   num_of_tries=num_of_tries,
-                                   check_connectivity=check_connectivity)
-
-
-def _wifi_connect(ad, network, num_of_tries=1, check_connectivity=True):
-    """Connect an Android device to a wifi network.
-
-    Initiate connection to a wifi network, wait for the "connected" event, then
-    confirm the connected ssid is the one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to connect to. The
-                 dictionary must have the key "SSID".
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-    """
-    asserts.assert_true(
-        WifiEnums.SSID_KEY in network,
-        "Key '%s' must be present in network definition." % WifiEnums.SSID_KEY)
-    ad.droid.wifiStartTrackingStateChange()
-    expected_ssid = network[WifiEnums.SSID_KEY]
-    ad.droid.wifiConnectByConfig(network)
-    ad.log.info("Starting connection process to %s", expected_ssid)
-    try:
-        event = ad.ed.pop_event(wifi_constants.CONNECT_BY_CONFIG_SUCCESS, 30)
-        connect_result = _wait_for_connect_event(ad,
-                                                 ssid=expected_ssid,
-                                                 tries=num_of_tries)
-        asserts.assert_true(
-            connect_result, "Failed to connect to Wi-Fi network %s on %s" %
-            (network, ad.serial))
-        ad.log.debug("Wi-Fi connection result: %s.", connect_result)
-        actual_ssid = connect_result['data'][WifiEnums.SSID_KEY]
-        asserts.assert_equal(
-            actual_ssid, expected_ssid,
-            "Connected to the wrong network on %s." % ad.serial)
-        ad.log.info("Connected to Wi-Fi network %s.", actual_ssid)
-
-        if check_connectivity:
-            internet = validate_connection(ad, DEFAULT_PING_ADDR)
-            if not internet:
-                raise signals.TestFailure(
-                    "Failed to connect to internet on %s" % expected_ssid)
-    except Empty:
-        asserts.fail("Failed to start connection process to %s on %s" %
-                     (network, ad.serial))
-    except Exception as error:
-        ad.log.error("Failed to connect to %s with error %s", expected_ssid,
-                     error)
-        raise signals.TestFailure("Failed to connect to %s network" % network)
-
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def wifi_connect_by_id(ad, network_id, num_of_tries=3, assert_on_fail=True):
-    """Connect an Android device to a wifi network using network Id.
-
-    Start connection to the wifi network, with the given network Id, wait for
-    the "connected" event, then verify the connected network is the one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network_id: Integer specifying the network id of the network.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        Returns a value only if assert_on_fail is false.
-        Returns True if the connection was successful, False otherwise.
-    """
-    _assert_on_fail_handler(_wifi_connect_by_id, assert_on_fail, ad,
-                            network_id, num_of_tries)
-
-
-def _wifi_connect_by_id(ad, network_id, num_of_tries=1):
-    """Connect an Android device to a wifi network using it's network id.
-
-    Start connection to the wifi network, with the given network id, wait for
-    the "connected" event, then verify the connected network is the one requested.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network_id: Integer specifying the network id of the network.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-    """
-    ad.droid.wifiStartTrackingStateChange()
-    # Clear all previous events.
-    ad.ed.clear_all_events()
-    ad.droid.wifiConnectByNetworkId(network_id)
-    ad.log.info("Starting connection to network with id %d", network_id)
-    try:
-        event = ad.ed.pop_event(wifi_constants.CONNECT_BY_NETID_SUCCESS, 60)
-        connect_result = _wait_for_connect_event(ad,
-                                                 id=network_id,
-                                                 tries=num_of_tries)
-        asserts.assert_true(
-            connect_result,
-            "Failed to connect to Wi-Fi network using network id")
-        ad.log.debug("Wi-Fi connection result: %s", connect_result)
-        actual_id = connect_result['data'][WifiEnums.NETID_KEY]
-        asserts.assert_equal(
-            actual_id, network_id, "Connected to the wrong network on %s."
-            "Expected network id = %d, but got %d." %
-            (ad.serial, network_id, actual_id))
-        expected_ssid = connect_result['data'][WifiEnums.SSID_KEY]
-        ad.log.info("Connected to Wi-Fi network %s with %d network id.",
-                    expected_ssid, network_id)
-
-        internet = validate_connection(ad, DEFAULT_PING_ADDR)
-        if not internet:
-            raise signals.TestFailure("Failed to connect to internet on %s" %
-                                      expected_ssid)
-    except Empty:
-        asserts.fail("Failed to connect to network with id %d on %s" %
-                     (network_id, ad.serial))
-    except Exception as error:
-        ad.log.error("Failed to connect to network with id %d with error %s",
-                     network_id, error)
-        raise signals.TestFailure("Failed to connect to network with network"
-                                  " id %d" % network_id)
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def wifi_connect_using_network_request(ad,
-                                       network,
-                                       network_specifier,
-                                       num_of_tries=3):
-    """Connect an Android device to a wifi network using network request.
-
-    Trigger a network request with the provided network specifier,
-    wait for the "onMatch" event, ensure that the scan results in "onMatch"
-    event contain the specified network, then simulate the user granting the
-    request with the specified network selected. Then wait for the "onAvailable"
-    network callback indicating successful connection to network.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network_specifier: A dictionary representing the network specifier to
-                           use.
-        network: A dictionary representing the network to connect to. The
-                 dictionary must have the key "SSID".
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure.
-    Returns:
-        key: Key corresponding to network request.
-    """
-    key = ad.droid.connectivityRequestWifiNetwork(network_specifier, 0)
-    ad.log.info("Sent network request %s with %s " % (key, network_specifier))
-    # Need a delay here because UI interaction should only start once wifi
-    # starts processing the request.
-    time.sleep(wifi_constants.NETWORK_REQUEST_CB_REGISTER_DELAY_SEC)
-    _wait_for_wifi_connect_after_network_request(ad, network, key,
-                                                 num_of_tries)
-    return key
-
-
-def wait_for_wifi_connect_after_network_request(ad,
-                                                network,
-                                                key,
-                                                num_of_tries=3,
-                                                assert_on_fail=True):
-    """
-    Simulate and verify the connection flow after initiating the network
-    request.
-
-    Wait for the "onMatch" event, ensure that the scan results in "onMatch"
-    event contain the specified network, then simulate the user granting the
-    request with the specified network selected. Then wait for the "onAvailable"
-    network callback indicating successful connection to network.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to connect to. The
-                 dictionary must have the key "SSID".
-        key: Key corresponding to network request.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        Returns a value only if assert_on_fail is false.
-        Returns True if the connection was successful, False otherwise.
-    """
-    _assert_on_fail_handler(_wait_for_wifi_connect_after_network_request,
-                            assert_on_fail, ad, network, key, num_of_tries)
-
-
-def _wait_for_wifi_connect_after_network_request(ad,
-                                                 network,
-                                                 key,
-                                                 num_of_tries=3):
-    """
-    Simulate and verify the connection flow after initiating the network
-    request.
-
-    Wait for the "onMatch" event, ensure that the scan results in "onMatch"
-    event contain the specified network, then simulate the user granting the
-    request with the specified network selected. Then wait for the "onAvailable"
-    network callback indicating successful connection to network.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to connect to. The
-                 dictionary must have the key "SSID".
-        key: Key corresponding to network request.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure.
-    """
-    asserts.assert_true(
-        WifiEnums.SSID_KEY in network,
-        "Key '%s' must be present in network definition." % WifiEnums.SSID_KEY)
-    ad.droid.wifiStartTrackingStateChange()
-    expected_ssid = network[WifiEnums.SSID_KEY]
-    ad.droid.wifiRegisterNetworkRequestMatchCallback()
-    # Wait for the platform to scan and return a list of networks
-    # matching the request
-    try:
-        matched_network = None
-        for _ in [0, num_of_tries]:
-            on_match_event = ad.ed.pop_event(
-                wifi_constants.WIFI_NETWORK_REQUEST_MATCH_CB_ON_MATCH, 60)
-            asserts.assert_true(on_match_event,
-                                "Network request on match not received.")
-            matched_scan_results = on_match_event["data"]
-            ad.log.debug("Network request on match results %s",
-                         matched_scan_results)
-            matched_network = match_networks(
-                {WifiEnums.SSID_KEY: network[WifiEnums.SSID_KEY]},
-                matched_scan_results)
-            ad.log.debug("Network request on match %s", matched_network)
-            if matched_network:
-                break
-
-        asserts.assert_true(matched_network,
-                            "Target network %s not found" % network)
-
-        ad.droid.wifiSendUserSelectionForNetworkRequestMatch(network)
-        ad.log.info("Sent user selection for network request %s",
-                    expected_ssid)
-
-        # Wait for the platform to connect to the network.
-        autils.wait_for_event_with_keys(
-            ad, cconsts.EVENT_NETWORK_CALLBACK, 60,
-            (cconsts.NETWORK_CB_KEY_ID, key),
-            (cconsts.NETWORK_CB_KEY_EVENT, cconsts.NETWORK_CB_AVAILABLE))
-        on_capabilities_changed = autils.wait_for_event_with_keys(
-            ad, cconsts.EVENT_NETWORK_CALLBACK, 10,
-            (cconsts.NETWORK_CB_KEY_ID, key),
-            (cconsts.NETWORK_CB_KEY_EVENT,
-             cconsts.NETWORK_CB_CAPABILITIES_CHANGED))
-        connected_network = None
-        # WifiInfo is attached to TransportInfo only in S.
-        if ad.droid.isSdkAtLeastS():
-            connected_network = (
-                on_capabilities_changed["data"][
-                    cconsts.NETWORK_CB_KEY_TRANSPORT_INFO]
-            )
-        else:
-            connected_network = ad.droid.wifiGetConnectionInfo()
-        ad.log.info("Connected to network %s", connected_network)
-        asserts.assert_equal(
-            connected_network[WifiEnums.SSID_KEY], expected_ssid,
-            "Connected to the wrong network."
-            "Expected %s, but got %s." % (network, connected_network))
-    except Empty:
-        asserts.fail("Failed to connect to %s" % expected_ssid)
-    except Exception as error:
-        ad.log.error("Failed to connect to %s with error %s" %
-                     (expected_ssid, error))
-        raise signals.TestFailure("Failed to connect to %s network" % network)
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def wifi_passpoint_connect(ad,
-                           passpoint_network,
-                           num_of_tries=1,
-                           assert_on_fail=True):
-    """Connect an Android device to a wifi network.
-
-    Initiate connection to a wifi network, wait for the "connected" event, then
-    confirm the connected ssid is the one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        passpoint_network: SSID of the Passpoint network to connect to.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        If assert_on_fail is False, function returns network id, if the connect was
-        successful, False otherwise. If assert_on_fail is True, no return value.
-    """
-    _assert_on_fail_handler(_wifi_passpoint_connect,
-                            assert_on_fail,
-                            ad,
-                            passpoint_network,
-                            num_of_tries=num_of_tries)
-
-
-def _wifi_passpoint_connect(ad, passpoint_network, num_of_tries=1):
-    """Connect an Android device to a wifi network.
-
-    Initiate connection to a wifi network, wait for the "connected" event, then
-    confirm the connected ssid is the one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        passpoint_network: SSID of the Passpoint network to connect to.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-    """
-    ad.droid.wifiStartTrackingStateChange()
-    expected_ssid = passpoint_network
-    ad.log.info("Starting connection process to passpoint %s", expected_ssid)
-
-    try:
-        connect_result = _wait_for_connect_event(ad, expected_ssid,
-                                                 num_of_tries)
-        asserts.assert_true(
-            connect_result, "Failed to connect to WiFi passpoint network %s on"
-            " %s" % (expected_ssid, ad.serial))
-        ad.log.info("Wi-Fi connection result: %s.", connect_result)
-        actual_ssid = connect_result['data'][WifiEnums.SSID_KEY]
-        asserts.assert_equal(
-            actual_ssid, expected_ssid,
-            "Connected to the wrong network on %s." % ad.serial)
-        ad.log.info("Connected to Wi-Fi passpoint network %s.", actual_ssid)
-
-        internet = validate_connection(ad, DEFAULT_PING_ADDR)
-        if not internet:
-            raise signals.TestFailure("Failed to connect to internet on %s" %
-                                      expected_ssid)
-    except Exception as error:
-        ad.log.error("Failed to connect to passpoint network %s with error %s",
-                     expected_ssid, error)
-        raise signals.TestFailure("Failed to connect to %s passpoint network" %
-                                  expected_ssid)
-
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def delete_passpoint(ad, fqdn):
-    """Delete a required Passpoint configuration."""
-    try:
-        ad.droid.removePasspointConfig(fqdn)
-        return True
-    except Exception as error:
-        ad.log.error(
-            "Failed to remove passpoint configuration with FQDN=%s "
-            "and error=%s", fqdn, error)
-        return False
-
-
-def start_wifi_single_scan(ad, scan_setting):
-    """Starts wifi single shot scan.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        scan_setting: A dict representing the settings of the scan.
-
-    Returns:
-        If scan was started successfully, event data of success event is returned.
-    """
-    idx = ad.droid.wifiScannerStartScan(scan_setting)
-    event = ad.ed.pop_event("WifiScannerScan%sonSuccess" % idx, SHORT_TIMEOUT)
-    ad.log.debug("Got event %s", event)
-    return event['data']
-
-
-def track_connection(ad, network_ssid, check_connection_count):
-    """Track wifi connection to network changes for given number of counts
-
-    Args:
-        ad: android_device object for forget network.
-        network_ssid: network ssid to which connection would be tracked
-        check_connection_count: Integer for maximum number network connection
-                                check.
-    Returns:
-        True if connection to given network happen, else return False.
-    """
-    ad.droid.wifiStartTrackingStateChange()
-    while check_connection_count > 0:
-        connect_network = ad.ed.pop_event("WifiNetworkConnected", 120)
-        ad.log.info("Connected to network %s", connect_network)
-        if (WifiEnums.SSID_KEY in connect_network['data'] and
-                connect_network['data'][WifiEnums.SSID_KEY] == network_ssid):
-            return True
-        check_connection_count -= 1
-    ad.droid.wifiStopTrackingStateChange()
-    return False
-
-
-def get_scan_time_and_channels(wifi_chs, scan_setting, stime_channel):
-    """Calculate the scan time required based on the band or channels in scan
-    setting
-
-    Args:
-        wifi_chs: Object of channels supported
-        scan_setting: scan setting used for start scan
-        stime_channel: scan time per channel
-
-    Returns:
-        scan_time: time required for completing a scan
-        scan_channels: channel used for scanning
-    """
-    scan_time = 0
-    scan_channels = []
-    if "band" in scan_setting and "channels" not in scan_setting:
-        scan_channels = wifi_chs.band_to_freq(scan_setting["band"])
-    elif "channels" in scan_setting and "band" not in scan_setting:
-        scan_channels = scan_setting["channels"]
-    scan_time = len(scan_channels) * stime_channel
-    for channel in scan_channels:
-        if channel in WifiEnums.DFS_5G_FREQUENCIES:
-            scan_time += 132  #passive scan time on DFS
-    return scan_time, scan_channels
-
-
-def start_wifi_track_bssid(ad, track_setting):
-    """Start tracking Bssid for the given settings.
-
-    Args:
-      ad: android_device object.
-      track_setting: Setting for which the bssid tracking should be started
-
-    Returns:
-      If tracking started successfully, event data of success event is returned.
-    """
-    idx = ad.droid.wifiScannerStartTrackingBssids(
-        track_setting["bssidInfos"], track_setting["apLostThreshold"])
-    event = ad.ed.pop_event("WifiScannerBssid{}onSuccess".format(idx),
-                            SHORT_TIMEOUT)
-    return event['data']
-
-
-def convert_pem_key_to_pkcs8(in_file, out_file):
-    """Converts the key file generated by us to the format required by
-    Android using openssl.
-
-    The input file must have the extension "pem". The output file must
-    have the extension "der".
-
-    Args:
-        in_file: The original key file.
-        out_file: The full path to the converted key file, including
-        filename.
-    """
-    asserts.assert_true(in_file.endswith(".pem"), "Input file has to be .pem.")
-    asserts.assert_true(out_file.endswith(".der"),
-                        "Output file has to be .der.")
-    cmd = ("openssl pkcs8 -inform PEM -in {} -outform DER -out {} -nocrypt"
-           " -topk8").format(in_file, out_file)
-    utils.exe_cmd(cmd)
-
-
-def validate_connection(ad,
-                        ping_addr=DEFAULT_PING_ADDR,
-                        wait_time=15,
-                        ping_gateway=True):
-    """Validate internet connection by pinging the address provided.
-
-    Args:
-        ad: android_device object.
-        ping_addr: address on internet for pinging.
-        wait_time: wait for some time before validating connection
-
-    Returns:
-        ping output if successful, NULL otherwise.
-    """
-    android_version = int(ad.adb.shell("getprop ro.vendor.build.version.release"))
-    # wait_time to allow for DHCP to complete.
-    for i in range(wait_time):
-        if ad.droid.connectivityNetworkIsConnected():
-            if (android_version > 10 and ad.droid.connectivityGetIPv4DefaultGateway()) or android_version < 11:
-                break
-        time.sleep(1)
-    ping = False
-    try:
-        ping = ad.droid.httpPing(ping_addr)
-        ad.log.info("Http ping result: %s.", ping)
-    except:
-        pass
-    if android_version > 10 and not ping and ping_gateway:
-        ad.log.info("Http ping failed. Pinging default gateway")
-        gw = ad.droid.connectivityGetIPv4DefaultGateway()
-        result = ad.adb.shell("ping -c 6 {}".format(gw))
-        ad.log.info("Default gateway ping result: %s" % result)
-        ping = False if "100% packet loss" in result else True
-    return ping
-
-
-#TODO(angli): This can only verify if an actual value is exactly the same.
-# Would be nice to be able to verify an actual value is one of serveral.
-def verify_wifi_connection_info(ad, expected_con):
-    """Verifies that the information of the currently connected wifi network is
-    as expected.
-
-    Args:
-        expected_con: A dict representing expected key-value pairs for wifi
-            connection. e.g. {"SSID": "test_wifi"}
-    """
-    current_con = ad.droid.wifiGetConnectionInfo()
-    case_insensitive = ["BSSID", "supplicant_state"]
-    ad.log.debug("Current connection: %s", current_con)
-    for k, expected_v in expected_con.items():
-        # Do not verify authentication related fields.
-        if k == "password":
-            continue
-        msg = "Field %s does not exist in wifi connection info %s." % (
-            k, current_con)
-        if k not in current_con:
-            raise signals.TestFailure(msg)
-        actual_v = current_con[k]
-        if k in case_insensitive:
-            actual_v = actual_v.lower()
-            expected_v = expected_v.lower()
-        msg = "Expected %s to be %s, actual %s is %s." % (k, expected_v, k,
-                                                          actual_v)
-        if actual_v != expected_v:
-            raise signals.TestFailure(msg)
-
-
-def check_autoconnect_to_open_network(
-        ad, conn_timeout=WIFI_CONNECTION_TIMEOUT_DEFAULT):
-    """Connects to any open WiFI AP
-     Args:
-         timeout value in sec to wait for UE to connect to a WiFi AP
-     Returns:
-         True if UE connects to WiFi AP (supplicant_state = completed)
-         False if UE fails to complete connection within WIFI_CONNECTION_TIMEOUT time.
-    """
-    if ad.droid.wifiCheckState():
-        return True
-    ad.droid.wifiToggleState()
-    wifi_connection_state = None
-    timeout = time.time() + conn_timeout
-    while wifi_connection_state != "completed":
-        wifi_connection_state = ad.droid.wifiGetConnectionInfo(
-        )['supplicant_state']
-        if time.time() > timeout:
-            ad.log.warning("Failed to connect to WiFi AP")
-            return False
-    return True
-
-
-def expand_enterprise_config_by_phase2(config):
-    """Take an enterprise config and generate a list of configs, each with
-    a different phase2 auth type.
-
-    Args:
-        config: A dict representing enterprise config.
-
-    Returns
-        A list of enterprise configs.
-    """
-    results = []
-    phase2_types = WifiEnums.EapPhase2
-    if config[WifiEnums.Enterprise.EAP] == WifiEnums.Eap.PEAP:
-        # Skip unsupported phase2 types for PEAP.
-        phase2_types = [WifiEnums.EapPhase2.GTC, WifiEnums.EapPhase2.MSCHAPV2]
-    for phase2_type in phase2_types:
-        # Skip a special case for passpoint TTLS.
-        if (WifiEnums.Enterprise.FQDN in config
-                and phase2_type == WifiEnums.EapPhase2.GTC):
-            continue
-        c = dict(config)
-        c[WifiEnums.Enterprise.PHASE2] = phase2_type.value
-        results.append(c)
-    return results
-
-
-def generate_eap_test_name(config, ad=None):
-    """ Generates a test case name based on an EAP configuration.
-
-    Args:
-        config: A dict representing an EAP credential.
-        ad object: Redundant but required as the same param is passed
-                   to test_func in run_generated_tests
-
-    Returns:
-        A string representing the name of a generated EAP test case.
-    """
-    eap = WifiEnums.Eap
-    eap_phase2 = WifiEnums.EapPhase2
-    Ent = WifiEnums.Enterprise
-    name = "test_connect-"
-    eap_name = ""
-    for e in eap:
-        if e.value == config[Ent.EAP]:
-            eap_name = e.name
-            break
-    if "peap0" in config[WifiEnums.SSID_KEY].lower():
-        eap_name = "PEAP0"
-    if "peap1" in config[WifiEnums.SSID_KEY].lower():
-        eap_name = "PEAP1"
-    name += eap_name
-    if Ent.PHASE2 in config:
-        for e in eap_phase2:
-            if e.value == config[Ent.PHASE2]:
-                name += "-{}".format(e.name)
-                break
-    return name
-
-
-def group_attenuators(attenuators):
-    """Groups a list of attenuators into attenuator groups for backward
-    compatibility reasons.
-
-    Most legacy Wi-Fi setups have two attenuators each connected to a separate
-    AP. The new Wi-Fi setup has four attenuators, each connected to one channel
-    on an AP, so two of them are connected to one AP.
-
-    To make the existing scripts work in the new setup, when the script needs
-    to attenuate one AP, it needs to set attenuation on both attenuators
-    connected to the same AP.
-
-    This function groups attenuators properly so the scripts work in both
-    legacy and new Wi-Fi setups.
-
-    Args:
-        attenuators: A list of attenuator objects, either two or four in length.
-
-    Raises:
-        signals.TestFailure is raised if the attenuator list does not have two
-        or four objects.
-    """
-    attn0 = attenuator.AttenuatorGroup("AP0")
-    attn1 = attenuator.AttenuatorGroup("AP1")
-    # Legacy testbed setup has two attenuation channels.
-    num_of_attns = len(attenuators)
-    if num_of_attns == 2:
-        attn0.add(attenuators[0])
-        attn1.add(attenuators[1])
-    elif num_of_attns == 4:
-        attn0.add(attenuators[0])
-        attn0.add(attenuators[1])
-        attn1.add(attenuators[2])
-        attn1.add(attenuators[3])
-    else:
-        asserts.fail(("Either two or four attenuators are required for this "
-                      "test, but found %s") % num_of_attns)
-    return [attn0, attn1]
-
-
-def set_attns(attenuator, attn_val_name, roaming_attn=ROAMING_ATTN):
-    """Sets attenuation values on attenuators used in this test.
-
-    Args:
-        attenuator: The attenuator object.
-        attn_val_name: Name of the attenuation value pair to use.
-        roaming_attn: Dictionary specifying the attenuation params.
-    """
-    logging.info("Set attenuation values to %s", roaming_attn[attn_val_name])
-    try:
-        attenuator[0].set_atten(roaming_attn[attn_val_name][0])
-        attenuator[1].set_atten(roaming_attn[attn_val_name][1])
-        attenuator[2].set_atten(roaming_attn[attn_val_name][2])
-        attenuator[3].set_atten(roaming_attn[attn_val_name][3])
-    except:
-        logging.exception("Failed to set attenuation values %s.",
-                          attn_val_name)
-        raise
-
-
-def set_attns_steps(attenuators,
-                    atten_val_name,
-                    roaming_attn=ROAMING_ATTN,
-                    steps=10,
-                    wait_time=12):
-    """Set attenuation values on attenuators used in this test. It will change
-    the attenuation values linearly from current value to target value step by
-    step.
-
-    Args:
-        attenuators: The list of attenuator objects that you want to change
-                     their attenuation value.
-        atten_val_name: Name of the attenuation value pair to use.
-        roaming_attn: Dictionary specifying the attenuation params.
-        steps: Number of attenuator changes to reach the target value.
-        wait_time: Sleep time for each change of attenuator.
-    """
-    logging.info("Set attenuation values to %s in %d step(s)",
-                 roaming_attn[atten_val_name], steps)
-    start_atten = [attenuator.get_atten() for attenuator in attenuators]
-    target_atten = roaming_attn[atten_val_name]
-    for current_step in range(steps):
-        progress = (current_step + 1) / steps
-        for i, attenuator in enumerate(attenuators):
-            amount_since_start = (target_atten[i] - start_atten[i]) * progress
-            attenuator.set_atten(round(start_atten[i] + amount_since_start))
-        time.sleep(wait_time)
-
-
-def trigger_roaming_and_validate(dut,
-                                 attenuator,
-                                 attn_val_name,
-                                 expected_con,
-                                 roaming_attn=ROAMING_ATTN):
-    """Sets attenuators to trigger roaming and validate the DUT connected
-    to the BSSID expected.
-
-    Args:
-        attenuator: The attenuator object.
-        attn_val_name: Name of the attenuation value pair to use.
-        expected_con: The network information of the expected network.
-        roaming_attn: Dictionary specifying the attenaution params.
-    """
-    expected_con = {
-        WifiEnums.SSID_KEY: expected_con[WifiEnums.SSID_KEY],
-        WifiEnums.BSSID_KEY: expected_con["bssid"],
-    }
-    set_attns_steps(attenuator, attn_val_name, roaming_attn)
-
-    verify_wifi_connection_info(dut, expected_con)
-    expected_bssid = expected_con[WifiEnums.BSSID_KEY]
-    logging.info("Roamed to %s successfully", expected_bssid)
-    if not validate_connection(dut):
-        raise signals.TestFailure("Fail to connect to internet on %s" %
-                                  expected_bssid)
-
-
-def create_softap_config():
-    """Create a softap config with random ssid and password."""
-    ap_ssid = "softap_" + utils.rand_ascii_str(8)
-    ap_password = utils.rand_ascii_str(8)
-    logging.info("softap setup: %s %s", ap_ssid, ap_password)
-    config = {
-        WifiEnums.SSID_KEY: ap_ssid,
-        WifiEnums.PWD_KEY: ap_password,
-    }
-    return config
-
-
-def wait_for_expected_number_of_softap_clients(ad, callbackId,
-                                               expected_num_of_softap_clients):
-    """Wait for the number of softap clients to be updated as expected.
-    Args:
-        callbackId: Id of the callback associated with registering.
-        expected_num_of_softap_clients: expected number of softap clients.
-    """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-        callbackId) + wifi_constants.SOFTAP_NUMBER_CLIENTS_CHANGED
-    clientData = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)['data']
-    clientCount = clientData[wifi_constants.SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY]
-    clientMacAddresses = clientData[
-        wifi_constants.SOFTAP_CLIENTS_MACS_CALLBACK_KEY]
-    asserts.assert_equal(
-        clientCount, expected_num_of_softap_clients,
-        "The number of softap clients doesn't match the expected number")
-    asserts.assert_equal(
-        len(clientMacAddresses), expected_num_of_softap_clients,
-        "The number of mac addresses doesn't match the expected number")
-    for macAddress in clientMacAddresses:
-        asserts.assert_true(checkMacAddress(macAddress),
-                            "An invalid mac address was returned")
-
-
-def checkMacAddress(input):
-    """Validate whether a string is a valid mac address or not.
-
-    Args:
-        input: The string to validate.
-
-    Returns: True/False, returns true for a valid mac address and false otherwise.
-    """
-    macValidationRegex = "[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$"
-    if re.match(macValidationRegex, input.lower()):
-        return True
-    return False
-
-
-def wait_for_expected_softap_state(ad, callbackId, expected_softap_state):
-    """Wait for the expected softap state change.
-    Args:
-        callbackId: Id of the callback associated with registering.
-        expected_softap_state: The expected softap state.
-    """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-        callbackId) + wifi_constants.SOFTAP_STATE_CHANGED
-    asserts.assert_equal(
-        ad.ed.pop_event(eventStr, SHORT_TIMEOUT)['data'][
-            wifi_constants.SOFTAP_STATE_CHANGE_CALLBACK_KEY],
-        expected_softap_state,
-        "Softap state doesn't match with expected state")
-
-
-def get_current_number_of_softap_clients(ad, callbackId):
-    """pop up all of softap client updated event from queue.
-    Args:
-        callbackId: Id of the callback associated with registering.
-
-    Returns:
-        If exist aleast callback, returns last updated number_of_softap_clients.
-        Returns None when no any match callback event in queue.
-    """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-        callbackId) + wifi_constants.SOFTAP_NUMBER_CLIENTS_CHANGED
-    events = ad.ed.pop_all(eventStr)
-    for event in events:
-        num_of_clients = event['data'][
-            wifi_constants.SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY]
-    if len(events) == 0:
-        return None
-    return num_of_clients
-
-
-def get_current_softap_info(ad, callbackId, need_to_wait):
-    """pop up all of softap info changed event from queue.
-    Args:
-        callbackId: Id of the callback associated with registering.
-        need_to_wait: Wait for the info callback event before pop all.
-    Returns:
-        Returns last updated information of softap.
-    """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-        callbackId) + wifi_constants.SOFTAP_INFO_CHANGED
-    ad.log.debug("softap info dump from eventStr %s", eventStr)
-    frequency = 0
-    bandwidth = 0
-    if (need_to_wait):
-        event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
-        frequency = event['data'][
-            wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
-        bandwidth = event['data'][
-            wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
-        ad.log.info("softap info updated, frequency is %s, bandwidth is %s",
-                    frequency, bandwidth)
-
-    events = ad.ed.pop_all(eventStr)
-    for event in events:
-        frequency = event['data'][
-            wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
-        bandwidth = event['data'][
-            wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
-    ad.log.info("softap info, frequency is %s, bandwidth is %s", frequency,
-                bandwidth)
-    return frequency, bandwidth
-
-def get_current_softap_infos(ad, callbackId, need_to_wait):
-    """pop up all of softap info list changed event from queue.
-    Args:
-        callbackId: Id of the callback associated with registering.
-        need_to_wait: Wait for the info callback event before pop all.
-    Returns:
-        Returns last updated informations of softap.
-    """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-        callbackId) + wifi_constants.SOFTAP_INFOLIST_CHANGED
-    ad.log.debug("softap info dump from eventStr %s", eventStr)
-
-    if (need_to_wait):
-        event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
-        infos = event['data']
-
-    events = ad.ed.pop_all(eventStr)
-    for event in events:
-        infos = event['data']
-
-    for info in infos:
-        frequency = info[
-            wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
-        bandwidth = info[
-            wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
-        wifistandard = info[
-            wifi_constants.SOFTAP_INFO_WIFISTANDARD_CALLBACK_KEY]
-        bssid = info[
-            wifi_constants.SOFTAP_INFO_BSSID_CALLBACK_KEY]
-        ad.log.info(
-                "softap info, freq:%s, bw:%s, wifistandard:%s, bssid:%s",
-                frequency, bandwidth, wifistandard, bssid)
-
-    return infos
-
-def get_current_softap_capability(ad, callbackId, need_to_wait):
-    """pop up all of softap info list changed event from queue.
-    Args:
-        callbackId: Id of the callback associated with registering.
-        need_to_wait: Wait for the info callback event before pop all.
-    Returns:
-        Returns last updated capability of softap.
-    """
-    eventStr = wifi_constants.SOFTAP_CALLBACK_EVENT + str(
-            callbackId) + wifi_constants.SOFTAP_CAPABILITY_CHANGED
-    ad.log.debug("softap capability dump from eventStr %s", eventStr)
-    if (need_to_wait):
-        event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
-        capability = event['data']
-
-    events = ad.ed.pop_all(eventStr)
-    for event in events:
-        capability = event['data']
-
-    return capability
-
-def get_ssrdumps(ad):
-    """Pulls dumps in the ssrdump dir
-    Args:
-        ad: android device object.
-    """
-    logs = ad.get_file_names("/data/vendor/ssrdump/")
-    if logs:
-        ad.log.info("Pulling ssrdumps %s", logs)
-        log_path = os.path.join(ad.device_log_path, "SSRDUMPS_%s" % ad.serial)
-        os.makedirs(log_path, exist_ok=True)
-        ad.pull_files(logs, log_path)
-    ad.adb.shell("find /data/vendor/ssrdump/ -type f -delete",
-                 ignore_status=True)
-
-
-def start_pcap(pcap, wifi_band, test_name):
-    """Start packet capture in monitor mode.
-
-    Args:
-        pcap: packet capture object
-        wifi_band: '2g' or '5g' or 'dual'
-        test_name: test name to be used for pcap file name
-
-    Returns:
-        Dictionary with wifi band as key and the tuple
-        (pcap Process object, log directory) as the value
-    """
-    log_dir = os.path.join(
-        context.get_current_context().get_full_output_path(), 'PacketCapture')
-    os.makedirs(log_dir, exist_ok=True)
-    if wifi_band == 'dual':
-        bands = [BAND_2G, BAND_5G]
-    else:
-        bands = [wifi_band]
-    procs = {}
-    for band in bands:
-        proc = pcap.start_packet_capture(band, log_dir, test_name)
-        procs[band] = (proc, os.path.join(log_dir, test_name))
-    return procs
-
-
-def stop_pcap(pcap, procs, test_status=None):
-    """Stop packet capture in monitor mode.
-
-    Since, the pcap logs in monitor mode can be very large, we will
-    delete them if they are not required. 'test_status' if True, will delete
-    the pcap files. If False, we will keep them.
-
-    Args:
-        pcap: packet capture object
-        procs: dictionary returned by start_pcap
-        test_status: status of the test case
-    """
-    for proc, fname in procs.values():
-        pcap.stop_packet_capture(proc)
-
-    if test_status:
-        shutil.rmtree(os.path.dirname(fname))
-
-
-def verify_mac_not_found_in_pcap(ad, mac, packets):
-    """Verify that a mac address is not found in the captured packets.
-
-    Args:
-        ad: android device object
-        mac: string representation of the mac address
-        packets: packets obtained by rdpcap(pcap_fname)
-    """
-    for pkt in packets:
-        logging.debug("Packet Summary = %s", pkt.summary())
-        if mac in pkt.summary():
-            asserts.fail("Device %s caught Factory MAC: %s in packet sniffer."
-                         "Packet = %s" % (ad.serial, mac, pkt.show()))
-
-
-def verify_mac_is_found_in_pcap(ad, mac, packets):
-    """Verify that a mac address is found in the captured packets.
-
-    Args:
-        ad: android device object
-        mac: string representation of the mac address
-        packets: packets obtained by rdpcap(pcap_fname)
-    """
-    for pkt in packets:
-        if mac in pkt.summary():
-            return
-    asserts.fail("Did not find MAC = %s in packet sniffer."
-                 "for device %s" % (mac, ad.serial))
-
-
-def start_cnss_diags(ads, cnss_diag_file, pixel_models):
-    for ad in ads:
-        start_cnss_diag(ad, cnss_diag_file, pixel_models)
-
-
-def start_cnss_diag(ad, cnss_diag_file, pixel_models):
-    """Start cnss_diag to record extra wifi logs
-
-    Args:
-        ad: android device object.
-        cnss_diag_file: cnss diag config file to push to device.
-        pixel_models: pixel devices.
-    """
-    if ad.model not in pixel_models:
-        ad.log.info("Device not supported to collect pixel logger")
-        return
-    if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
-        prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
-    else:
-        prop = wifi_constants.CNSS_DIAG_PROP
-    if ad.adb.getprop(prop) != 'true':
-        if not int(
-                ad.adb.shell("ls -l %s%s | wc -l" %
-                             (CNSS_DIAG_CONFIG_PATH, CNSS_DIAG_CONFIG_FILE))):
-            ad.adb.push("%s %s" % (cnss_diag_file, CNSS_DIAG_CONFIG_PATH))
-        ad.adb.shell(
-            "find /data/vendor/wifi/cnss_diag/wlan_logs/ -type f -delete",
-            ignore_status=True)
-        ad.adb.shell("setprop %s true" % prop, ignore_status=True)
-
-
-def stop_cnss_diags(ads, pixel_models):
-    for ad in ads:
-        stop_cnss_diag(ad, pixel_models)
-
-
-def stop_cnss_diag(ad, pixel_models):
-    """Stops cnss_diag
-
-    Args:
-        ad: android device object.
-        pixel_models: pixel devices.
-    """
-    if ad.model not in pixel_models:
-        ad.log.info("Device not supported to collect pixel logger")
-        return
-    if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
-        prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
-    else:
-        prop = wifi_constants.CNSS_DIAG_PROP
-    ad.adb.shell("setprop %s false" % prop, ignore_status=True)
-
-
-def get_cnss_diag_log(ad):
-    """Pulls the cnss_diag logs in the wlan_logs dir
-    Args:
-        ad: android device object.
-    """
-    logs = ad.get_file_names("/data/vendor/wifi/cnss_diag/wlan_logs/")
-    if logs:
-        ad.log.info("Pulling cnss_diag logs %s", logs)
-        log_path = os.path.join(ad.device_log_path, "CNSS_DIAG_%s" % ad.serial)
-        os.makedirs(log_path, exist_ok=True)
-        ad.pull_files(logs, log_path)
-
-
-LinkProbeResult = namedtuple(
-    'LinkProbeResult',
-    ('is_success', 'stdout', 'elapsed_time', 'failure_reason'))
-
-
-def send_link_probe(ad):
-    """Sends a link probe to the currently connected AP, and returns whether the
-    probe succeeded or not.
-
-    Args:
-         ad: android device object
-    Returns:
-        LinkProbeResult namedtuple
-    """
-    stdout = ad.adb.shell('cmd wifi send-link-probe')
-    asserts.assert_false('Error' in stdout or 'Exception' in stdout,
-                         'Exception while sending link probe: ' + stdout)
-
-    is_success = False
-    elapsed_time = None
-    failure_reason = None
-    if 'succeeded' in stdout:
-        is_success = True
-        elapsed_time = next(
-            (int(token) for token in stdout.split() if token.isdigit()), None)
-    elif 'failed with reason' in stdout:
-        failure_reason = next(
-            (int(token) for token in stdout.split() if token.isdigit()), None)
-    else:
-        asserts.fail('Unexpected link probe result: ' + stdout)
-
-    return LinkProbeResult(is_success=is_success,
-                           stdout=stdout,
-                           elapsed_time=elapsed_time,
-                           failure_reason=failure_reason)
-
-
-def send_link_probes(ad, num_probes, delay_sec):
-    """Sends a sequence of link probes to the currently connected AP, and
-    returns whether the probes succeeded or not.
-
-    Args:
-         ad: android device object
-         num_probes: number of probes to perform
-         delay_sec: delay time between probes, in seconds
-    Returns:
-        List[LinkProbeResult] one LinkProbeResults for each probe
-    """
-    logging.info('Sending link probes')
-    results = []
-    for _ in range(num_probes):
-        # send_link_probe() will also fail the test if it sees an exception
-        # in the stdout of the adb shell command
-        result = send_link_probe(ad)
-        logging.info('link probe results: ' + str(result))
-        results.append(result)
-        time.sleep(delay_sec)
-
-    return results
-
-
-def ap_setup(test, index, ap, network, bandwidth=80, channel=6):
-    """Set up the AP with provided network info.
-
-        Args:
-            test: the calling test class object.
-            index: int, index of the AP.
-            ap: access_point object of the AP.
-            network: dict with information of the network, including ssid,
-                     password and bssid.
-            bandwidth: the operation bandwidth for the AP, default 80MHz.
-            channel: the channel number for the AP.
-        Returns:
-            brconfigs: the bridge interface configs
-        """
-    bss_settings = []
-    ssid = network[WifiEnums.SSID_KEY]
-    test.access_points[index].close()
-    time.sleep(5)
-
-    # Configure AP as required.
-    if "password" in network.keys():
-        password = network["password"]
-        security = hostapd_security.Security(security_mode="wpa",
-                                             password=password)
-    else:
-        security = hostapd_security.Security(security_mode=None, password=None)
-    config = hostapd_ap_preset.create_ap_preset(channel=channel,
-                                                ssid=ssid,
-                                                security=security,
-                                                bss_settings=bss_settings,
-                                                vht_bandwidth=bandwidth,
-                                                profile_name='whirlwind',
-                                                iface_wlan_2g=ap.wlan_2g,
-                                                iface_wlan_5g=ap.wlan_5g)
-    ap.start_ap(config)
-    logging.info("AP started on channel {} with SSID {}".format(channel, ssid))
-
-
-def turn_ap_off(test, AP):
-    """Bring down hostapd on the Access Point.
-    Args:
-        test: The test class object.
-        AP: int, indicating which AP to turn OFF.
-    """
-    hostapd_2g = test.access_points[AP - 1]._aps['wlan0'].hostapd
-    if hostapd_2g.is_alive():
-        hostapd_2g.stop()
-        logging.debug('Turned WLAN0 AP%d off' % AP)
-    hostapd_5g = test.access_points[AP - 1]._aps['wlan1'].hostapd
-    if hostapd_5g.is_alive():
-        hostapd_5g.stop()
-        logging.debug('Turned WLAN1 AP%d off' % AP)
-
-
-def turn_ap_on(test, AP):
-    """Bring up hostapd on the Access Point.
-    Args:
-        test: The test class object.
-        AP: int, indicating which AP to turn ON.
-    """
-    hostapd_2g = test.access_points[AP - 1]._aps['wlan0'].hostapd
-    if not hostapd_2g.is_alive():
-        hostapd_2g.start(hostapd_2g.config)
-        logging.debug('Turned WLAN0 AP%d on' % AP)
-    hostapd_5g = test.access_points[AP - 1]._aps['wlan1'].hostapd
-    if not hostapd_5g.is_alive():
-        hostapd_5g.start(hostapd_5g.config)
-        logging.debug('Turned WLAN1 AP%d on' % AP)
-
-
-def turn_location_off_and_scan_toggle_off(ad):
-    """Turns off wifi location scans."""
-    utils.set_location_service(ad, False)
-    ad.droid.wifiScannerToggleAlwaysAvailable(False)
-    msg = "Failed to turn off location service's scan."
-    asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)
-
-
-def set_softap_channel(dut, ap_iface='wlan1', cs_count=10, channel=2462):
-    """ Set SoftAP mode channel
-
-    Args:
-        dut: android device object
-        ap_iface: interface of SoftAP mode.
-        cs_count: how many beacon frames before switch channel, default = 10
-        channel: a wifi channel.
-    """
-    chan_switch_cmd = 'hostapd_cli -i {} chan_switch {} {}'
-    chan_switch_cmd_show = chan_switch_cmd.format(ap_iface, cs_count, channel)
-    dut.log.info('adb shell {}'.format(chan_switch_cmd_show))
-    chan_switch_result = dut.adb.shell(
-        chan_switch_cmd.format(ap_iface, cs_count, channel))
-    if chan_switch_result == 'OK':
-        dut.log.info('switch hotspot channel to {}'.format(channel))
-        return chan_switch_result
-
-    asserts.fail("Failed to switch hotspot channel")
-
-def get_wlan0_link(dut):
-    """ get wlan0 interface status"""
-    get_wlan0 = 'wpa_cli -iwlan0 -g@android:wpa_wlan0 IFNAME=wlan0 status'
-    out = dut.adb.shell(get_wlan0)
-    out = dict(re.findall(r'(\S+)=(".*?"|\S+)', out))
-    asserts.assert_true("ssid" in out,
-                        "Client doesn't connect to any network")
-    return out
-
-def verify_11ax_wifi_connection(ad, wifi6_supported_models, wifi6_ap):
-    """Verify 11ax for wifi connection.
-
-    Args:
-      ad: adndroid device object
-      wifi6_supported_models: device supporting 11ax.
-      wifi6_ap: if the AP supports 11ax.
-    """
-    if wifi6_ap and ad.model in wifi6_supported_models:
-        logging.info("Verifying 11ax. Model: %s" % ad.model)
-        asserts.assert_true(
-            ad.droid.wifiGetConnectionStandard() ==
-            wifi_constants.WIFI_STANDARD_11AX, "DUT did not connect to 11ax.")
-
-def verify_11ax_softap(dut, dut_client, wifi6_supported_models):
-    """Verify 11ax SoftAp if devices support it.
-
-    Check if both DUT and DUT client supports 11ax, then SoftAp turns on
-    with 11ax mode and DUT client can connect to it.
-
-    Args:
-      dut: Softap device.
-      dut_client: Client connecting to softap.
-      wifi6_supported_models: List of device models supporting 11ax.
-    """
-    if dut.model in wifi6_supported_models and dut_client.model in wifi6_supported_models:
-        logging.info(
-            "Verifying 11ax softap. DUT model: %s, DUT Client model: %s",
-            dut.model, dut_client.model)
-        asserts.assert_true(
-            dut_client.droid.wifiGetConnectionStandard() ==
-            wifi_constants.WIFI_STANDARD_11AX,
-            "DUT failed to start SoftAp in 11ax.")
-
-def check_available_channels_in_bands_2_5(dut, country_code):
-    """Check if DUT is capable of enable BridgedAp.
-    #TODO: Find a way to make this function flexible by taking an argument.
-
-    Args:
-        country_code: country code, e.g., 'US', 'JP'.
-    Returns:
-        True: If DUT is capable of enable BridgedAp.
-        False: If DUT is not capable of enable BridgedAp.
-    """
-    set_wifi_country_code(dut, country_code)
-    country = dut.droid.wifiGetCountryCode()
-    dut.log.info("DUT current country code : {}".format(country))
-    # Wi-Fi ON and OFF to make sure country code take effet.
-    wifi_toggle_state(dut, True)
-    wifi_toggle_state(dut, False)
-
-    # Register SoftAp Callback and get SoftAp capability.
-    callbackId = dut.droid.registerSoftApCallback()
-    capability = get_current_softap_capability(dut, callbackId, True)
-    dut.droid.unregisterSoftApCallback(callbackId)
-
-    if capability[wifi_constants.
-                  SOFTAP_CAPABILITY_24GHZ_SUPPORTED_CHANNEL_LIST] and \
-        capability[wifi_constants.
-                   SOFTAP_CAPABILITY_5GHZ_SUPPORTED_CHANNEL_LIST]:
-        return True
-    return False
-
-
-@retry(stop=stop_after_attempt(5), wait=wait_fixed(2))
-def validate_ping_between_two_clients(dut1, dut2):
-    """Make 2 DUT ping each other.
-
-    Args:
-        dut1: An AndroidDevice object.
-        dut2: An AndroidDevice object.
-    """
-    # Get DUTs' IPv4 addresses.
-    dut1_ip = ""
-    dut2_ip = ""
-    try:
-        dut1_ip = dut1.droid.connectivityGetIPv4Addresses('wlan0')[0]
-    except IndexError as e:
-        dut1.log.info(
-            "{} has no Wi-Fi connection, cannot get IPv4 address."
-            .format(dut1.serial))
-    try:
-        dut2_ip = dut2.droid.connectivityGetIPv4Addresses('wlan0')[0]
-    except IndexError as e:
-        dut2.log.info(
-            "{} has no Wi-Fi connection, cannot get IPv4 address."
-            .format(dut2.serial))
-    # Test fail if not able to obtain two DUT's IPv4 addresses.
-    asserts.assert_true(dut1_ip and dut2_ip,
-                        "Ping failed because no DUT's IPv4 address")
-
-    dut1.log.info("{} IPv4 addresses : {}".format(dut1.serial, dut1_ip))
-    dut2.log.info("{} IPv4 addresses : {}".format(dut2.serial, dut2_ip))
-
-    # Two clients ping each other
-    dut1.log.info("{} ping {}".format(dut1_ip, dut2_ip))
-    asserts.assert_true(
-        utils.adb_shell_ping(dut1, count=10, dest_ip=dut2_ip,
-                             timeout=20),
-        "%s ping %s failed" % (dut1.serial, dut2_ip))
-
-    dut2.log.info("{} ping {}".format(dut2_ip, dut1_ip))
-    asserts.assert_true(
-        utils.adb_shell_ping(dut2, count=10, dest_ip=dut1_ip,
-                             timeout=20),
-        "%s ping %s failed" % (dut2.serial, dut1_ip))
-
diff --git a/src/antlion/test_utils_tests/__init__.py b/src/antlion/test_utils_tests/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/acts_import_test_utils_test.py b/src/antlion/test_utils_tests/acts_import_test_utils_test.py
deleted file mode 100755
index 4e2ce49..0000000
--- a/src/antlion/test_utils_tests/acts_import_test_utils_test.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-
-class ActsImportTestUtilsTest(unittest.TestCase):
-    """This test class has unit tests for the implementation of everything
-    under acts_contrib.test_utils.*
-    """
-
-    def test_import_successful(self):
-        """ Test to return true if all imports were successful.
-
-        This test will fail if any import was unsuccessful.
-        """
-        try:
-            from antlion import utils
-
-            from antlion.test_utils.bt import BleEnum
-            from antlion.test_utils.bt import BluetoothBaseTest
-            from antlion.test_utils.bt import BluetoothCarHfpBaseTest
-            from antlion.test_utils.bt import BtEnum
-            from antlion.test_utils.bt import GattConnectedBaseTest
-            from antlion.test_utils.bt import GattEnum
-            from antlion.test_utils.bt import bt_contacts_utils
-            from antlion.test_utils.bt import bt_gatt_utils
-            from antlion.test_utils.bt import bt_test_utils
-            from antlion.test_utils.bt import native_bt_test_utils
-
-            from antlion.test_utils.net import connectivity_const
-            from antlion.test_utils.net import connectivity_const
-
-            from antlion.test_utils.wifi import wifi_constants
-            from antlion.test_utils.wifi import wifi_test_utils
-
-        except Exception:
-            self.fail('Unable to import all supported test_utils')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/audio_analysis_integrationtest.py b/src/antlion/test_utils_tests/audio_analysis_integrationtest.py
deleted file mode 100644
index e7c17a2..0000000
--- a/src/antlion/test_utils_tests/audio_analysis_integrationtest.py
+++ /dev/null
@@ -1,360 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Note: This test has been labelled as an integration test due to its use of
-# real data, and the five to six second execution time.
-import logging
-import numpy
-import os
-import unittest
-
-import antlion.test_utils.audio_analysis_lib.audio_analysis as audio_analysis
-import antlion.test_utils.audio_analysis_lib.audio_data as audio_data
-
-
-class SpectralAnalysisTest(unittest.TestCase):
-    def setUp(self):
-        """Uses the same seed to generate noise for each test."""
-        numpy.random.seed(0)
-
-    def dummy_peak_detection(self, array, window_size):
-        """Detects peaks in an array in simple way.
-
-        A point (i, array[i]) is a peak if array[i] is the maximum among
-        array[i - half_window_size] to array[i + half_window_size].
-        If array[i - half_window_size] to array[i + half_window_size] are all
-        equal, then there is no peak in this window.
-
-        Args:
-            array: The input array to detect peaks in. Array is a list of
-                absolute values of the magnitude of transformed coefficient.
-            window_size: The window to detect peaks.
-
-        Returns:
-            A list of tuples:
-                [(peak_index_1, peak_value_1), (peak_index_2, peak_value_2),
-                ...]
-                where the tuples are sorted by peak values.
-
-        """
-        half_window_size = window_size / 2
-        length = len(array)
-
-        def mid_is_peak(array, mid, left, right):
-            """Checks if value at mid is the largest among left to right.
-
-            Args:
-                array: A list of numbers.
-                mid: The mid index.
-                left: The left index.
-                rigth: The right index.
-
-            Returns:
-                True if array[index] is the maximum among numbers in array
-                    between index [left, right] inclusively.
-
-            """
-            value_mid = array[int(mid)]
-            for index in range(int(left), int(right) + 1):
-                if index == mid:
-                    continue
-                if array[index] >= value_mid:
-                    return False
-            return True
-
-        results = []
-        for mid in range(length):
-            left = max(0, mid - half_window_size)
-            right = min(length - 1, mid + half_window_size)
-            if mid_is_peak(array, mid, left, right):
-                results.append((mid, array[int(mid)]))
-
-        # Sort the peaks by values.
-        return sorted(results, key=lambda x: x[1], reverse=True)
-
-    def test_peak_detection(self):
-        array = [0, 1, 2, 3, 4, 3, 2, 1, 0, 1, 2, 3, 5, 3, 2, 1, 1, 1, 1, 1]
-        result = audio_analysis.peak_detection(array, 4)
-        golden_answer = [(12, 5), (4, 4)]
-        self.assertEqual(result, golden_answer)
-
-    def test_peak_detection_large(self):
-        array = numpy.random.uniform(0, 1, 1000000)
-        window_size = 100
-        logging.debug('Test large array using dummy peak detection')
-        dummy_answer = self.dummy_peak_detection(array, window_size)
-        logging.debug('Test large array using improved peak detection')
-        improved_answer = audio_analysis.peak_detection(array, window_size)
-        logging.debug('Compare the result')
-        self.assertEqual(dummy_answer, improved_answer)
-
-    def test_spectral_analysis(self):
-        rate = 48000
-        length_in_secs = 0.5
-        freq_1 = 490.0
-        freq_2 = 60.0
-        coeff_1 = 1
-        coeff_2 = 0.3
-        samples = length_in_secs * rate
-        noise = numpy.random.standard_normal(int(samples)) * 0.005
-        x = numpy.linspace(0.0, (samples - 1) * 1.0 / rate, samples)
-        y = (coeff_1 * numpy.sin(freq_1 * 2.0 * numpy.pi * x) + coeff_2 *
-             numpy.sin(freq_2 * 2.0 * numpy.pi * x)) + noise
-        results = audio_analysis.spectral_analysis(y, rate)
-        # Results should contains
-        # [(490, 1*k), (60, 0.3*k), (0, 0.1*k)] where 490Hz is the dominant
-        # frequency with coefficient 1, 60Hz is the second dominant frequency
-        # with coefficient 0.3, 0Hz is from Gaussian noise with coefficient
-        # around 0.1. The k constant is resulted from window function.
-        logging.debug('Results: %s', results)
-        self.assertTrue(abs(results[0][0] - freq_1) < 1)
-        self.assertTrue(abs(results[1][0] - freq_2) < 1)
-        self.assertTrue(
-            abs(results[0][1] / results[1][1] - coeff_1 / coeff_2) < 0.01)
-
-    def test_spectral_snalysis_real_data(self):
-        """This unittest checks the spectral analysis works on real data."""
-        file_path = os.path.join(
-            os.path.dirname(__file__), '../../../acts/framework/tests/test_data', '1k_2k.raw')
-        binary = open(file_path, 'rb').read()
-        data = audio_data.AudioRawData(binary, 2, 'S32_LE')
-        saturate_value = audio_data.get_maximum_value_from_sample_format(
-            'S32_LE')
-        golden_frequency = [1000, 2000]
-        for channel in [0, 1]:
-            normalized_signal = audio_analysis.normalize_signal(
-                data.channel_data[channel], saturate_value)
-            spectral = audio_analysis.spectral_analysis(normalized_signal,
-                                                        48000, 0.02)
-            logging.debug('channel %s: %s', channel, spectral)
-            self.assertTrue(
-                abs(spectral[0][0] - golden_frequency[channel]) < 5,
-                'Dominant frequency is not correct')
-
-    def test_not_meaningful_data(self):
-        """Checks that sepectral analysis handles un-meaningful data."""
-        rate = 48000
-        length_in_secs = 0.5
-        samples = length_in_secs * rate
-        noise_amplitude = audio_analysis.MEANINGFUL_RMS_THRESHOLD * 0.5
-        noise = numpy.random.standard_normal(int(samples)) * noise_amplitude
-        results = audio_analysis.spectral_analysis(noise, rate)
-        self.assertEqual([(0, 0)], results)
-
-    def testEmptyData(self):
-        """Checks that sepectral analysis rejects empty data."""
-        with self.assertRaises(audio_analysis.EmptyDataError):
-            results = audio_analysis.spectral_analysis([], 100)
-
-
-class NormalizeTest(unittest.TestCase):
-    def test_normalize(self):
-        y = [1, 2, 3, 4, 5]
-        normalized_y = audio_analysis.normalize_signal(y, 10)
-        expected = numpy.array([0.1, 0.2, 0.3, 0.4, 0.5])
-        for i in range(len(y)):
-            self.assertEqual(expected[i], normalized_y[i])
-
-
-class AnomalyTest(unittest.TestCase):
-    def setUp(self):
-        """Creates a test signal of sine wave."""
-        # Use the same seed for each test case.
-        numpy.random.seed(0)
-
-        self.block_size = 120
-        self.rate = 48000
-        self.freq = 440
-        length_in_secs = 0.25
-        self.samples = length_in_secs * self.rate
-        x = numpy.linspace(0.0, (self.samples - 1) * 1.0 / self.rate,
-                           self.samples)
-        self.y = numpy.sin(self.freq * 2.0 * numpy.pi * x)
-
-    def add_noise(self):
-        """Add noise to the test signal."""
-        noise_amplitude = 0.3
-        noise = numpy.random.standard_normal(len(self.y)) * noise_amplitude
-        self.y = self.y + noise
-
-    def insert_anomaly(self):
-        """Inserts an anomaly to the test signal.
-
-        The anomaly self.anomaly_samples should be created before calling this
-        method.
-
-        """
-        self.anomaly_start_secs = 0.1
-        self.y = numpy.insert(self.y,
-                              int(self.anomaly_start_secs * self.rate),
-                              self.anomaly_samples)
-
-    def generate_skip_anomaly(self):
-        """Skips a section of test signal."""
-        self.anomaly_start_secs = 0.1
-        self.anomaly_duration_secs = 0.005
-        anomaly_append_secs = self.anomaly_start_secs + self.anomaly_duration_secs
-        anomaly_start_index = self.anomaly_start_secs * self.rate
-        anomaly_append_index = anomaly_append_secs * self.rate
-        self.y = numpy.append(self.y[:int(anomaly_start_index)],
-                              self.y[int(anomaly_append_index):])
-
-    def create_constant_anomaly(self, amplitude):
-        """Creates an anomaly of constant samples.
-
-        Args:
-            amplitude: The amplitude of the constant samples.
-
-        """
-        self.anomaly_duration_secs = 0.005
-        self.anomaly_samples = ([amplitude] *
-                                int(self.anomaly_duration_secs * self.rate))
-
-    def run_analysis(self):
-        """Runs the anomaly detection."""
-        self.results = audio_analysis.anomaly_detection(
-            self.y, self.rate, self.freq, self.block_size)
-        logging.debug('Results: %s', self.results)
-
-    def check_no_anomaly(self):
-        """Verifies that there is no anomaly in detection result."""
-        self.run_analysis()
-        self.assertFalse(self.results)
-
-    def check_anomaly(self):
-        """Verifies that there is anomaly in detection result.
-
-        The detection result should contain anomaly time stamps that are
-        close to where anomaly was inserted. There can be multiple anomalies
-        since the detection depends on the block size.
-
-        """
-        self.run_analysis()
-        self.assertTrue(self.results)
-        # Anomaly can be detected as long as the detection window of block size
-        # overlaps with anomaly.
-        expected_detected_range_secs = (
-            self.anomaly_start_secs - float(self.block_size) / self.rate,
-            self.anomaly_start_secs + self.anomaly_duration_secs)
-        for detected_secs in self.results:
-            self.assertTrue(detected_secs <= expected_detected_range_secs[1])
-            self.assertTrue(detected_secs >= expected_detected_range_secs[0])
-
-    def test_good_signal(self):
-        """Sine wave signal with no noise or anomaly."""
-        self.check_no_anomaly()
-
-    def test_good_signal_noise(self):
-        """Sine wave signal with noise."""
-        self.add_noise()
-        self.check_no_anomaly()
-
-    def test_zero_anomaly(self):
-        """Sine wave signal with no noise but with anomaly.
-
-        This test case simulates underrun in digital data where there will be
-        one block of samples with 0 amplitude.
-
-        """
-        self.create_constant_anomaly(0)
-        self.insert_anomaly()
-        self.check_anomaly()
-
-    def test_zero_anomaly_noise(self):
-        """Sine wave signal with noise and anomaly.
-
-        This test case simulates underrun in analog data where there will be
-        one block of samples with amplitudes close to 0.
-
-        """
-        self.create_constant_anomaly(0)
-        self.insert_anomaly()
-        self.add_noise()
-        self.check_anomaly()
-
-    def test_low_constant_anomaly(self):
-        """Sine wave signal with low constant anomaly.
-
-        The anomaly is one block of constant values.
-
-        """
-        self.create_constant_anomaly(0.05)
-        self.insert_anomaly()
-        self.check_anomaly()
-
-    def test_low_constant_anomaly_noise(self):
-        """Sine wave signal with low constant anomaly and noise.
-
-        The anomaly is one block of constant values.
-
-        """
-        self.create_constant_anomaly(0.05)
-        self.insert_anomaly()
-        self.add_noise()
-        self.check_anomaly()
-
-    def test_high_constant_anomaly(self):
-        """Sine wave signal with high constant anomaly.
-
-        The anomaly is one block of constant values.
-
-        """
-        self.create_constant_anomaly(2)
-        self.insert_anomaly()
-        self.check_anomaly()
-
-    def test_high_constant_anomaly_noise(self):
-        """Sine wave signal with high constant anomaly and noise.
-
-        The anomaly is one block of constant values.
-
-        """
-        self.create_constant_anomaly(2)
-        self.insert_anomaly()
-        self.add_noise()
-        self.check_anomaly()
-
-    def test_skipped_anomaly(self):
-        """Sine wave signal with skipped anomaly.
-
-        The anomaly simulates the symptom where a block is skipped.
-
-        """
-        self.generate_skip_anomaly()
-        self.check_anomaly()
-
-    def test_skipped_anomaly_noise(self):
-        """Sine wave signal with skipped anomaly with noise.
-
-        The anomaly simulates the symptom where a block is skipped.
-
-        """
-        self.generate_skip_anomaly()
-        self.add_noise()
-        self.check_anomaly()
-
-    def test_empty_data(self):
-        """Checks that anomaly detection rejects empty data."""
-        self.y = []
-        with self.assertRaises(audio_analysis.EmptyDataError):
-            self.check_anomaly()
-
-
-if __name__ == '__main__':
-    logging.basicConfig(
-        level=logging.DEBUG,
-        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/audio_quality_measurement_integrationtest.py b/src/antlion/test_utils_tests/audio_quality_measurement_integrationtest.py
deleted file mode 100644
index 824bd60..0000000
--- a/src/antlion/test_utils_tests/audio_quality_measurement_integrationtest.py
+++ /dev/null
@@ -1,267 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Note: This test has been labelled as an integration test due to its use of
-# real data, and the 12+ second execution time. It also generates sine waves
-# during the test, rather than using data that has been pre-calculated.
-
-import math
-import numpy
-import unittest
-
-import antlion.test_utils.audio_analysis_lib.audio_quality_measurement as audio_quality_measurement
-
-
-class NoiseLevelTest(unittest.TestCase):
-    def setUp(self):
-        """Uses the same seed to generate noise for each test."""
-        numpy.random.seed(0)
-
-    def test_noise_level(self):
-        # Generates the standard sin wave with standard_noise portion of noise.
-        rate = 48000
-        length_in_secs = 2
-        frequency = 440
-        amplitude = 1
-        standard_noise = 0.05
-
-        wave = []
-        for index in range(0, rate * length_in_secs):
-            phase = 2.0 * math.pi * frequency * float(index) / float(rate)
-            sine_wave = math.sin(phase)
-            noise = standard_noise * numpy.random.standard_normal()
-            wave.append(float(amplitude) * (sine_wave + noise))
-
-        # Calculates the average value after applying teager operator.
-        teager_value_of_wave, length = 0, len(wave)
-        for i in range(1, length - 1):
-            ith_teager_value = abs(wave[i] * wave[i] - wave[i - 1] * wave[i +
-                                                                          1])
-            ith_teager_value *= max(1, abs(wave[i]))
-            teager_value_of_wave += ith_teager_value
-        teager_value_of_wave /= float(length * (amplitude**2))
-
-        noise = audio_quality_measurement.noise_level(
-            amplitude, frequency, rate, teager_value_of_wave)
-
-        self.assertTrue(abs(noise - standard_noise) < 0.01)
-
-
-class ErrorTest(unittest.TestCase):
-    def test_error(self):
-        value1 = [0.2, 0.4, 0.1, 0.01, 0.01, 0.01]
-        value2 = [0.3, 0.3, 0.08, 0.0095, 0.0098, 0.0099]
-        error = [0.5, 0.25, 0.2, 0.05, 0.02, 0.01]
-        for i in range(len(value1)):
-            ret = audio_quality_measurement.error(value1[i], value2[i])
-            self.assertTrue(abs(ret - error[i]) < 0.001)
-
-
-class QualityMeasurementTest(unittest.TestCase):
-    def setUp(self):
-        """Creates a test signal of sine wave."""
-        numpy.random.seed(0)
-
-        self.rate = 48000
-        self.freq = 440
-        self.amplitude = 1
-        length_in_secs = 2
-        self.samples = length_in_secs * self.rate
-        self.y = []
-        for index in range(self.samples):
-            phase = 2.0 * math.pi * self.freq * float(index) / float(self.rate)
-            sine_wave = math.sin(phase)
-            self.y.append(float(self.amplitude) * sine_wave)
-
-    def add_noise(self):
-        """Adds noise to the test signal."""
-        noise_amplitude = 0.01 * self.amplitude
-        for index in range(self.samples):
-            noise = noise_amplitude * numpy.random.standard_normal()
-            self.y[index] += noise
-
-    def generate_delay(self):
-        """Generates some delays during playing."""
-        self.delay_start_time = [0.200, 0.375, 0.513, 0.814, 1.000, 1.300]
-        self.delay_end_time = [0.201, 0.377, 0.516, 0.824, 1.100, 1.600]
-
-        for i in range(len(self.delay_start_time)):
-            start_index = int(self.delay_start_time[i] * self.rate)
-            end_index = int(self.delay_end_time[i] * self.rate)
-            for j in range(start_index, end_index):
-                self.y[j] = 0
-
-    def generate_artifacts_before_playback(self):
-        """Generates artifacts before playing."""
-        silence_before_playback_end_time = 0.2
-        end_index = int(silence_before_playback_end_time * self.rate)
-        for i in range(0, end_index):
-            self.y[i] = 0
-        noise_start_index = int(0.1 * self.rate)
-        noise_end_index = int(0.1005 * self.rate)
-        for i in range(noise_start_index, noise_end_index):
-            self.y[i] = 3 * self.amplitude
-
-    def generate_artifacts_after_playback(self):
-        """Generates artifacts after playing."""
-        silence_after_playback_start_time = int(1.9 * self.rate)
-        noise_start_index = int(1.95 * self.rate)
-        noise_end_index = int((1.95 + 0.02) * self.rate)
-
-        for i in range(silence_after_playback_start_time, self.samples):
-            self.y[i] = 0
-        for i in range(noise_start_index, noise_end_index):
-            self.y[i] = self.amplitude
-
-    def generate_burst_during_playback(self):
-        """Generates bursts during playing."""
-        self.burst_start_time = [0.300, 0.475, 0.613, 0.814, 1.300]
-        self.burst_end_time = [0.301, 0.476, 0.614, 0.815, 1.301]
-
-        for i in range(len(self.burst_start_time)):
-            start_index = int(self.burst_start_time[i] * self.rate)
-            end_index = int(self.burst_end_time[i] * self.rate)
-            for j in range(start_index, end_index):
-                self.y[j] = self.amplitude * (3 + numpy.random.uniform(-1, 1))
-
-    def generate_volume_changing(self):
-        """Generates volume changing during playing."""
-        start_time = [0.300, 1.400]
-        end_time = [0.600, 1.700]
-        for i in range(len(start_time)):
-            start_index = int(start_time[i] * self.rate)
-            end_index = int(end_time[i] * self.rate)
-            for j in range(start_index, end_index):
-                self.y[j] *= 1.4
-        self.volume_changing = [+1, -1, +1, -1]
-        self.volume_changing_time = [0.3, 0.6, 1.4, 1.7]
-
-    def test_good_signal(self):
-        """Sine wave signal with no noise or artifacts."""
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 0)
-        self.assertTrue(len(result['volume_changes']) == 0)
-        self.assertTrue(result['equivalent_noise_level'] < 0.005)
-
-    def test_good_signal_with_noise(self):
-        """Sine wave signal with noise."""
-        self.add_noise()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 0)
-        self.assertTrue(len(result['volume_changes']) == 0)
-        self.assertTrue(0.009 < result['equivalent_noise_level'] < 0.011)
-
-    def test_delay(self):
-        """Sine wave with delay during playing."""
-        self.generate_delay()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(
-            len(result['volume_changes']) == 2 * len(self.delay_start_time))
-        self.assertTrue(result['equivalent_noise_level'] < 0.005)
-
-        self.assertTrue(
-            len(result['artifacts']['delay_during_playback']) ==
-            len(self.delay_start_time))
-        for i in range(len(result['artifacts']['delay_during_playback'])):
-            delta = abs(result['artifacts']['delay_during_playback'][i][0] -
-                        self.delay_start_time[i])
-            self.assertTrue(delta < 0.001)
-            duration = self.delay_end_time[i] - self.delay_start_time[i]
-            delta = abs(result['artifacts']['delay_during_playback'][i][1] -
-                        duration)
-            self.assertTrue(delta < 0.001)
-
-    def test_artifacts_before_playback(self):
-        """Sine wave with artifacts before playback."""
-        self.generate_artifacts_before_playback()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 1)
-        delta = abs(result['artifacts']['noise_before_playback'][0][0] - 0.1)
-        self.assertTrue(delta < 0.01)
-        delta = abs(result['artifacts']['noise_before_playback'][0][1] - 0.005)
-        self.assertTrue(delta < 0.004)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 0)
-        self.assertTrue(len(result['volume_changes']) == 0)
-        self.assertTrue(result['equivalent_noise_level'] < 0.005)
-
-    def test_artifacts_after_playback(self):
-        """Sine wave with artifacts after playback."""
-        self.generate_artifacts_after_playback()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 1)
-        delta = abs(result['artifacts']['noise_after_playback'][0][0] - 1.95)
-        self.assertTrue(delta < 0.01)
-        delta = abs(result['artifacts']['noise_after_playback'][0][1] - 0.02)
-        self.assertTrue(delta < 0.001)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 0)
-        self.assertTrue(len(result['volume_changes']) == 0)
-        self.assertTrue(result['equivalent_noise_level'] < 0.005)
-
-    def test_burst_during_playback(self):
-        """Sine wave with burst during playback."""
-        self.generate_burst_during_playback()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 5)
-        self.assertTrue(len(result['volume_changes']) == 10)
-        self.assertTrue(result['equivalent_noise_level'] > 0.02)
-        for i in range(len(result['artifacts']['burst_during_playback'])):
-            delta = abs(self.burst_start_time[i] - result['artifacts'][
-                'burst_during_playback'][i])
-            self.assertTrue(delta < 0.002)
-
-    def test_volume_changing(self):
-        """Sine wave with volume changing during playback."""
-        self.generate_volume_changing()
-        result = audio_quality_measurement.quality_measurement(self.y,
-                                                               self.rate)
-        self.assertTrue(len(result['artifacts']['noise_before_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['noise_after_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['delay_during_playback']) == 0)
-        self.assertTrue(len(result['artifacts']['burst_during_playback']) == 0)
-        self.assertTrue(result['equivalent_noise_level'] < 0.005)
-        self.assertTrue(
-            len(result['volume_changes']) == len(self.volume_changing))
-        for i in range(len(self.volume_changing)):
-            self.assertTrue(
-                abs(self.volume_changing_time[i] - result['volume_changes'][i][
-                    0]) < 0.01)
-            self.assertTrue(
-                self.volume_changing[i] == result['volume_changes'][i][1])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/instrumentation/__init__.py b/src/antlion/test_utils_tests/instrumentation/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/instrumentation/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/instrumentation/device/__init__.py b/src/antlion/test_utils_tests/instrumentation/device/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/instrumentation/device/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/instrumentation/device/command/__init__.py b/src/antlion/test_utils_tests/instrumentation/device/command/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/instrumentation/device/command/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/instrumentation/device/command/instrumentation_command_builder_test.py b/src/antlion/test_utils_tests/instrumentation/device/command/instrumentation_command_builder_test.py
deleted file mode 100755
index 6afae91..0000000
--- a/src/antlion/test_utils_tests/instrumentation/device/command/instrumentation_command_builder_test.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.test_utils.instrumentation.device.command.instrumentation_command_builder import InstrumentationCommandBuilder
-from antlion.test_utils.instrumentation.device.command.instrumentation_command_builder import InstrumentationTestCommandBuilder
-
-
-class InstrumentationCommandBuilderTest(unittest.TestCase):
-
-    def test__runner_and_manifest_package_definition(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_manifest_package('package')
-        builder.set_runner('runner')
-        call = builder.build()
-        self.assertIn('package/runner', call)
-
-    def test__manifest_package_must_be_defined(self):
-        builder = InstrumentationCommandBuilder()
-
-        with self.assertRaisesRegex(Exception, '.*package cannot be none.*'):
-            builder.build()
-
-    def test__runner_must_be_defined(self):
-        builder = InstrumentationCommandBuilder()
-
-        with self.assertRaisesRegex(Exception, '.*runner cannot be none.*'):
-            builder.build()
-
-    def test__output_as_proto(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-        builder.set_proto_path()
-
-        call = builder.build()
-        self.assertIn('-f', call)
-
-    def test__proto_flag_with_set_proto_path(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-        builder.set_proto_path('/some/proto/path')
-
-        call = builder.build()
-        self.assertIn('-f', call)
-        self.assertIn('/some/proto/path', call)
-
-    def test__set_output_as_text_clears_proto_options(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-        builder.set_proto_path('/some/proto/path')
-        builder.set_output_as_text()
-
-        call = builder.build()
-        self.assertNotIn('-f', call)
-        self.assertNotIn('/some/proto/path', call)
-
-    def test__set_nohup(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-        builder.set_nohup()
-
-        call = builder.build()
-        self.assertEqual(
-            call, 'nohup am instrument some.manifest.package/runner >> '
-                  '$EXTERNAL_STORAGE/instrumentation_output.txt 2>&1')
-
-    def test__key_value_param_definition(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-
-        builder.add_key_value_param('my_key_1', 'my_value_1')
-        builder.add_key_value_param('my_key_2', 'my_value_2')
-
-        call = builder.build()
-        self.assertIn('-e my_key_1 my_value_1', call)
-        self.assertIn('-e my_key_2 my_value_2', call)
-
-    def test__flags(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-
-        builder.add_flag('--flag1')
-        builder.add_flag('--flag2')
-
-        call = builder.build()
-        self.assertIn('--flag1', call)
-        self.assertIn('--flag2', call)
-
-    def test__remove_flags(self):
-        builder = InstrumentationCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-
-        builder.add_flag('--flag1')
-        builder.add_flag('--flag2')
-        builder.remove_flag('--flag1')
-
-        call = builder.build()
-        self.assertNotIn('--flag1', call)
-        self.assertIn('--flag2', call)
-
-
-class InstrumentationTestCommandBuilderTest(unittest.TestCase):
-    """Test class for
-    acts_contrib/test_utils/instrumentation/instrumentation_call_builder.py
-    """
-
-    def test__test_packages_can_not_be_added_if_classes_were_added_first(self):
-        builder = InstrumentationTestCommandBuilder()
-        builder.add_test_class('some.tests.Class')
-
-        with self.assertRaisesRegex(Exception, '.*only a list of classes.*'):
-            builder.add_test_package('some.tests.package')
-
-    def test__test_classes_can_not_be_added_if_packages_were_added_first(self):
-        builder = InstrumentationTestCommandBuilder()
-        builder.add_test_package('some.tests.package')
-
-        with self.assertRaisesRegex(Exception, '.*only a list of classes.*'):
-            builder.add_test_class('some.tests.Class')
-
-    def test__test_classes_and_test_methods_can_be_combined(self):
-        builder = InstrumentationTestCommandBuilder()
-        builder.set_runner('runner')
-        builder.set_manifest_package('some.manifest.package')
-        builder.add_test_class('some.tests.Class1')
-        builder.add_test_method('some.tests.Class2', 'favoriteTestMethod')
-
-        call = builder.build()
-        self.assertIn('some.tests.Class1', call)
-        self.assertIn('some.tests.Class2', call)
-        self.assertIn('favoriteTestMethod', call)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/__init__.py b/src/antlion/test_utils_tests/power/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/power/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/power/tel/__init__.py b/src/antlion/test_utils_tests/power/tel/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/test_utils_tests/power/tel/__init__.py
+++ /dev/null
diff --git a/src/antlion/test_utils_tests/power/tel/lab/consume_parameter_test.py b/src/antlion/test_utils_tests/power/tel/lab/consume_parameter_test.py
deleted file mode 100644
index ca26e53..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/consume_parameter_test.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-import mobly.config_parser as mobly_config_parser
-
-from unittest import mock
-
-
-class ConsumeParameterTest(unittest.TestCase):
-    """ Unit tests for testing the consumption of test name parameters
-      for instances of PowerCellularLabBaseTest
-    """
-    @classmethod
-    def setUpClass(self):
-        from antlion.test_utils.power.cellular.cellular_power_base_test import PowerCellularLabBaseTest as PCBT
-        self.PCBT = PCBT
-        PCBT.log = mock.Mock()
-        PCBT.log_path = ''
-
-    def setUp(self):
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PCBT(test_run_config)
-        self.test = test
-
-    def test_consume_parameter_typical_case(self):
-        """ Tests the typical case: The parameter is available
-            for consumption and it has enough values
-        """
-        parameters = ['param1', 1, 2, 'param2', 3, 'param3', 'value']
-        expected = ['param2', 3]
-        self.test.unpack_userparams(parameters=parameters)
-        try:
-            result = self.test.consume_parameter('param2', 1)
-            self.assertTrue(
-                result == expected,
-                'Consume parameter did not return the expected result')
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def test_consume_parameter_returns_empty_when_parameter_unavailabe(self):
-        """ Tests the case where the requested parameter is unavailable
-            for consumption. In this case, a ValueError should be raised
-        """
-        parameters = ['param1', 1, 2]
-        expected = []
-        self.test.unpack_userparams(parameters=parameters)
-        try:
-            result = self.test.consume_parameter('param2', 1)
-            self.assertTrue(
-                result == expected,
-                'Consume parameter should return empty list for an invalid key'
-            )
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def test_consume_parameter_throws_when_requesting_too_many_parameters(
-            self):
-        """ Tests the case where the requested parameter is available
-            for consumption, but too many values are requested
-        """
-        parameters = ['param1', 1, 2]
-        self.test.unpack_userparams(parameters=parameters)
-        with self.assertRaises(ValueError):
-            self.test.consume_parameter('param1', 3)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/tel/lab/ensure_valid_calibration_table_test.py b/src/antlion/test_utils_tests/power/tel/lab/ensure_valid_calibration_table_test.py
deleted file mode 100644
index 1aa9361..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/ensure_valid_calibration_table_test.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-
-import mobly.config_parser as mobly_config_parser
-
-
-class EnsureValidCalibrationTableTest(unittest.TestCase):
-    """ Unit tests for exercising the logic of ensure_valid_calibration_table
-        for instances of PowerCellularLabBaseTest
-    """
-
-    VALID_CALIBRATION_TABLE = {'1': {'2': {'3': 123, '4': 3.14}}, '2': 45.67}
-
-    INVALID_CALIBRATION_TABLE = invalid = {'1': {'a': 'invalid'}, '2': 1234}
-
-    @classmethod
-    def setUpClass(self):
-        from antlion.test_utils.power.cellular.cellular_power_base_test import PowerCellularLabBaseTest as PCBT
-        self.PCBT = PCBT
-        PCBT.log = mock.Mock()
-        PCBT.log_path = ''
-
-
-    def setUp(self):
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PCBT(test_run_config)
-        self.test = test
-
-
-    def _assert_no_exception(self, func, *args, **kwargs):
-        try:
-            func(*args, **kwargs)
-        except Exception as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def _assert_calibration_table_passes(self, table):
-        self._assert_no_exception(self.test.ensure_valid_calibration_table, table)
-
-    def _assert_calibration_table_fails(self, table):
-        with self.assertRaises(TypeError):
-            self.test.ensure_valid_calibration_table(table)
-
-    def test_ensure_valid_calibration_table_passes_with_empty_table(self):
-        """ Ensure that empty calibration tables are invalid """
-        self._assert_calibration_table_passes({})
-
-    def test_ensure_valid_calibration_table_passes_with_valid_table(self):
-        """ Ensure that valid calibration tables throw no error """
-        self._assert_calibration_table_passes(self.VALID_CALIBRATION_TABLE)
-
-    def test_ensure_valid_calibration_table_fails_with_invalid_data(self):
-        """ Ensure that calibration tables with invalid entries throw an error """
-        self._assert_calibration_table_fails(self.INVALID_CALIBRATION_TABLE)
-
-    def test_ensure_valid_calibration_table_fails_with_none(self):
-        """ Ensure an exception is thrown if no calibration table is given """
-        self._assert_calibration_table_fails(None)
-
-    def test_ensure_valid_calibration_table_fails_with_invalid_type(self):
-        """ Ensure an exception is thrown if no calibration table is given """
-        self._assert_calibration_table_fails([])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/tel/lab/init_simulation_test.py b/src/antlion/test_utils_tests/power/tel/lab/init_simulation_test.py
deleted file mode 100644
index c7c4285..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/init_simulation_test.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-
-import mobly.config_parser as mobly_config_parser
-from antlion.controllers.cellular_lib.LteSimulation import LteSimulation
-from antlion.controllers.cellular_lib.UmtsSimulation import UmtsSimulation
-
-
-class InitSimulationTest(unittest.TestCase):
-    """ Unit tests for ensuring the simulation is correctly
-        initialized for instances of PowerCellularLabBaseTest
-    """
-    @classmethod
-    def setUpClass(self):
-        from antlion.test_utils.power.cellular.cellular_power_base_test import PowerCellularLabBaseTest as PCBT
-        self.PCBT = PCBT
-        PCBT.log = mock.Mock()
-        PCBT.log_path = ''
-
-    def setUp(self):
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PCBT(test_run_config)
-        self.test = test
-
-    def test_init_simulation_reuses_simulation_if_same_type(self):
-        """ Ensure that a new simulation is not instantiated if
-            the type is the same as the last simulation
-        """
-        mock_lte_sim = mock.Mock(spec=LteSimulation)
-        self.test.unpack_userparams(simulation=mock_lte_sim)
-        try:
-            self.test.init_simulation(self.PCBT.PARAM_SIM_TYPE_LTE)
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-        self.assertTrue(self.test.simulation is mock_lte_sim,
-                        'A new simulation was instantiated')
-
-    def test_init_simulation_does_not_reuse_simulation_if_different_type(self):
-        """ Ensure that a new simulation is instantiated if
-            the type is different from the last simulation
-        """
-        self.test.unpack_userparams(simulation=mock.Mock(spec=LteSimulation),
-                               test_params=mock.Mock())
-        try:
-            with mock.patch.object(UmtsSimulation,
-                                   '__init__',
-                                   return_value=None) as mock_init:
-                self.test.init_simulation(self.PCBT.PARAM_SIM_TYPE_UMTS)
-        except Exception as e:
-            self.fail('Error thrown: {}'.format(e))
-        self.assertTrue(mock_init.called,
-                        'A new simulation was not instantiated')
-
-    def test_init_simulation_throws_error_with_invalid_simulation_type(self):
-        """ Ensure that a new simulation is not instantiated if
-            the type is invalid
-        """
-        self.test.unpack_userparams(simulation=mock.Mock(spec=LteSimulation))
-        with self.assertRaises(ValueError):
-            self.test.init_simulation('Invalid simulation type')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/tel/lab/initialize_simulator_test.py b/src/antlion/test_utils_tests/power/tel/lab/initialize_simulator_test.py
deleted file mode 100644
index cd23f4b..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/initialize_simulator_test.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-
-import mobly.config_parser as mobly_config_parser
-from antlion.controllers.anritsu_lib import md8475_cellular_simulator as anritsu
-from antlion.controllers.rohdeschwarz_lib import cmw500_cellular_simulator as cmw
-
-
-class InitializeSimulatorTest(unittest.TestCase):
-    """ Unit tests for ensuring the simulator is correctly
-        initialized for instances of PowerCellularLabBaseTest
-    """
-    @classmethod
-    def setUpClass(self):
-        from antlion.test_utils.power.cellular.cellular_power_base_test import PowerCellularLabBaseTest as PCBT
-        self.PCBT = PCBT
-        PCBT.log = mock.Mock()
-        PCBT.log_path = ''
-
-    def setUp(self):
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PCBT(test_run_config)
-        self.test = test
-
-    def test_initialize_simulator_md8475_A(self):
-        """ Ensure that an instance of MD8475CellularSimulator
-            is returned when requesting md8475_version A
-        """
-        self.test.unpack_userparams(md8475_version='A', md8475a_ip_address='12345')
-        try:
-            with mock.patch.object(anritsu.MD8475CellularSimulator,
-                                   '__init__',
-                                   return_value=None):
-                result = self.test.initialize_simulator()
-                self.assertTrue(
-                    isinstance(result, anritsu.MD8475CellularSimulator),
-                    'Incorrect simulator type returned for md8475_version A')
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def test_initialize_simulator_md8475_B(self):
-        """ Ensure that an instance of MD8475BCellularSimulator
-            is returned when requesting md8475_version B
-        """
-        self.test.unpack_userparams(md8475_version='B', md8475a_ip_address='12345')
-        try:
-            with mock.patch.object(anritsu.MD8475BCellularSimulator,
-                                   '__init__',
-                                   return_value=None):
-                result = self.test.initialize_simulator()
-                self.assertTrue(
-                    isinstance(result, anritsu.MD8475BCellularSimulator),
-                    'Incorrect simulator type returned for md8475_version B')
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def test_initialize_simulator_cmw500(self):
-        """ Ensure that an instance of CMW500CellularSimulator
-            is returned when requesting cmw500
-        """
-        self.test.unpack_userparams(md8475_version=None,
-                               md8475a_ip_address=None,
-                               cmw500_ip='12345',
-                               cmw500_port='12345')
-        try:
-            with mock.patch.object(cmw.CMW500CellularSimulator,
-                                   '__init__',
-                                   return_value=None):
-                result = self.test.initialize_simulator()
-                self.assertTrue(
-                    isinstance(result, cmw.CMW500CellularSimulator),
-                    'Incorrect simulator type returned for cmw500')
-        except ValueError as e:
-            self.fail('Error thrown: {}'.format(e))
-
-    def test_initialize_simulator_throws_with_missing_configs(self):
-        """ Ensure that an error is raised when initialize_simulator
-            is called with missing configs
-        """
-        self.test.unpack_userparams(md8475_version=None,
-                               md8475a_ip_address=None,
-                               cmw500_ip='12345',
-                               cmw500_port=None)
-        with self.assertRaises(RuntimeError), mock.patch.object(
-                cmw.CMW500CellularSimulator, '__init__', return_value=None):
-            self.test.initialize_simulator()
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/tel/lab/power_tel_traffic_e2e_test.py b/src/antlion/test_utils_tests/power/tel/lab/power_tel_traffic_e2e_test.py
deleted file mode 100644
index be71fee..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/power_tel_traffic_e2e_test.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-
-import antlion.test_utils.power.cellular.cellular_traffic_power_test as ctpt
-import mobly.config_parser as mobly_config_parser
-from antlion.controllers.cellular_lib.LteSimulation import LteSimulation
-from antlion.controllers.rohdeschwarz_lib import cmw500_cellular_simulator as cmw
-
-magic_patch = lambda patched: mock.patch(patched, mock.MagicMock())
-
-
-class PowerTelTrafficE2eTest(unittest.TestCase):
-    """ E2E sanity test for the power cellular traffic tests """
-    @classmethod
-    def setUpClass(cls):
-        cls.PTTT = ctpt.PowerTelTrafficTest
-        cls.PTTT.log = mock.Mock()
-        cls.PTTT.log_path = ''
-
-    @magic_patch('json.load')
-    @magic_patch('builtins.open')
-    @magic_patch('os.chmod')
-    @magic_patch('os.system')
-    @magic_patch('time.sleep')
-    @magic_patch(
-        'acts_contrib.test_utils.power.cellular.cellular_power_base_test.telutils')
-    @magic_patch('acts_contrib.test_utils.power.PowerBaseTest.wutils')
-    @magic_patch(
-        'acts.metrics.loggers.blackbox.BlackboxMetricLogger.for_test_case')
-    @magic_patch(
-        'acts_contrib.test_utils.power.loggers.power_metric_logger.PowerMetricLogger.for_test_case'
-    )
-    def test_e2e(self, *args):
-
-        # Configure the test
-        test_to_mock = 'test_lte_traffic_direction_dlul_blimit_0_0'
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PTTT(test_run_config)
-        mock_android = mock.Mock()
-        mock_android.model = 'coral'
-        test.unpack_userparams(
-            android_devices=[mock_android],
-            monsoons=[mock.Mock()],
-            iperf_servers=[mock.Mock(), mock.Mock()],
-            packet_senders=[mock.Mock(), mock.Mock()],
-            custom_files=[
-                'pass_fail_threshold_coral.json', 'rockbottom_coral.sh'
-            ],
-            simulation=mock.Mock(spec=LteSimulation),
-            mon_freq=5000,
-            mon_duration=0,
-            mon_offset=0,
-            current_test_name=test_to_mock,
-            test_name=test_to_mock,
-            test_result=mock.Mock(),
-            bug_report={},
-            dut_rockbottom=mock.Mock(),
-            start_tel_traffic=mock.Mock(),
-            init_simulation=mock.Mock(),
-            initialize_simulator=mock.Mock(return_value=mock.Mock(
-                spec=cmw.CMW500CellularSimulator)),
-            collect_power_data=mock.Mock(),
-            get_iperf_results=mock.Mock(return_value={
-                'ul': 0,
-                'dl': 0
-            }),
-            pass_fail_check=mock.Mock())
-
-        # Emulate lifecycle
-        test.setup_class()
-        test.setup_test()
-        test.power_tel_traffic_test()
-        test.teardown_test()
-        test.teardown_class()
-
-        self.assertTrue(test.start_tel_traffic.called,
-                        'Start traffic was not called')
-        self.assertTrue(test.init_simulation.called,
-                        'Simulation was not initialized')
-        self.assertTrue(test.initialize_simulator.called,
-                        'Simulator was not initialized')
-        self.assertTrue(test.collect_power_data.called,
-                        'Power data was not collected')
-        self.assertTrue(test.get_iperf_results.called,
-                        'Did not get iperf results')
-        self.assertTrue(test.pass_fail_check.called,
-                        'Pass/Fail check was not performed')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/test_utils_tests/power/tel/lab/save_summary_to_file_test.py b/src/antlion/test_utils_tests/power/tel/lab/save_summary_to_file_test.py
deleted file mode 100644
index 556cfdb..0000000
--- a/src/antlion/test_utils_tests/power/tel/lab/save_summary_to_file_test.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-from unittest.mock import mock_open
-
-import mobly.config_parser as mobly_config_parser
-from antlion.controllers.cellular_lib.LteSimulation import LteSimulation
-
-
-class SaveSummaryToFileTest(unittest.TestCase):
-    """ Unit tests for testing the save summary functionality for
-        instances of PowerCellularLabBaseTest
-    """
-
-    @classmethod
-    def setUpClass(self):
-        from antlion.test_utils.power.cellular.cellular_power_base_test import PowerCellularLabBaseTest as PCBT
-        self.PCBT = PCBT
-        PCBT.log = mock.Mock()
-        PCBT.log_path = ''
-
-    def setUp(self):
-        self.tb_key = 'testbed_configs'
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = 'MockTestBed'
-        test_run_config.log_path = '/tmp'
-        test_run_config.summary_writer = mock.MagicMock()
-        test = self.PCBT(test_run_config)
-        self.test = test
-
-    def test_save_summary_to_file(self):
-        """ Ensure that a new file is written when saving
-            the test summary
-        """
-        self.test.unpack_userparams(simulation=mock.Mock(spec=LteSimulation))
-        m = mock_open()
-        with mock.patch('builtins.open', m, create=False):
-            self.test.save_summary_to_file()
-        self.assertTrue(m.called, 'Test summary was not written to output')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/tests/bt/BleFuchsiaAndroidTest.py b/src/antlion/tests/bt/BleFuchsiaAndroidTest.py
deleted file mode 100644
index 982c3ac..0000000
--- a/src/antlion/tests/bt/BleFuchsiaAndroidTest.py
+++ /dev/null
@@ -1,164 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This script shows simple examples of how to get started with bluetooth
-   low energy testing in acts.
-"""
-
-import pprint
-
-from antlion.controllers import android_device
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-from antlion.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
-from antlion.test_utils.bt.bt_constants import ble_advertise_settings_modes
-from antlion.test_utils.bt.bt_constants import adv_succ
-from antlion.test_utils.bt.bt_constants import ble_scan_settings_modes
-from antlion.test_utils.bt.bt_constants import scan_result
-from antlion.test_utils.bt.bt_test_utils import cleanup_scanners_and_advertisers
-from antlion.test_utils.bt.bt_test_utils import reset_bluetooth
-
-
-class BleFuchsiaAndroidTest(BluetoothBaseTest):
-    default_timeout = 10
-    active_adv_callback_list = []
-    droid = None
-
-    def setup_class(self):
-        super().setup_class()
-
-        # Android device under test
-        self.ad = self.android_devices[0]
-        # Fuchsia device under test
-        self.fd = self.fuchsia_devices[0]
-        self.log.info("There are: {} fuchsia and {} android devices.".format(
-            len(self.fuchsia_devices), len(self.android_devices)))
-
-    def _start_generic_advertisement_include_device_name(self):
-        self.ad.droid.bleSetAdvertiseDataIncludeDeviceName(True)
-        self.ad.droid.bleSetAdvertiseSettingsAdvertiseMode(
-            ble_advertise_settings_modes['low_latency'])
-        advertise_data = self.ad.droid.bleBuildAdvertiseData()
-        advertise_settings = self.ad.droid.bleBuildAdvertiseSettings()
-        advertise_callback = self.ad.droid.bleGenBleAdvertiseCallback()
-        self.ad.droid.bleStartBleAdvertising(advertise_callback,
-                                             advertise_data,
-                                             advertise_settings)
-        self.ad.ed.pop_event(adv_succ.format(advertise_callback),
-                             self.default_timeout)
-        self.active_adv_callback_list.append(advertise_callback)
-        return advertise_callback
-
-    # Basic test for android device as advertiser and fuchsia device as scanner
-    # Returns True if scan result has an entry corresponding to sample_android_name
-    @BluetoothBaseTest.bt_test_wrap
-    def test_fuchsia_scan_android_adv(self):
-        sample_android_name = "Pixel1234"
-        self.ad.droid.bluetoothSetLocalName(sample_android_name)
-        adv_callback = self._start_generic_advertisement_include_device_name()
-        droid_name = self.ad.droid.bluetoothGetLocalName()
-        self.log.info("Android device name: {}".format(droid_name))
-        res = True
-        if not le_scan_for_device_by_name(
-                self.fd, self.log, sample_android_name, self.default_timeout):
-            res = False
-
-        #Stop android advertising
-        self.ad.droid.bleStopBleAdvertising(adv_callback)
-
-        return res
-
-    # Test for fuchsia device attempting to connect to android device (peripheral)
-    # Also tests the list_services and discconect to a peripheral
-    @BluetoothBaseTest.bt_test_wrap
-    def test_fuchsia_connect_android_periph(self):
-        sample_android_name = "Pixel1234"
-        self.ad.droid.bluetoothStartPairingHelper()
-        self.ad.droid.bluetoothSetLocalName(sample_android_name)
-        adv_callback = self._start_generic_advertisement_include_device_name()
-        droid_name = self.ad.droid.bluetoothGetLocalName()
-        self.log.info("Android device name: {}".format(droid_name))
-
-        scan_result = le_scan_for_device_by_name(self.fd, self.log,
-                                                 sample_android_name,
-                                                 self.default_timeout)
-        if not scan_result:
-            return False
-
-        name, did, connectable = scan_result["name"], scan_result[
-            "id"], scan_result["connectable"]
-
-        connect = self.fd.sl4f.gattc_lib.bleConnectToPeripheral(did)
-        self.log.info("Connecting returned status: {}".format(connect))
-
-        services = self.fd.sl4f.gattc_lib.listServices(did)
-        self.log.info("Listing services returned: {}".format(services))
-
-        dconnect = self.fd.sl4f.gattc_lib.bleDisconnectPeripheral(did)
-        self.log.info("Disconnect status: {}".format(dconnect))
-
-        #Print clients to validate results are saved
-        self.fd.sl4f.print_clients()
-
-        #Stop android advertising + cleanup sl4f
-        self.ad.droid.bleStopBleAdvertising(adv_callback)
-
-        return True
-
-    # Currently, this test doesn't work. The android device does not scan
-    # TODO(): Debug android scan
-    @BluetoothBaseTest.bt_test_wrap
-    def test_fuchsia_adv_android_scan(self):
-        #Initialize advertising on fuchsia device with name and interval
-        fuchsia_name = "testADV123"
-        adv_data = {
-            "name": fuchsia_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-        interval = 1000
-
-        #Start advertising
-        self.fd.sl4f.ble_lib.bleStartBleAdvertising(adv_data, scan_response,
-                                                    interval, connectable)
-
-        # Initialize scan on android device which scan settings + callback
-        filter_list = self.ad.droid.bleGenFilterList()
-        self.ad.droid.bleSetScanFilterDeviceName(fuchsia_name)
-        self.ad.droid.bleSetScanSettingsScanMode(
-            ble_scan_settings_modes['low_latency'])
-        scan_settings = self.ad.droid.bleBuildScanSetting()
-        scan_callback = self.ad.droid.bleGenScanCallback()
-        self.ad.droid.bleBuildScanFilter(filter_list)
-        self.ad.droid.bleStartBleScan(filter_list, scan_settings,
-                                      scan_callback)
-        event_name = scan_result.format(scan_callback)
-        try:
-            event = self.ad.ed.pop_event(event_name, self.default_timeout)
-            self.log.info("Found scan result: {}".format(
-                pprint.pformat(event)))
-        except Exception:
-            self.log.error("Didn't find any scan results.")
-            return False
-        finally:
-            self.fd.sl4f.ble_lib.bleStopBleAdvertising()
-            self.ad.droid.bleStopBleScan(scan_callback)
-        # TODO(): Validate result
-        return True
diff --git a/src/antlion/tests/bt/BleFuchsiaTest.py b/src/antlion/tests/bt/BleFuchsiaTest.py
deleted file mode 100644
index 6730924..0000000
--- a/src/antlion/tests/bt/BleFuchsiaTest.py
+++ /dev/null
@@ -1,143 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This scrip tests various BLE apis for Fuchsia devices.
-"""
-
-import random
-
-from antlion.base_test import BaseTestClass
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-
-
-class BleFuchsiaTest(BaseTestClass):
-    default_timeout = 10
-    active_scan_callback_list = []
-    active_adv_callback_list = []
-    droid = None
-
-    def setup_class(self):
-        super().setup_class()
-
-        if (len(self.fuchsia_devices) < 2):
-            self.log.error("BleFuchsiaTest Init: Not enough fuchsia devices.")
-        self.log.info("Running testbed setup with two fuchsia devices")
-        self.fuchsia_adv = self.fuchsia_devices[0]
-        self.fuchsia_scan = self.fuchsia_devices[1]
-
-    def test_fuchsia_publish_service(self):
-        service_primary = True
-        # Random uuid
-        service_type = "0000180f-0000-1000-8000-00805fffffff"
-
-        # Generate a random key for sl4f storage of proxy key
-        service_proxy_key = "SProxy" + str(random.randint(0, 1000000))
-        res = self.fuchsia_adv.sl4f.ble_lib.blePublishService(
-            service_primary, service_type, service_proxy_key)
-        self.log.info("Publish result: {}".format(res))
-
-        return True
-
-    def test_fuchsia_scan_fuchsia_adv(self):
-        # Initialize advertising on fuchsia dveice with name and interval
-        fuchsia_name = "testADV1234"
-        adv_data = {
-            "name": fuchsia_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-        interval = 1000
-        res = True
-
-        # Start advertising
-        self.fuchsia_adv.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, interval, connectable)
-        self.log.info("Fuchsia advertising name: {}".format(fuchsia_name))
-
-        # Start scan
-        scan_result = le_scan_for_device_by_name(self.fuchsia_scan, self.log,
-                                                 fuchsia_name,
-                                                 self.default_timeout)
-        if not scan_result:
-            res = False
-
-        # Stop advertising
-        self.fuchsia_adv.sl4f.ble_lib.bleStopBleAdvertising()
-
-        return res
-
-    def test_fuchsia_gatt_fuchsia_periph(self):
-        # Create random service with primary, and uuid
-        service_primary = True
-        # Random uuid
-        service_type = "0000180f-0000-1000-8000-00805fffffff"
-
-        # Generate a random key for sl4f storage of proxy key
-        service_proxy_key = "SProxy" + str(random.randint(0, 1000000))
-        res = self.fuchsia_adv.sl4f.ble_lib.blePublishService(
-            service_primary, service_type, service_proxy_key)
-        self.log.info("Publish result: {}".format(res))
-
-        # Initialize advertising on fuchsia dveice with name and interval
-        fuchsia_name = "testADV1234"
-        adv_data = {
-            "name": fuchsia_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-        interval = 1000
-
-        # Start advertising
-        self.fuchsia_adv.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, interval, connectable)
-        self.log.info("Fuchsia advertising name: {}".format(fuchsia_name))
-
-        # Start Scan
-        scan_result = le_scan_for_device_by_name(self.fuchsia_scan, self.log,
-                                                 fuchsia_name,
-                                                 self.default_timeout)
-        if not scan_result:
-            self.fuchsia_adv.sl4f.ble_lib.bleStopBleAdvertising()
-            return False
-
-        name, did, connectable = scan_result["name"], scan_result[
-            "id"], scan_result["connectable"]
-
-        connect = self.fuchsia_scan.sl4f.gattc_lib.bleConnectToPeripheral(did)
-        self.log.info("Connecting returned status: {}".format(connect))
-
-        services = self.fuchsia_scan.sl4f.gattc_lib.listServices(did)
-        self.log.info("Listing services returned: {}".format(services))
-
-        dconnect = self.fuchsia_scan.sl4f.gattc_lib.bleDisconnectPeripheral(
-            did)
-        self.log.info("Disconnect status: {}".format(dconnect))
-
-        # Stop fuchsia advertising
-        self.fuchsia_adv.sl4f.ble_lib.bleStopBleAdvertising()
-
-        return True
diff --git a/src/antlion/tests/bt/BluetoothCmdLineTest.py b/src/antlion/tests/bt/BluetoothCmdLineTest.py
deleted file mode 100644
index 588d095..0000000
--- a/src/antlion/tests/bt/BluetoothCmdLineTest.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for initializing a cmd line tool for PTS and other purposes.
-Required custom config parameters:
-'target_mac_address': '00:00:00:00:00:00'
-
-"""
-from antlion.base_test import BaseTestClass
-from command_input import CommandInput
-
-
-class BluetoothCmdLineTest(BaseTestClass):
-    target_device_name = ""
-
-    def setup_class(self):
-        super().setup_class()
-        dut = self.user_params.get('dut', None)
-        if dut:
-            if dut == 'fuchsia_devices':
-                self.dut = self.fuchsia_devices[0]
-                self.dut.sl4f.bts_lib.initBluetoothSys()
-                self.dut.sl4f.sdp_lib.init()
-            elif dut == 'android_devices':
-                self.dut = self.android_devices[0]
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an Fuchsia device
-            self.dut = self.fuchsia_devices[0]
-        if not "target_device_name" in self.user_params.keys():
-            self.log.warning("Missing user config \"target_device_name\"!")
-            self.target_device_name = ""
-        else:
-            self.target_device_name = self.user_params["target_device_name"]
-
-    def test_cmd_line_helper(self):
-        cmd_line = CommandInput()
-        cmd_line.setup_vars(self.dut, self.target_device_name, self.log)
-        cmd_line.cmdloop()
-        return True
diff --git a/src/antlion/tests/bt/FuchsiaBtMacAddressTest.py b/src/antlion/tests/bt/FuchsiaBtMacAddressTest.py
deleted file mode 100644
index 67ee6dd..0000000
--- a/src/antlion/tests/bt/FuchsiaBtMacAddressTest.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This is a test to verify two or more Fuchsia devices don't have the same mac
-address.
-
-Setup:
-This test requires at least two fuchsia devices.
-"""
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.test_decorators import test_tracker_info
-from antlion.test_utils.bt.bt_test_utils import generate_id_by_size
-
-
-class FuchsiaBtMacAddressTest(BaseTestClass):
-    scan_timeout_seconds = 10
-
-    def setup_class(self):
-        super().setup_class()
-
-        if len(self.fuchsia_devices) < 2:
-            raise signals.TestAbortAll("Need at least two Fuchsia devices")
-        for device in self.fuchsia_devices:
-            device.sl4f.bts_lib.initBluetoothSys()
-
-    def test_verify_different_mac_addresses(self):
-        """Verify that all connected Fuchsia devices have unique mac addresses.
-
-        Steps:
-        1. Get mac address from each device
-
-        Expected Result:
-        Verify duplicate mac addresses don't exist.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: BR/EDR, BT
-        Priority: 1
-        """
-        mac_addr_list = []
-        for device in self.fuchsia_devices:
-            mac_addr_list.append(
-                device.sl4f.bts_lib.getActiveAdapterAddress().get("result"))
-        if len(mac_addr_list) != len(set(mac_addr_list)):
-            raise signals.TestFailure(
-                "Found duplicate mac addresses {}.".format(mac_addr_list))
-        raise signals.TestPass(
-            "Success: All Bluetooth Mac address unique: {}".format(
-                mac_addr_list))
diff --git a/src/antlion/tests/bt/FuchsiaBtScanTest.py b/src/antlion/tests/bt/FuchsiaBtScanTest.py
deleted file mode 100644
index fd659a0..0000000
--- a/src/antlion/tests/bt/FuchsiaBtScanTest.py
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This is a stress test for Fuchsia GATT connections.
-
-Setup:
-This test only requires two fuchsia devices as the purpose is to test
-the robusntess of GATT connections.
-"""
-
-import time
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.test_decorators import test_tracker_info
-from antlion.test_utils.bt.bt_test_utils import generate_id_by_size
-
-
-class FuchsiaBtScanTest(BaseTestClass):
-    scan_timeout_seconds = 30
-
-    def setup_class(self):
-        super().setup_class()
-        self.pri_dut = self.fuchsia_devices[0]
-        self.sec_dut = self.fuchsia_devices[1]
-
-        self.pri_dut.sl4f.bts_lib.initBluetoothSys()
-        self.sec_dut.sl4f.bts_lib.initBluetoothSys()
-
-    def test_scan_with_peer_set_non_discoverable(self):
-        """Test Bluetooth scan with peer set to non discoverable.
-
-        Steps:
-        1. Set peer device to a unique device name.
-        2. Set peer device to be non-discoverable.
-        3. Perform a BT Scan with primary dut with enough time to
-        gather results.
-
-        Expected Result:
-        Verify there are no results that match the unique device
-        name in step 1.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: BR/EDR, BT
-        Priority: 1
-        """
-        local_name = generate_id_by_size(10)
-        self.sec_dut.sl4f.bts_lib.setName(local_name)
-        self.sec_dut.sl4f.bts_lib.setDiscoverable(False)
-
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(True)
-        time.sleep(self.scan_timeout_seconds)
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(False)
-        discovered_devices = self.pri_dut.sl4f.bts_lib.getKnownRemoteDevices()
-        for device in discovered_devices.get("result").values():
-            discoverd_name = device.get("name")
-            if discoverd_name is not None and discoverd_name is local_name:
-                raise signals.TestFailure(
-                    "Found peer unexpectedly: {}.".format(device))
-        raise signals.TestPass("Successfully didn't find peer device.")
-
-    def test_scan_with_peer_set_discoverable(self):
-        """Test Bluetooth scan with peer set to discoverable.
-
-        Steps:
-        1. Set peer device to a unique device name.
-        2. Set peer device to be discoverable.
-        3. Perform a BT Scan with primary dut with enough time to
-        gather results.
-
-        Expected Result:
-        Verify there is a result that match the unique device
-        name in step 1.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: BR/EDR, BT
-        Priority: 1
-        """
-        local_name = generate_id_by_size(10)
-        self.log.info("Setting local peer name to: {}".format(local_name))
-        self.sec_dut.sl4f.bts_lib.setName(local_name)
-        self.sec_dut.sl4f.bts_lib.setDiscoverable(True)
-
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(True)
-        end_time = time.time() + self.scan_timeout_seconds
-        poll_timeout = 10
-        while time.time() < end_time:
-            discovered_devices = self.pri_dut.sl4f.bts_lib.getKnownRemoteDevices(
-            )
-            for device in discovered_devices.get("result").values():
-                self.log.info(device)
-                discoverd_name = device.get("name")
-                if discoverd_name is not None and discoverd_name in local_name:
-                    self.pri_dut.sl4f.bts_lib.requestDiscovery(False)
-                    raise signals.TestPass("Successfully found peer device.")
-            time.sleep(poll_timeout)
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(False)
-        raise signals.TestFailure("Unable to find peer device.")
diff --git a/src/antlion/tests/bt/GapSecSemTest.py b/src/antlion/tests/bt/GapSecSemTest.py
deleted file mode 100644
index 71a21c1..0000000
--- a/src/antlion/tests/bt/GapSecSemTest.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-PTS GAP/SEC/SEM Automation
-
-Optional custom parameter "collect_detailed_pass_logs"
-    Used to collect link keys and extra logs on pass results
-    to be used for certification purposes.
-"""
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.test_decorators import test_tracker_info
-from antlion.test_utils.bt.bt_test_utils import generate_id_by_size
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-from antlion.test_utils.fuchsia.bt_test_utils import get_link_keys
-from antlion.test_utils.fuchsia.bt_test_utils import unbond_all_known_devices
-from contextlib import suppress
-import inspect
-
-
-class GapSecSemTest(BaseTestClass):
-    gatt_connect_err_message = "Gatt connection failed with: {}"
-    gatt_disconnect_err_message = "Gatt disconnection failed with: {}"
-    ble_advertise_interval = 50
-    scan_timeout_seconds = 60
-
-    def setup_class(self):
-        super().setup_class()
-        self.pri_dut = self.fuchsia_devices[0]
-        # TODO: fxb/57968 Provide Facade for setting secure connections only mode,
-        # for the interim set this manually in the build.
-        self.sec_dut = self.fuchsia_devices[1]
-        for fd in self.fuchsia_devices:
-            fd.sl4f.bts_lib.initBluetoothSys()
-        # Optional user param for collecting enough information for
-        # certification on pass results.
-        self.collect_detailed_pass_logs = self.user_params.get(
-            "collect_detailed_pass_logs", False)
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            fd.take_bug_report(test_name, begin_time)
-
-    def teardown_test(self):
-        # Stop scanning and advertising on all devices at the end of a test.
-        with suppress(Exception):
-            for fd in self.fuchsia_devices:
-                fd.sl4f.ble_lib.bleStopBleAdvertising()
-                fd.bleStopBleScan()
-        for fd in self.fuchsia_devices:
-            unbond_all_known_devices(fd, self.log)
-
-    def teardown_class(self):
-        for fd in self.fuchsia_devices:
-            fd.sl4f.bts_lib.requestDiscovery(False)
-
-    def on_pass(self, test_name, begin_time):
-        if self.collect_detailed_pass_logs == True:
-            for fd in self.fuchsia_devices:
-                fd.take_bt_snoop_log(test_name)
-                fd.take_bug_report(test_name, begin_time)
-
-    def _orchestrate_gatt_connection(self, central, peripheral):
-        """ Orchestrate a GATT connetion from the input Central
-        Fuchsia device to the Peripheral Fuchsia device.
-        Args:
-                central: The central Fuchsia device
-                peripheral: The peripheral Fuchsia device
-                peripheral: The peripheral role Fuchsia device
-        Returns:
-            Dictionary of device info if connection successful.
-        """
-        adv_name = generate_id_by_size(10)
-        adv_data = {
-            "name": adv_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-
-        peripheral.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, self.ble_advertise_interval, connectable)
-        scan_filter = {"name_substring": adv_name}
-        central.sl4f.gattc_lib.bleStartBleScan(scan_filter)
-        device = le_scan_for_device_by_name(central,
-                                            self.log,
-                                            adv_name,
-                                            self.scan_timeout_seconds,
-                                            partial_match=False,
-                                            self_manage_scan=False)
-        if device is None:
-            raise signals.TestFailure("Scanner unable to find advertisement.")
-        connect_result = central.sl4f.gattc_lib.bleConnectToPeripheral(
-            device["id"])
-        if connect_result.get("error") is not None:
-            raise signals.TestFailure(
-                self.gatt_connect_err_message.format(
-                    connect_result.get("error")))
-        self.log.info("Connection Successful...")
-
-        return device
-
-    def _orchestrate_gap_sec_sem_37_to_44_test(self, test_name, central,
-                                               peripheral,
-                                               is_central_pairing_initiator,
-                                               security_level):
-        """ Performs GAP/SEC/SEM/BV-37 to 44 tests.
-            These set of tests deal with varying modes and directions of
-            service level connections with LE secure Connections.
-
-            Args:
-                test_name: The name of the test for logging purposes
-                central: The central role Fuchsia device
-                peripheral: The peripheral role Fuchsia device
-                is_central_pairing_initiator: True if Central should initiate
-                    the pairing. False if Peripheral should initiate.
-                security_level: 1 for Encrypted, 2 for Authenticated
-            Orchestration Steps:
-                1. Perform GATT connection from Central to Peripheral
-                2. Pair with specified security_level in direction specified
-                    by is_central_pairing_initiator.
-                3. Exchange pairing pins
-                4. Collect link keys if applicable
-                5. Disconnect device
-                6. Forget pairing.
-        """
-        input_capabilities = "NONE"
-        output_capabilities = "NONE"
-
-        central.sl4f.bts_lib.acceptPairing("KEYBOARD", "DISPLAY")
-
-        peripheral.sl4f.bts_lib.acceptPairing("KEYBOARD", "DISPLAY")
-
-        device = self._orchestrate_gatt_connection(central, peripheral)
-        # TODO: fxb/71289 Change once all peer IDs are ints and not strings
-        identifier = int("0x{}".format(device["id"]), 0)
-        bondable = True
-        transport = 2  #LE
-        if is_central_pairing_initiator:
-            pair_result = central.sl4f.bts_lib.pair(identifier, security_level,
-                                                    bondable, transport)
-        if not is_central_pairing_initiator:
-            device_list = peripheral.sl4f.bts_lib.getKnownRemoteDevices(
-            )['result']
-            print(device_list)
-            for id_dict in device_list:
-                d = device_list[id_dict]
-                name = None
-                if d['connected'] is True:
-                    did = d['id']
-            pair_result = peripheral.sl4f.bts_lib.pair(did, security_level,
-                                                       bondable, transport)
-
-        pins_transferred = False
-        pairing_pin = central.sl4f.bts_lib.getPairingPin()['result']
-        if pairing_pin != "0" and pairing_pin is not None:
-            peripheral.sl4f.bts_lib.inputPairingPin(pairing_pin)
-            pins_transferred = True
-        if not pins_transferred:
-            pairing_pin = peripheral.sl4f.bts_lib.getPairingPin()['result']
-            if pairing_pin != "0":
-                central.sl4f.bts_lib.inputPairingPin(pairing_pin)
-
-        if self.collect_detailed_pass_logs == True:
-            save_path = f"{central.log_path}/{test_name}_stash_secure.store"
-            self.log.info(
-                f"Known Link Keys: {get_link_keys(central, save_path)}")
-            save_path = f"{peripheral.log_path}/{test_name}_stash_secure.store"
-            self.log.info(
-                f"Known Link Keys: {get_link_keys(peripheral, save_path)}")
-
-        disconnect_result = central.sl4f.gattc_lib.bleDisconnectPeripheral(
-            device["id"])
-        if disconnect_result.get("error") is not None:
-            raise signals.TestFailure(
-                self.gatt_disconnect_err_message.format(
-                    disconnect_result.get("error")))
-        self.log.info("Disconnection Successful...")
-
-        central.sl4f.bts_lib.forgetDevice(identifier)
-
-        raise signals.TestPass("Success")
-
-    def test_gap_sec_sem_bv_37_c(self):
-        central = self.pri_dut
-        peripheral = self.sec_dut
-        is_central_pairing_initiator = True
-        security_level = 1  # Encrypted
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_38_c(self):
-        central = self.pri_dut
-        peripheral = self.sec_dut
-        is_central_pairing_initiator = True
-        security_level = 2  # Authenticated
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_39_c(self):
-        central = self.pri_dut
-        peripheral = self.sec_dut
-        is_central_pairing_initiator = False
-        security_level = 1  # Encrypted
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_40_c(self):
-        central = self.pri_dut
-        peripheral = self.sec_dut
-        is_central_pairing_initiator = False
-        security_level = 2  # Authenticated
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_41_c(self):
-        central = self.sec_dut
-        peripheral = self.pri_dut
-        is_central_pairing_initiator = True
-        security_level = 1  # Encrypted
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_42_c(self):
-        central = self.sec_dut
-        peripheral = self.pri_dut
-        is_central_pairing_initiator = True
-        security_level = 2  # Authenticated
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_43_c(self):
-        central = self.sec_dut
-        peripheral = self.pri_dut
-        is_central_pairing_initiator = False
-        security_level = 1  # Encrypted
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
-
-    def test_gap_sec_sem_bv_44_c(self):
-        central = self.sec_dut
-        peripheral = self.pri_dut
-        is_central_pairing_initiator = False
-        security_level = 2  # Authenticated
-        test_name = inspect.currentframe().f_code.co_name
-        self._orchestrate_gap_sec_sem_37_to_44_test(
-            test_name, central, peripheral, is_central_pairing_initiator,
-            security_level)
diff --git a/src/antlion/tests/bt/command_input.py b/src/antlion/tests/bt/command_input.py
deleted file mode 100644
index 46e0e3f..0000000
--- a/src/antlion/tests/bt/command_input.py
+++ /dev/null
@@ -1,3050 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Python script for wrappers to various libraries.
-
-Class CmdInput inherts from the cmd library.
-
-Functions that start with "do_" have a method
-signature that doesn't match the actual command
-line command and that is intended. This is so the
-"help" command knows what to display (in this case
-the documentation of the command itself).
-
-For example:
-Looking at the function "do_tool_set_target_device_name"
-has the inputs self and line which is expected of this type
-of method signature. When the "help" command is done on the
-method name you get the function documentation as such:
-
-(Cmd) help tool_set_target_device_name
-
-        Description: Reset the target device name.
-        Input(s):
-            device_name: Required. The advertising name to connect to.
-        Usage: tool_set_target_device_name new_target_device name
-          Examples:
-            tool_set_target_device_name le_watch
-
-This is all to say this documentation pattern is expected.
-
-"""
-
-from antlion.test_utils.audio_analysis_lib.check_quality import quality_analysis
-from antlion.test_utils.bt.bt_constants import audio_bits_per_sample_32
-from antlion.test_utils.bt.bt_constants import audio_sample_rate_48000
-from antlion.test_utils.abstract_devices.bluetooth_device import create_bluetooth_device
-from antlion.test_utils.bt.bt_constants import bt_attribute_values
-from antlion.test_utils.bt.bt_constants import sig_appearance_constants
-from antlion.test_utils.bt.bt_constants import sig_uuid_constants
-from antlion.test_utils.fuchsia.sdp_records import sdp_pts_record_list
-
-import antlion.test_utils.bt.gatt_test_database as gatt_test_database
-
-import cmd
-import pprint
-import time
-"""Various Global Strings"""
-BASE_UUID = sig_uuid_constants['BASE_UUID']
-CMD_LOG = "CMD {} result: {}"
-FAILURE = "CMD {} threw exception: {}"
-BASIC_ADV_NAME = "fs_test"
-
-
-class CommandInput(cmd.Cmd):
-    ble_adv_interval = 1000
-    ble_adv_appearance = None
-    ble_adv_data_include_tx_power_level = False
-    ble_adv_include_name = True
-    ble_adv_include_scan_response = False
-    ble_adv_name = "fs_test"
-    ble_adv_data_manufacturer_data = None
-    ble_adv_data_service_data = None
-    ble_adv_data_service_uuid_list = None
-    ble_adv_data_uris = None
-
-    bt_control_ids = []
-    bt_control_names = []
-    bt_control_devices = []
-    bt_scan_poll_timer = 0.5
-    target_device_name = ""
-    le_ids = []
-    unique_mac_addr_id = None
-
-    def setup_vars(self, dut, target_device_name, log):
-        self.pri_dut = dut
-        # Note: test_dut is the start of a slow conversion from a Fuchsia specific
-        # Tool to an abstract_device tool. Only commands that use test_dut will work
-        # Otherwise this tool is primarially targeted at Fuchsia devices.
-        self.test_dut = create_bluetooth_device(self.pri_dut)
-        self.test_dut.initialize_bluetooth_controller()
-        self.target_device_name = target_device_name
-        self.log = log
-
-    def emptyline(self):
-        pass
-
-    def do_EOF(self, line):
-        "End Script"
-        return True
-
-    """ Useful Helper functions and cmd line tooling """
-
-    def str_to_bool(self, s):
-        if s.lower() == 'true':
-            return True
-        elif s.lower() == 'false':
-            return False
-
-    def _find_unique_id_over_le(self):
-        scan_filter = {"name_substring": self.target_device_name}
-        self.unique_mac_addr_id = None
-        self.pri_dut.sl4f.gattc_lib.bleStartBleScan(scan_filter)
-        tries = 10
-        for i in range(tries):
-            time.sleep(self.bt_scan_poll_timer)
-            scan_res = self.pri_dut.sl4f.gattc_lib.bleGetDiscoveredDevices(
-            )['result']
-            for device in scan_res:
-                name, did, connectable = device["name"], device["id"], device[
-                    "connectable"]
-                if (self.target_device_name in name):
-                    self.unique_mac_addr_id = did
-                    self.log.info(
-                        "Successfully found device: name, id: {}, {}".format(
-                            name, did))
-                    break
-            if self.unique_mac_addr_id:
-                break
-        self.pri_dut.sl4f.gattc_lib.bleStopBleScan()
-
-    def _find_unique_id_over_bt_control(self):
-        self.unique_mac_addr_id = None
-        self.bt_control_devices = []
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(True)
-        tries = 10
-        for i in range(tries):
-            if self.unique_mac_addr_id:
-                break
-            time.sleep(self.bt_scan_poll_timer)
-            device_list = self.pri_dut.sl4f.bts_lib.getKnownRemoteDevices(
-            )['result']
-            for id_dict in device_list:
-                device = device_list[id_dict]
-                self.bt_control_devices.append(device)
-                name = None
-                if device['name'] is not None:
-                    name = device['name']
-                did, address = device['id'], device['address']
-
-                self.bt_control_ids.append(did)
-                if name is not None:
-                    self.bt_control_names.append(name)
-                    if self.target_device_name in name:
-                        self.unique_mac_addr_id = did
-                        self.log.info(
-                            "Successfully found device: name, id, address: {}, {}, {}"
-                            .format(name, did, address))
-                        break
-        self.pri_dut.sl4f.bts_lib.requestDiscovery(False)
-
-    def do_tool_take_bt_snoop_log(self, custom_name):
-        """
-        Description: Takes the bt snoop log from the Fuchsia device.
-        Logs will show up in your config files' logpath directory.
-
-        Input(s):
-            custom_name: Optional. Override the default pcap file name.
-
-        Usage: tool_set_target_device_name new_target_device name
-          Examples:
-            tool_take_bt_snoop_log connection_error
-            tool_take_bt_snoop_log
-        """
-        self.pri_dut.take_bt_snoop_log(custom_name)
-
-    def do_tool_refresh_unique_id(self, line):
-        """
-        Description: Refresh command line tool mac unique id.
-        Usage:
-          Examples:
-            tool_refresh_unique_id
-        """
-        try:
-            self._find_unique_id_over_le()
-        except Exception as err:
-            self.log.error(
-                "Failed to scan or find scan result: {}".format(err))
-
-    def do_tool_refresh_unique_id_using_bt_control(self, line):
-        """
-        Description: Refresh command line tool mac unique id.
-        Usage:
-          Examples:
-            tool_refresh_unique_id_using_bt_control
-        """
-        try:
-            self._find_unique_id_over_bt_control()
-        except Exception as err:
-            self.log.error(
-                "Failed to scan or find scan result: {}".format(err))
-
-    def do_tool_set_target_device_name(self, line):
-        """
-        Description: Reset the target device name.
-        Input(s):
-            device_name: Required. The advertising name to connect to.
-        Usage: tool_set_target_device_name new_target_device name
-          Examples:
-            tool_set_target_device_name le_watch
-        """
-        self.log.info("Setting target_device_name to: {}".format(line))
-        self.target_device_name = line
-
-    def do_tool_set_unique_mac_addr_id(self, line):
-        """
-        Description: Sets the unique mac address id (Specific to Fuchsia)
-        Input(s):
-            device_id: Required. The id to set the unique mac address id to
-        Usage: tool_set_unique_mac_addr_id device_id
-          Examples:
-            tool_set_unique_mac_addr_id 7fb2cae53aad9e0d
-        """
-        self.unique_mac_addr_id = line
-
-    """Begin BLE advertise wrappers"""
-
-    def complete_ble_adv_data_include_name(self, text, line, begidx, endidx):
-        roles = ["true", "false"]
-        if not text:
-            completions = roles
-        else:
-            completions = [s for s in roles if s.startswith(text)]
-        return completions
-
-    def do_ble_adv_data_include_name(self, line):
-        cmd = "Include name in the advertisement."
-        try:
-            self.ble_adv_include_name = self.str_to_bool(line)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_adv_data_set_name(self, line):
-        cmd = "Set the name to be included in the advertisement."
-        try:
-            self.ble_adv_name = line
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_ble_adv_data_set_appearance(self, text, line, begidx, endidx):
-        if not text:
-            completions = list(sig_appearance_constants.keys())
-        else:
-            completions = [
-                s for s in sig_appearance_constants.keys()
-                if s.startswith(text)
-            ]
-        return completions
-
-    def do_ble_adv_data_set_appearance(self, line):
-        cmd = "Set the appearance to known SIG values."
-        try:
-            self.ble_adv_appearance = line
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_ble_adv_data_include_tx_power_level(self, text, line, begidx,
-                                                     endidx):
-        options = ['true', 'false']
-        if not text:
-            completions = list(options)[:]
-        else:
-            completions = [s for s in options if s.startswith(text)]
-        return completions
-
-    def do_ble_adv_data_include_tx_power_level(self, line):
-        """Include the tx_power_level in the advertising data.
-        Description: Adds tx_power_level to the advertisement data to the BLE
-            advertisement.
-        Input(s):
-            value: Required. True or False
-        Usage: ble_adv_data_include_tx_power_level bool_value
-          Examples:
-            ble_adv_data_include_tx_power_level true
-            ble_adv_data_include_tx_power_level false
-        """
-        cmd = "Include tx_power_level in advertisement."
-        try:
-            self.ble_adv_data_include_tx_power_level = self.str_to_bool(line)
-        except Exception as err:
-            self.log.info(FAILURE.format(cmd, err))
-
-    def complete_ble_adv_include_scan_response(self, text, line, begidx,
-                                               endidx):
-        options = ['true', 'false']
-        if not text:
-            completions = list(options)[:]
-        else:
-            completions = [s for s in options if s.startswith(text)]
-        return completions
-
-    def do_ble_adv_include_scan_response(self, line):
-        """Include scan response in advertisement. inputs: [true|false]
-            Note: Currently just sets the scan response data to the
-                Advertisement data.
-        """
-        cmd = "Include tx_power_level in advertisement."
-        try:
-            self.ble_adv_include_scan_response = self.str_to_bool(line)
-        except Exception as err:
-            self.log.info(FAILURE.format(cmd, err))
-
-    def do_ble_adv_data_add_manufacturer_data(self, line):
-        """Include manufacturer id and data to the advertisment
-        Description: Adds manufacturer data to the BLE advertisement.
-        Input(s):
-            id: Required. The int representing the manufacturer id.
-            data: Required. The string representing the data.
-        Usage: ble_adv_data_add_manufacturer_data id data
-          Examples:
-            ble_adv_data_add_manufacturer_data 1 test
-        """
-        cmd = "Include manufacturer id and data to the advertisment."
-        try:
-
-            info = line.split()
-            if self.ble_adv_data_manufacturer_data is None:
-                self.ble_adv_data_manufacturer_data = []
-            self.ble_adv_data_manufacturer_data.append({
-                "id": int(info[0]),
-                "data": info[1]
-            })
-        except Exception as err:
-            self.log.info(FAILURE.format(cmd, err))
-
-    def do_ble_adv_data_add_service_data(self, line):
-        """Include service data to the advertisment
-        Description: Adds service data to the BLE advertisement.
-        Input(s):
-            uuid: Required. The string representing the uuid.
-            data: Required. The string representing the data.
-        Usage: ble_adv_data_add_service_data uuid data
-          Examples:
-            ble_adv_data_add_service_data 00001801-0000-1000-8000-00805f9b34fb test
-        """
-        cmd = "Include manufacturer id and data to the advertisment."
-        try:
-            info = line.split()
-            if self.ble_adv_data_service_data is None:
-                self.ble_adv_data_service_data = []
-            self.ble_adv_data_service_data.append({
-                "uuid": info[0],
-                "data": info[1]
-            })
-        except Exception as err:
-            self.log.info(FAILURE.format(cmd, err))
-
-    def do_ble_adv_add_service_uuid_list(self, line):
-        """Include a list of service uuids to the advertisment:
-        Description: Adds service uuid list to the BLE advertisement.
-        Input(s):
-            uuid: Required. A list of N string UUIDs to add.
-        Usage: ble_adv_add_service_uuid_list uuid0 uuid1 ... uuidN
-          Examples:
-            ble_adv_add_service_uuid_list 00001801-0000-1000-8000-00805f9b34fb
-            ble_adv_add_service_uuid_list 00001801-0000-1000-8000-00805f9b34fb 00001802-0000-1000-8000-00805f9b34fb
-        """
-        cmd = "Include service uuid list to the advertisment data."
-        try:
-            self.ble_adv_data_service_uuid_list = line
-        except Exception as err:
-            self.log.info(FAILURE.format(cmd, err))
-
-    def do_ble_adv_data_set_uris(self, uris):
-        """Set the URIs of the LE advertisement data:
-        Description: Adds list of String UIRs
-          See (RFC 3986 1.1.2 https://tools.ietf.org/html/rfc3986)
-          Valid URI examples:
-            ftp://ftp.is.co.za/rfc/rfc1808.txt
-            http://www.ietf.org/rfc/rfc2396.txt
-            ldap://[2001:db8::7]/c=GB?objectClass?one
-            mailto:John.Doe@example.com
-            news:comp.infosystems.www.servers.unix
-            tel:+1-816-555-1212
-            telnet://192.0.2.16:80/
-            urn:oasis:names:specification:docbook:dtd:xml:4.1.2
-        Input(s):
-            uris: Required. A list of URIs to add.
-        Usage: ble_adv_data_set_uris uri0 uri1 ... uriN
-          Examples:
-            ble_adv_data_set_uris telnet://192.0.2.16:80/
-            ble_adv_data_set_uris tel:+1-816-555-1212
-        """
-        cmd = "Set the appearance to known SIG values."
-        try:
-            self.ble_adv_data_uris = uris.split()
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def start_advertisement(self, connectable):
-        """ Handle setting advertising data and the advertisement
-            Note: After advertisement is successful, clears values set for
-                * Manufacturer data
-                * Appearance information
-                * Scan Response
-                * Service UUIDs
-                * URI list
-            Args:
-                connectable: Bool of whether to start a connectable
-                    advertisement or not.
-        """
-        adv_data_name = self.ble_adv_name
-        if not self.ble_adv_include_name:
-            adv_data_name = None
-
-        manufacturer_data = self.ble_adv_data_manufacturer_data
-
-        tx_power_level = None
-        if self.ble_adv_data_include_tx_power_level:
-            tx_power_level = 1  # Not yet implemented so set to 1
-
-        scan_response = self.ble_adv_include_scan_response
-
-        adv_data = {
-            "name": adv_data_name,
-            "appearance": self.ble_adv_appearance,
-            "service_data": self.ble_adv_data_service_data,
-            "tx_power_level": tx_power_level,
-            "service_uuids": self.ble_adv_data_service_uuid_list,
-            "manufacturer_data": manufacturer_data,
-            "uris": self.ble_adv_data_uris,
-        }
-
-        if not self.ble_adv_include_scan_response:
-            scan_response = None
-        else:
-            scan_response = adv_data
-
-        result = self.pri_dut.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, self.ble_adv_interval, connectable)
-        self.log.info("Result of starting advertisement: {}".format(result))
-        self.ble_adv_data_manufacturer_data = None
-        self.ble_adv_appearance = None
-        self.ble_adv_include_scan_response = False
-        self.ble_adv_data_service_uuid_list = None
-        self.ble_adv_data_uris = None
-        self.ble_adv_data_service_data = None
-
-    def do_ble_start_generic_connectable_advertisement(self, line):
-        """
-        Description: Start a connectable LE advertisement
-
-        Usage: ble_start_generic_connectable_advertisement
-        """
-        cmd = "Start a connectable LE advertisement"
-        try:
-            connectable = True
-            self.start_advertisement(connectable)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_start_generic_nonconnectable_advertisement(self, line):
-        """
-        Description: Start a non-connectable LE advertisement
-
-        Usage: ble_start_generic_nonconnectable_advertisement
-        """
-        cmd = "Start a nonconnectable LE advertisement"
-        try:
-            connectable = False
-            self.start_advertisement(connectable)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_stop_advertisement(self, line):
-        """
-        Description: Stop a BLE advertisement.
-        Usage: ble_stop_advertisement
-        """
-        cmd = "Stop a connectable LE advertisement"
-        try:
-            self.pri_dut.sl4f.ble_lib.bleStopBleAdvertising()
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End BLE advertise wrappers"""
-    """Begin GATT client wrappers"""
-
-    def complete_gattc_connect_by_id(self, text, line, begidx, endidx):
-        if not text:
-            completions = list(self.le_ids)[:]
-        else:
-            completions = [s for s in self.le_ids if s.startswith(text)]
-        return completions
-
-    def do_gattc_connect_by_id(self, line):
-        """
-        Description: Connect to a LE peripheral.
-        Input(s):
-            device_id: Required. The unique device ID from Fuchsia
-                discovered devices.
-        Usage:
-          Examples:
-            gattc_connect device_id
-        """
-        cmd = "Connect to a LE peripheral by input ID."
-        try:
-
-            connection_status = self.pri_dut.sl4f.gattc_lib.bleConnectToPeripheral(
-                line)
-            self.log.info("Connection status: {}".format(
-                pprint.pformat(connection_status)))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_connect(self, line):
-        """
-        Description: Connect to a LE peripheral.
-        Optional input: device_name
-        Input(s):
-            device_name: Optional. The peripheral ID to connect to.
-        Usage:
-          Examples:
-            gattc_connect
-            gattc_connect eddystone_123
-        """
-        cmd = "Connect to a LE peripheral."
-        try:
-            if len(line) > 0:
-                self.target_device_name = line
-                self.unique_mac_addr_id = None
-            if not self.unique_mac_addr_id:
-                try:
-                    self._find_unique_id()
-                except Exception as err:
-                    self.log.info("Failed to scan or find device.")
-                    return
-            connection_status = self.pri_dut.sl4f.gattc_lib.bleConnectToPeripheral(
-                self.unique_mac_addr_id)
-            self.log.info("Connection status: {}".format(
-                pprint.pformat(connection_status)))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_connect_disconnect_iterations(self, line):
-        """
-        Description: Connect then disconnect to a LE peripheral multiple times.
-        Input(s):
-            iterations: Required. The number of iterations to run.
-        Usage:
-          Examples:
-            gattc_connect_disconnect_iterations 10
-        """
-        cmd = "Connect to a LE peripheral."
-        try:
-            if not self.unique_mac_addr_id:
-                try:
-                    self._find_unique_id()
-                except Exception as err:
-                    self.log.info("Failed to scan or find device.")
-                    return
-            for i in range(int(line)):
-                self.log.info("Running iteration {}".format(i + 1))
-                connection_status = self.pri_dut.sl4f.gattc_lib.bleConnectToPeripheral(
-                    self.unique_mac_addr_id)
-                self.log.info("Connection status: {}".format(
-                    pprint.pformat(connection_status)))
-                time.sleep(4)
-                disc_status = self.pri_dut.sl4f.gattc_lib.bleDisconnectPeripheral(
-                    self.unique_mac_addr_id)
-                self.log.info("Disconnect status: {}".format(disc_status))
-                time.sleep(3)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_disconnect(self, line):
-        """
-        Description: Disconnect from LE peripheral.
-        Assumptions: Already connected to a peripheral.
-        Usage:
-          Examples:
-            gattc_disconnect
-        """
-        cmd = "Disconenct from LE peripheral."
-        try:
-            disconnect_status = self.pri_dut.sl4f.gattc_lib.bleDisconnectPeripheral(
-                self.unique_mac_addr_id)
-            self.log.info("Disconnect status: {}".format(disconnect_status))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_list_services(self, discover_chars):
-        """
-        Description: List services from LE peripheral.
-        Assumptions: Already connected to a peripheral.
-        Input(s):
-            discover_chars: Optional. An optional input to discover all
-                characteristics on the service.
-        Usage:
-          Examples:
-            gattc_list_services
-            gattc_list_services true
-        """
-        cmd = "List services from LE peripheral."
-        try:
-
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            self.log.info("Discovered Services: \n{}".format(
-                pprint.pformat(services)))
-            discover_characteristics = self.str_to_bool(discover_chars)
-            if discover_chars:
-                for service in services.get('result'):
-                    self.pri_dut.sl4f.gattc_lib.connectToService(
-                        self.unique_mac_addr_id, service.get('id'))
-                    chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics(
-                    )
-                    self.log.info("Discovered chars:\n{}".format(
-                        pprint.pformat(chars)))
-
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_connect_to_service(self, line):
-        """
-        Description: Connect to Peripheral GATT server service.
-        Assumptions: Already connected to peripheral.
-        Input(s):
-            service_id: Required. The service id reference on the GATT server.
-        Usage:
-          Examples:
-            gattc_connect_to_service service_id
-        """
-        cmd = "GATT client connect to GATT server service."
-        try:
-            self.pri_dut.sl4f.gattc_lib.connectToService(
-                self.unique_mac_addr_id, int(line))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_discover_characteristics(self, line):
-        """
-        Description: Discover characteristics from a connected service.
-        Assumptions: Already connected to a GATT server service.
-        Usage:
-          Examples:
-            gattc_discover_characteristics
-        """
-        cmd = "Discover and list characteristics from a GATT server."
-        try:
-            chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-            self.log.info("Discovered chars:\n{}".format(
-                pprint.pformat(chars)))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_notify_all_chars(self, line):
-        """
-        Description: Enable all notifications on all Characteristics on
-            a GATT server.
-        Assumptions: Basic GATT connection made.
-        Usage:
-          Examples:
-            gattc_notify_all_chars
-        """
-        cmd = "Read all characteristics from the GATT service."
-        try:
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    # quick char filter for apple-4 test... remove later
-                    print("found uuid {}".format(char_uuid))
-                    try:
-                        self.pri_dut.sl4f.gattc_lib.enableNotifyCharacteristic(
-                            char_id)
-                    except Exception as err:
-                        print("error enabling notification")
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_all_chars(self, line):
-        """
-        Description: Read all Characteristic values from a GATT server across
-            all services.
-        Assumptions: Basic GATT connection made.
-        Usage:
-          Examples:
-            gattc_read_all_chars
-        """
-        cmd = "Read all characteristics from the GATT service."
-        try:
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    try:
-                        read_val =  \
-                            self.pri_dut.sl4f.gattc_lib.readCharacteristicById(
-                                char_id)
-                        print("  Characteristic uuid / Value: {} / {}".format(
-                            char_uuid, read_val['result']))
-                        str_value = ""
-                        for val in read_val['result']:
-                            str_value += chr(val)
-                        print("    str val: {}".format(str_value))
-                    except Exception as err:
-                        print(err)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_all_desc(self, line):
-        """
-        Description: Read all Descriptors values from a GATT server across
-            all services.
-        Assumptions: Basic GATT connection made.
-        Usage:
-          Examples:
-            gattc_read_all_chars
-        """
-        cmd = "Read all descriptors from the GATT service."
-        try:
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading descs in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    descriptors = char['descriptors']
-                    print("  Reading descs in char uuid: {}".format(char_uuid))
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        desc_uuid = desc["uuid_type"]
-                    try:
-                        read_val = self.pri_dut.sl4f.gattc_lib.readDescriptorById(
-                            desc_id)
-                        print("    Descriptor uuid / Value: {} / {}".format(
-                            desc_uuid, read_val['result']))
-                    except Exception as err:
-                        pass
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_all_desc(self, line):
-        """
-        Description: Write a value to all Descriptors on the GATT server.
-        Assumptions: Basic GATT connection made.
-        Input(s):
-            offset: Required. The offset to start writing to.
-            size: Required. The size of bytes to write (value will be generated).
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_all_desc 0 100
-            gattc_write_all_desc 10 2
-        """
-        cmd = "Read all descriptors from the GATT service."
-        try:
-            args = line.split()
-            if len(args) != 2:
-                self.log.info("2 Arguments required: [Offset] [Size]")
-                return
-            offset = int(args[0])
-            size = args[1]
-            write_value = []
-            for i in range(int(size)):
-                write_value.append(i % 256)
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Writing descs in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    descriptors = char['descriptors']
-                    print("  Reading descs in char uuid: {}".format(char_uuid))
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        desc_uuid = desc["uuid_type"]
-                    try:
-                        write_val = self.pri_dut.sl4f.gattc_lib.writeDescriptorById(
-                            desc_id, offset, write_value)
-                        print("    Descriptor uuid / Result: {} / {}".format(
-                            desc_uuid, write_val['result']))
-                    except Exception as err:
-                        pass
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_all_long_desc(self, line):
-        """
-        Description: Read all long Characteristic Descriptors
-        Assumptions: Basic GATT connection made.
-        Input(s):
-            offset: Required. The offset to start reading from.
-            max_bytes: Required. The max size of bytes to return.
-        Usage:
-          Examples:
-            gattc_read_all_long_desc 0 100
-            gattc_read_all_long_desc 10 20
-        """
-        cmd = "Read all long descriptors from the GATT service."
-        try:
-            args = line.split()
-            if len(args) != 2:
-                self.log.info("2 Arguments required: [Offset] [Size]")
-                return
-            offset = int(args[0])
-            max_bytes = int(args[1])
-            services = self.pri_dut.sl4f.ble_lib.bleListServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading descs in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    descriptors = char['descriptors']
-                    print("  Reading descs in char uuid: {}".format(char_uuid))
-                    for desc in descriptors:
-                        desc_id = desc["id"]
-                        desc_uuid = desc["uuid_type"]
-                    try:
-                        read_val = self.pri_dut.sl4f.gattc_lib.readLongDescriptorById(
-                            desc_id, offset, max_bytes)
-                        print("    Descriptor uuid / Result: {} / {}".format(
-                            desc_uuid, read_val['result']))
-                    except Exception as err:
-                        pass
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_all_long_char(self, line):
-        """
-        Description: Read all long Characteristic
-        Assumptions: Basic GATT connection made.
-        Input(s):
-            offset: Required. The offset to start reading from.
-            max_bytes: Required. The max size of bytes to return.
-        Usage:
-          Examples:
-            gattc_read_all_long_char 0 100
-            gattc_read_all_long_char 10 20
-        """
-        cmd = "Read all long Characteristics from the GATT service."
-        try:
-            args = line.split()
-            if len(args) != 2:
-                self.log.info("2 Arguments required: [Offset] [Size]")
-                return
-            offset = int(args[0])
-            max_bytes = int(args[1])
-            services = self.pri_dut.sl4f.ble_lib.bleListServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    try:
-                        read_val = self.pri_dut.sl4f.gattc_lib.readLongCharacteristicById(
-                            char_id, offset, max_bytes)
-                        print("    Char uuid / Result: {} / {}".format(
-                            char_uuid, read_val['result']))
-                    except Exception as err:
-                        pass
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_all_chars(self, line):
-        """
-        Description: Write all characteristic values from a GATT server across
-            all services.
-        Assumptions: Basic GATT connection made.
-        Input(s):
-            offset: Required. The offset to start writing on.
-            size: The write value size (value will be generated)
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_all_chars 0 10
-            gattc_write_all_chars 10 1
-        """
-        cmd = "Read all characteristics from the GATT service."
-        try:
-            args = line.split()
-            if len(args) != 2:
-                self.log.info("2 Arguments required: [Offset] [Size]")
-                return
-            offset = int(args[0])
-            size = int(args[1])
-            write_value = []
-            for i in range(size):
-                write_value.append(i % 256)
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Writing chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    try:
-                        write_result = self.pri_dut.sl4f.gattc_lib.writeCharById(
-                            char_id, offset, write_value)
-                        print("  Characteristic uuid write result: {} / {}".
-                              format(char_uuid, write_result['result']))
-                    except Exception as err:
-                        print("error writing char {}".format(err))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_all_chars_without_response(self, line):
-        """
-        Description: Write all characteristic values from a GATT server across
-            all services.
-        Assumptions: Basic GATT connection made.
-        Input(s):
-            size: The write value size (value will be generated).
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_all_chars_without_response 100
-        """
-        cmd = "Read all characteristics from the GATT service."
-        try:
-            args = line.split()
-            if len(args) != 1:
-                self.log.info("1 Arguments required: [Size]")
-                return
-            size = int(args[0])
-            write_value = []
-            for i in range(size):
-                write_value.append(i % 256)
-            services = self.pri_dut.sl4f.gattc_lib.listServices(
-                self.unique_mac_addr_id)
-            for service in services['result']:
-                service_id = service['id']
-                service_uuid = service['uuid_type']
-                self.pri_dut.sl4f.gattc_lib.connectToService(
-                    self.unique_mac_addr_id, service_id)
-                chars = self.pri_dut.sl4f.gattc_lib.discoverCharacteristics()
-                print("Reading chars in service uuid: {}".format(service_uuid))
-
-                for char in chars['result']:
-                    char_id = char['id']
-                    char_uuid = char['uuid_type']
-                    try:
-                        write_result = \
-                            self.pri_dut.sl4f.gattc_lib.writeCharByIdWithoutResponse(
-                                char_id, write_value)
-                        print("  Characteristic uuid write result: {} / {}".
-                              format(char_uuid, write_result['result']))
-                    except Exception as err:
-                        pass
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_char_by_id(self, line):
-        """
-        Description: Write char by characteristic id reference.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-            offset: The offset value to use
-            size: Function will generate random bytes by input size.
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_char_by_id char_id 0 5
-            gattc_write_char_by_id char_id 20 1
-        """
-        cmd = "Write to GATT server characteristic ."
-        try:
-            args = line.split()
-            if len(args) != 3:
-                self.log.info("3 Arguments required: [Id] [Offset] [Size]")
-                return
-            id = int(args[0], 16)
-            offset = int(args[1])
-            size = int(args[2])
-            write_value = []
-            for i in range(size):
-                write_value.append(i % 256)
-            self.test_dut.gatt_client_write_characteristic_by_handle(
-                self.unique_mac_addr_id, id, offset, write_value)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_long_char_by_id(self, line):
-        """
-        Description: Write long char by characteristic id reference.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-            offset: The offset value to use
-            size: Function will generate random bytes by input size.
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-            reliable_mode: Optional: Reliable writes represented as bool
-        Usage:
-          Examples:
-            gattc_write_long_char_by_id char_id 0 5
-            gattc_write_long_char_by_id char_id 20 1
-            gattc_write_long_char_by_id char_id 20 1 true
-            gattc_write_long_char_by_id char_id 20 1 false
-        """
-        cmd = "Long Write to GATT server characteristic ."
-        try:
-            args = line.split()
-            if len(args) < 3:
-                self.log.info("3 Arguments required: [Id] [Offset] [Size]")
-                return
-            id = int(args[0], 16)
-            offset = int(args[1])
-            size = int(args[2])
-            reliable_mode = False
-            if len(args) > 3:
-                reliable_mode = self.str_to_bool(args[3])
-            write_value = []
-            for i in range(size):
-                write_value.append(i % 256)
-            self.test_dut.gatt_client_write_long_characteristic_by_handle(
-                self.unique_mac_addr_id, id, offset, write_value,
-                reliable_mode)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_long_desc_by_id(self, line):
-        """
-        Description: Write long char by descrioptor id reference.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-            offset: The offset value to use
-            size: Function will generate random bytes by input size.
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_long_desc_by_id char_id 0 5
-            gattc_write_long_desc_by_id char_id 20 1
-        """
-        cmd = "Long Write to GATT server descriptor ."
-        try:
-            args = line.split()
-            if len(args) != 3:
-                self.log.info("3 Arguments required: [Id] [Offset] [Size]")
-                return
-            id = int(args[0], 16)
-            offset = int(args[1])
-            size = int(args[2])
-            write_value = []
-            for i in range(size):
-                write_value.append(i % 256)
-            self.test_dut.gatt_client_write_long_descriptor_by_handle(
-                self.unique_mac_addr_id, id, offset, write_value)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_char_by_id_without_response(self, line):
-        """
-        Description: Write char by characteristic id reference without response.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-            size: Function will generate random bytes by input size.
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_char_by_id_without_response char_id 5
-        """
-        cmd = "Write characteristic by id without response."
-        try:
-            args = line.split()
-            if len(args) != 2:
-                self.log.info("2 Arguments required: [Id] [Size]")
-                return
-            id = int(args[0], 16)
-            size = args[1]
-            write_value = []
-            for i in range(int(size)):
-                write_value.append(i % 256)
-            self.test_dut.gatt_client_write_characteristic_without_response_by_handle(
-                self.unique_mac_addr_id, id, write_value)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_enable_notify_char_by_id(self, line):
-        """
-        Description: Enable Characteristic notification on Characteristic ID.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-        Usage:
-          Examples:
-            gattc_enable_notify_char_by_id char_id
-        """
-        cmd = "Enable notifications by Characteristic id."
-        try:
-            id = int(line, 16)
-            self.test_dut.gatt_client_enable_notifiy_characteristic_by_handle(
-                self.unique_mac_addr_id, id)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_disable_notify_char_by_id(self, line):
-        """
-        Description: Disable Characteristic notification on Characteristic ID.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-        Usage:
-          Examples:
-            gattc_disable_notify_char_by_id char_id
-        """
-        cmd = "Disable notify Characteristic by id."
-        try:
-            id = int(line, 16)
-            self.test_dut.gatt_client_disable_notifiy_characteristic_by_handle(
-                self.unique_mac_addr_id, id)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_char_by_id(self, line):
-        """
-        Description: Read Characteristic by ID.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-        Usage:
-          Examples:
-            gattc_read_char_by_id char_id
-        """
-        cmd = "Read Characteristic value by ID."
-        try:
-            id = int(line, 16)
-            read_val = self.test_dut.gatt_client_read_characteristic_by_handle(
-                self.unique_mac_addr_id, id)
-            self.log.info("Characteristic Value with id {}: {}".format(
-                id, read_val))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_char_by_uuid(self, characteristic_uuid):
-        """
-        Description: Read Characteristic by UUID (read by type).
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_uuid: The characteristic id reference on the GATT
-                service
-        Usage:
-          Examples:
-            gattc_read_char_by_id char_id
-        """
-        cmd = "Read Characteristic value by ID."
-        try:
-            short_uuid_len = 4
-            if len(characteristic_uuid) == short_uuid_len:
-                characteristic_uuid = BASE_UUID.format(characteristic_uuid)
-
-            read_val = self.test_dut.gatt_client_read_characteristic_by_uuid(
-                self.unique_mac_addr_id, characteristic_uuid)
-            self.log.info("Characteristic Value with id {}: {}".format(
-                id, read_val))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_write_desc_by_id(self, line):
-        """
-        Description: Write Descriptor by characteristic id reference.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            descriptor_id: The Descriptor id reference on the GATT service
-            offset: The offset value to use
-            size: Function will generate random bytes by input size.
-                IE: Input of 5 will send a byte array of [00, 01, 02, 03, 04]
-        Usage:
-          Examples:
-            gattc_write_desc_by_id desc_id 0 5
-            gattc_write_desc_by_id desc_id 20 1
-        """
-        cmd = "Write Descriptor by id."
-        try:
-            args = line.split()
-            id = int(args[0], 16)
-            offset = int(args[1])
-            size = args[2]
-            write_value = []
-            for i in range(int(size)):
-                write_value.append(i % 256)
-            write_result = self.test_dut.gatt_client_write_descriptor_by_handle(
-                self.unique_mac_addr_id, id, offset, write_value)
-            self.log.info("Descriptor Write result {}: {}".format(
-                id, write_result))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_desc_by_id(self, line):
-        """
-        Description: Read Descriptor by ID.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            descriptor_id: The Descriptor id reference on the GATT service
-        Usage:
-          Examples:
-            gattc_read_desc_by_id desc_id
-        """
-        cmd = "Read Descriptor by ID."
-        try:
-            id = int(line, 16)
-            read_val = self.test_dut.gatt_client_read_descriptor_by_handle(
-                self.unique_mac_addr_id, id)
-            self.log.info("Descriptor Value with id {}: {}".format(
-                id, read_val))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_gattc_read_long_char_by_id(self, line):
-        """
-        Description: Read long Characteristic value by id.
-        Assumptions: Already connected to a GATT server service.
-        Input(s):
-            characteristic_id: The characteristic id reference on the GATT
-                service
-            offset: The offset value to use.
-            max_bytes: The max bytes size to return.
-        Usage:
-          Examples:
-            gattc_read_long_char_by_id char_id 0 10
-            gattc_read_long_char_by_id char_id 20 1
-        """
-        cmd = "Read long Characteristic value by id."
-        try:
-            args = line.split()
-            if len(args) != 3:
-                self.log.info("3 Arguments required: [Id] [Offset] [Size]")
-                return
-            id = int(args[0], 16)
-            offset = int(args[1])
-            max_bytes = int(args[2])
-            read_val = self.test_dut.gatt_client_read_long_characteristic_by_handle(
-                self.unique_mac_addr_id, id, offset, max_bytes)
-            self.log.info("Characteristic Value with id {}: {}".format(
-                id, read_val['result']))
-
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End GATT client wrappers"""
-    """Begin LE scan wrappers"""
-
-    def _update_scan_results(self, scan_results):
-        self.le_ids = []
-        for scan in scan_results['result']:
-            self.le_ids.append(scan['id'])
-
-    def do_ble_start_scan(self, line):
-        """
-        Description: Perform a BLE scan.
-        Default filter name: ""
-        Optional input: filter_device_name
-        Usage:
-          Examples:
-            ble_start_scan
-            ble_start_scan eddystone
-        """
-        cmd = "Perform a BLE scan and list discovered devices."
-        try:
-            scan_filter = {"name_substring": ""}
-            if line:
-                scan_filter = {"name_substring": line}
-            self.pri_dut.sl4f.gattc_lib.bleStartBleScan(scan_filter)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_stop_scan(self, line):
-        """
-        Description: Stops a BLE scan and returns discovered devices.
-        Usage:
-          Examples:
-            ble_stop_scan
-        """
-        cmd = "Stops a BLE scan and returns discovered devices."
-        try:
-            scan_results = self.pri_dut.sl4f.gattc_lib.bleStopBleScan()
-            self._update_scan_results(scan_results)
-            self.log.info(pprint.pformat(scan_results))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_ble_get_discovered_devices(self, line):
-        """
-        Description: Get discovered LE devices of an active scan.
-        Usage:
-          Examples:
-            ble_stop_scan
-        """
-        cmd = "Get discovered LE devices of an active scan."
-        try:
-            scan_results = self.pri_dut.sl4f.gattc_lib.bleGetDiscoveredDevices(
-            )
-            self._update_scan_results(scan_results)
-            self.log.info(pprint.pformat(scan_results))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End LE scan wrappers"""
-    """Begin GATT Server wrappers"""
-
-    def do_gatts_close(self, line):
-        """
-        Description: Close active GATT server.
-
-        Usage:
-          Examples:
-            gatts_close
-        """
-        cmd = "Close active GATT server."
-        try:
-            result = self.pri_dut.sl4f.gatts_lib.closeServer()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_gatts_setup_database(self, text, line, begidx, endidx):
-        if not text:
-            completions = list(
-                gatt_test_database.GATT_SERVER_DB_MAPPING.keys())
-        else:
-            completions = [
-                s for s in gatt_test_database.GATT_SERVER_DB_MAPPING.keys()
-                if s.startswith(text)
-            ]
-        return completions
-
-    def do_gatts_setup_database(self, line):
-        """
-        Description: Setup a Gatt server database based on pre-defined inputs.
-            Supports Tab Autocomplete.
-        Input(s):
-            descriptor_db_name: The descriptor db name that matches one in
-                acts_contrib.test_utils.bt.gatt_test_database
-        Usage:
-          Examples:
-            gatts_setup_database LARGE_DB_1
-        """
-        cmd = "Setup GATT Server Database Based of pre-defined dictionaries"
-        try:
-            scan_results = self.pri_dut.sl4f.gatts_lib.publishServer(
-                gatt_test_database.GATT_SERVER_DB_MAPPING.get(line))
-            self.log.info(scan_results)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End GATT Server wrappers"""
-    """Begin Bluetooth Controller wrappers"""
-
-    def complete_btc_pair(self, text, line, begidx, endidx):
-        """ Provides auto-complete for btc_pair cmd.
-
-        See Cmd module for full description.
-        """
-        arg_completion = len(line.split(" ")) - 1
-        pairing_security_level_options = ['ENCRYPTED', 'AUTHENTICATED', 'NONE']
-        bondable_options = ['BONDABLE', 'NON_BONDABLE', 'NONE']
-        transport_options = ['BREDR', 'LE']
-        if arg_completion == 1:
-            if not text:
-                completions = pairing_security_level_options
-            else:
-                completions = [
-                    s for s in pairing_security_level_options
-                    if s.startswith(text)
-                ]
-            return completions
-        if arg_completion == 2:
-            if not text:
-                completions = bondable_options
-            else:
-                completions = [
-                    s for s in bondable_options if s.startswith(text)
-                ]
-            return completions
-        if arg_completion == 3:
-            if not text:
-                completions = transport_options
-            else:
-                completions = [
-                    s for s in transport_options if s.startswith(text)
-                ]
-            return completions
-
-    def do_btc_pair(self, line):
-        """
-        Description: Sends an outgoing pairing request.
-
-        Input(s):
-            pairing security level: ENCRYPTED, AUTHENTICATED, or NONE
-            bondable: BONDABLE, NON_BONDABLE, or NONE
-            transport: BREDR or LE
-
-        Usage:
-          Examples:
-            btc_pair NONE NONE BREDR
-            btc_pair ENCRYPTED NONE LE
-            btc_pair AUTHENTICATED NONE LE
-            btc_pair NONE NON_BONDABLE BREDR
-        """
-        cmd = "Send an outgoing pairing request."
-        pairing_security_level_mapping = {
-            "ENCRYPTED": 1,
-            "AUTHENTICATED": 2,
-            "NONE": None,
-        }
-
-        bondable_mapping = {
-            "BONDABLE": True,
-            "NON_BONDABLE": False,
-            "NONE": None,
-        }
-
-        transport_mapping = {
-            "BREDR": 1,
-            "LE": 2,
-        }
-
-        try:
-            options = line.split(" ")
-            result = self.test_dut.init_pair(
-                self.unique_mac_addr_id,
-                pairing_security_level_mapping.get(options[0]),
-                bondable_mapping.get(options[1]),
-                transport_mapping.get(options[2]),
-            )
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_btc_accept_pairing(self, text, line, begidx, endidx):
-        """ Provides auto-complete for btc_set_io_capabilities cmd.
-
-        See Cmd module for full description.
-        """
-        arg_completion = len(line.split(" ")) - 1
-        input_options = ['NONE', 'CONFIRMATION', 'KEYBOARD']
-        output_options = ['NONE', 'DISPLAY']
-        if arg_completion == 1:
-            if not text:
-                completions = input_options
-            else:
-                completions = [s for s in input_options if s.startswith(text)]
-            return completions
-        if arg_completion == 2:
-            if not text:
-                completions = output_options
-            else:
-                completions = [s for s in output_options if s.startswith(text)]
-            return completions
-
-    def do_btc_accept_pairing(self, line):
-        """
-        Description: Accept all incoming pairing requests.
-
-        Input(s):
-            input: String - The input I/O capabilities to use
-                Available Values:
-                NONE - Input capability type None
-                CONFIRMATION - Input capability type confirmation
-                KEYBOARD - Input capability type Keyboard
-            output: String - The output I/O Capabilities to use
-                Available Values:
-                NONE - Output capability type None
-                DISPLAY - output capability type Display
-
-        Usage:
-          Examples:
-            btc_accept_pairing
-            btc_accept_pairing NONE DISPLAY
-            btc_accept_pairing NONE NONE
-            btc_accept_pairing KEYBOARD DISPLAY
-        """
-        cmd = "Accept incoming pairing requests"
-        try:
-            input_capabilities = "NONE"
-            output_capabilities = "NONE"
-            options = line.split(" ")
-            if len(options) > 1:
-                input_capabilities = options[0]
-                output_capabilities = options[1]
-            result = self.pri_dut.sl4f.bts_lib.acceptPairing(
-                input_capabilities, output_capabilities)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_forget_device(self, line):
-        """
-        Description: Forget pairing of the current device under test.
-            Current device under test is the device found by
-            tool_refresh_unique_id from custom user param. This function
-            will also perform a clean disconnect if actively connected.
-
-        Usage:
-          Examples:
-            btc_forget_device
-        """
-        cmd = "For pairing of the current device under test."
-        try:
-            self.log.info("Forgetting device id: {}".format(
-                self.unique_mac_addr_id))
-            result = self.pri_dut.sl4f.bts_lib.forgetDevice(
-                self.unique_mac_addr_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_set_discoverable(self, discoverable):
-        """
-        Description: Change Bluetooth Controller discoverablility.
-        Input(s):
-            discoverable: true to set discoverable
-                          false to set non-discoverable
-        Usage:
-          Examples:
-            btc_set_discoverable true
-            btc_set_discoverable false
-        """
-        cmd = "Change Bluetooth Controller discoverablility."
-        try:
-            result = self.test_dut.set_discoverable(
-                self.str_to_bool(discoverable))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_set_name(self, name):
-        """
-        Description: Change Bluetooth Controller local name.
-        Input(s):
-            name: The name to set the Bluetooth Controller name to.
-
-        Usage:
-          Examples:
-            btc_set_name fs_test
-        """
-        cmd = "Change Bluetooth Controller local name."
-        try:
-            result = self.test_dut.set_bluetooth_local_name(name)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_request_discovery(self, discover):
-        """
-        Description: Change whether the Bluetooth Controller is in active.
-            discovery or not.
-        Input(s):
-            discover: true to start discovery
-                      false to end discovery
-        Usage:
-          Examples:
-            btc_request_discovery true
-            btc_request_discovery false
-        """
-        cmd = "Change whether the Bluetooth Controller is in active."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.requestDiscovery(
-                self.str_to_bool(discover))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_get_known_remote_devices(self, line):
-        """
-        Description: Get a list of known devices.
-
-        Usage:
-          Examples:
-            btc_get_known_remote_devices
-        """
-        cmd = "Get a list of known devices."
-        self.bt_control_devices = []
-        try:
-            device_list = self.pri_dut.sl4f.bts_lib.getKnownRemoteDevices(
-            )['result']
-            for id_dict in device_list:
-                device = device_list[id_dict]
-                self.bt_control_devices.append(device)
-                self.log.info("Device found {}".format(device))
-
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_forget_all_known_devices(self, line):
-        """
-        Description: Forget all known devices.
-
-        Usage:
-          Examples:
-            btc_forget_all_known_devices
-        """
-        cmd = "Forget all known devices."
-        try:
-            device_list = self.pri_dut.sl4f.bts_lib.getKnownRemoteDevices(
-            )['result']
-            for device in device_list:
-                d = device_list[device]
-                if d['bonded'] or d['connected']:
-                    self.log.info("Unbonding deivce: {}".format(d))
-                    self.log.info(
-                        self.pri_dut.sl4f.bts_lib.forgetDevice(
-                            d['id'])['result'])
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_connect_device(self, line):
-        """
-        Description: Connect to device under test.
-            Device under test is specified by either user params
-            or
-                tool_set_target_device_name <name>
-                do_tool_refresh_unique_id_using_bt_control
-
-        Usage:
-          Examples:
-            btc_connect_device
-        """
-        cmd = "Connect to device under test."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.connectDevice(
-                self.unique_mac_addr_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_btc_connect_device_by_id(self, text, line, begidx, endidx):
-        if not text:
-            completions = list(self.bt_control_ids)[:]
-        else:
-            completions = [
-                s for s in self.bt_control_ids if s.startswith(text)
-            ]
-        return completions
-
-    def do_btc_connect_device_by_id(self, device_id):
-        """
-        Description: Connect to device id based on pre-defined inputs.
-            Supports Tab Autocomplete.
-        Input(s):
-            device_id: The device id to connect to.
-
-        Usage:
-          Examples:
-            btc_connect_device_by_id <device_id>
-        """
-        cmd = "Connect to device id based on pre-defined inputs."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.connectDevice(device_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def complete_btc_connect_device_by_name(self, text, line, begidx, endidx):
-        if not text:
-            completions = list(self.bt_control_names)[:]
-        else:
-            completions = [
-                s for s in self.bt_control_names if s.startswith(text)
-            ]
-        return completions
-
-    def do_btc_connect_device_by_name(self, device_name):
-        """
-        Description: Connect to device id based on pre-defined inputs.
-            Supports Tab Autocomplete.
-        Input(s):
-            device_id: The device id to connect to.
-
-        Usage:
-          Examples:
-            btc_connect_device_by_name <device_id>
-        """
-        cmd = "Connect to device name based on pre-defined inputs."
-        try:
-            for device in self.bt_control_devices:
-                if device_name is device['name']:
-
-                    result = self.pri_dut.sl4f.bts_lib.connectDevice(
-                        device['id'])
-                    self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_disconnect_device(self, line):
-        """
-        Description: Disconnect to device under test.
-            Device under test is specified by either user params
-            or
-                tool_set_target_device_name <name>
-                do_tool_refresh_unique_id_using_bt_control
-
-        Usage:
-          Examples:
-            btc_disconnect_device
-        """
-        cmd = "Disconnect to device under test."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.disconnectDevice(
-                self.unique_mac_addr_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_init_bluetooth_control(self, line):
-        """
-        Description: Initialize the Bluetooth Controller.
-
-        Usage:
-          Examples:
-            btc_init_bluetooth_control
-        """
-        cmd = "Initialize the Bluetooth Controller."
-        try:
-            result = self.test_dut.initialize_bluetooth_controller()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_get_local_address(self, line):
-        """
-        Description: Get the local BR/EDR address of the Bluetooth Controller.
-
-        Usage:
-          Examples:
-            btc_get_local_address
-        """
-        cmd = "Get the local BR/EDR address of the Bluetooth Controller."
-        try:
-            result = self.test_dut.get_local_bluetooth_address()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_input_pairing_pin(self, line):
-        """
-        Description: Sends a pairing pin to SL4F's Bluetooth Control's
-        Pairing Delegate.
-
-        Usage:
-          Examples:
-            btc_input_pairing_pin 123456
-        """
-        cmd = "Input pairing pin to the Fuchsia device."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.inputPairingPin(line)['result']
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_btc_get_pairing_pin(self, line):
-        """
-        Description: Gets the pairing pin from SL4F's Bluetooth Control's
-        Pairing Delegate.
-
-        Usage:
-          Examples:
-            btc_get_pairing_pin
-        """
-        cmd = "Get the pairing pin from the Fuchsia device."
-        try:
-            result = self.pri_dut.sl4f.bts_lib.getPairingPin()['result']
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End Bluetooth Control wrappers"""
-    """Begin Profile Server wrappers"""
-
-    def do_sdp_pts_example(self, num_of_records):
-        """
-        Description: An example of how to setup a generic SDP record
-            and SDP search capabilities. This example will pass a few
-            SDP tests.
-
-        Input(s):
-            num_of_records: The number of records to add.
-
-        Usage:
-          Examples:
-            sdp_pts_example 1
-            sdp pts_example 10
-        """
-        cmd = "Setup SDP for PTS testing."
-
-        attributes = [
-            bt_attribute_values['ATTR_PROTOCOL_DESCRIPTOR_LIST'],
-            bt_attribute_values['ATTR_SERVICE_CLASS_ID_LIST'],
-            bt_attribute_values['ATTR_BLUETOOTH_PROFILE_DESCRIPTOR_LIST'],
-            bt_attribute_values['ATTR_A2DP_SUPPORTED_FEATURES'],
-        ]
-
-        try:
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['AudioSource'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['A/V_RemoteControl'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['PANU'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['SerialPort'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['DialupNetworking'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['OBEXObjectPush'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['OBEXFileTransfer'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['Headset'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['HandsfreeAudioGateway'],
-                                16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['Handsfree'], 16))
-            self.pri_dut.sl4f.sdp_lib.addSearch(
-                attributes, int(sig_uuid_constants['SIM_Access'], 16))
-            for i in range(int(num_of_records)):
-                result = self.pri_dut.sl4f.sdp_lib.addService(
-                    sdp_pts_record_list[i])
-                self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_sdp_cleanup(self, line):
-        """
-        Description: Cleanup any existing SDP records
-
-        Usage:
-          Examples:
-            sdp_cleanup
-        """
-        cmd = "Cleanup SDP objects."
-        try:
-            result = self.pri_dut.sl4f.sdp_lib.cleanUp()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_sdp_init(self, line):
-        """
-        Description: Init the profile proxy for setting up SDP records
-
-        Usage:
-          Examples:
-            sdp_init
-        """
-        cmd = "Initialize profile proxy objects for adding SDP records"
-        try:
-            result = self.pri_dut.sl4f.sdp_lib.init()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_sdp_connect_l2cap(self, line):
-        """
-        Description: Send an l2cap connection request over an input psm value.
-
-        Note: Must be already connected to a peer.
-
-        Input(s):
-            psm: The int hex value to connect over. Available PSMs:
-                SDP 0x0001  See Bluetooth Service Discovery Protocol (SDP)
-                RFCOMM  0x0003  See RFCOMM with TS 07.10
-                TCS-BIN 0x0005  See Bluetooth Telephony Control Specification /
-                    TCS Binary
-                TCS-BIN-CORDLESS    0x0007  See Bluetooth Telephony Control
-                    Specification / TCS Binary
-                BNEP    0x000F  See Bluetooth Network Encapsulation Protocol
-                HID_Control 0x0011  See Human Interface Device
-                HID_Interrupt   0x0013  See Human Interface Device
-                UPnP    0x0015  See [ESDP]
-                AVCTP   0x0017  See Audio/Video Control Transport Protocol
-                AVDTP   0x0019  See Audio/Video Distribution Transport Protocol
-                AVCTP_Browsing  0x001B  See Audio/Video Remote Control Profile
-                UDI_C-Plane 0x001D  See the Unrestricted Digital Information
-                    Profile [UDI]
-                ATT 0x001F  See Bluetooth Core Specification​
-                ​3DSP   0x0021​ ​​See 3D Synchronization Profile.
-                ​LE_PSM_IPSP    ​0x0023 ​See Internet Protocol Support Profile
-                    (IPSP)
-                OTS 0x0025  See Object Transfer Service (OTS)
-                EATT    0x0027  See Bluetooth Core Specification
-            mode: String - The channel mode to connect to. Available values:
-                Basic mode: BASIC
-                Enhanced Retransmission mode: ERTM
-
-        Usage:
-          Examples:
-            sdp_connect_l2cap 0001 BASIC
-            sdp_connect_l2cap 0019 ERTM
-        """
-        cmd = "Connect l2cap"
-        try:
-            info = line.split()
-            result = self.pri_dut.sl4f.sdp_lib.connectL2cap(
-                self.unique_mac_addr_id, int(info[0], 16), info[1])
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End Profile Server wrappers"""
-    """Begin AVDTP wrappers"""
-
-    def do_avdtp_init(self, initiator_delay):
-        """
-        Description: Init the A2DP component start and AVDTP service to
-            initiate.
-
-        Input(s):
-            initiator_delay: [Optional] The stream initiator delay to set in
-            milliseconds.
-
-        Usage:
-          Examples:
-            avdtp_init 0
-            avdtp_init 2000
-            avdtp_init
-        """
-        cmd = "Initialize AVDTP proxy"
-        try:
-            if not initiator_delay:
-                initiator_delay = None
-            result = self.pri_dut.sl4f.avdtp_lib.init(initiator_delay)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_kill_a2dp(self, line):
-        """
-        Description: Quickly kill any A2DP components.
-
-        Usage:
-          Examples:
-            avdtp_kill_a2dp
-        """
-        cmd = "Kill A2DP service"
-        try:
-            self.pri_dut.start_v1_component("bt-a2dp")
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_get_connected_peers(self, line):
-        """
-        Description: Get the connected peers for the AVDTP service
-
-        Usage:
-          Examples:
-            avdtp_get_connected_peers
-        """
-        cmd = "AVDTP get connected peers"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.getConnectedPeers()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_set_configuration(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: set configuration
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_set_configuration <peer_id>
-        """
-        cmd = "Send AVDTP set configuration to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.setConfiguration(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_get_configuration(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: get configuration
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_get_configuration <peer_id>
-        """
-        cmd = "Send AVDTP get configuration to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.getConfiguration(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_get_capabilities(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: get capabilities
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_get_capabilities <peer_id>
-        """
-        cmd = "Send AVDTP get capabilities to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.getCapabilities(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_get_all_capabilities(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: get all capabilities
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_get_all_capabilities <peer_id>
-        """
-        cmd = "Send AVDTP get all capabilities to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.getAllCapabilities(
-                int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_reconfigure_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: reconfigure stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_reconfigure_stream <peer_id>
-        """
-        cmd = "Send AVDTP reconfigure stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.reconfigureStream(
-                int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_suspend_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: suspend stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_suspend_stream <peer_id>
-        """
-        cmd = "Send AVDTP suspend stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.suspendStream(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_suspend_reconfigure(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: suspend reconfigure
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_suspend_reconfigure <peer_id>
-        """
-        cmd = "Send AVDTP suspend reconfigure to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.suspendAndReconfigure(
-                int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_release_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: release stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_release_stream <peer_id>
-        """
-        cmd = "Send AVDTP release stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.releaseStream(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_establish_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: establish stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_establish_stream <peer_id>
-        """
-        cmd = "Send AVDTP establish stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.establishStream(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_start_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: start stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_start_stream <peer_id>
-        """
-        cmd = "Send AVDTP start stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.startStream(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_abort_stream(self, peer_id):
-        """
-        Description: Send AVDTP command to connected peer: abort stream
-
-        Input(s):
-            peer_id: The specified peer_id.
-
-        Usage:
-          Examples:
-            avdtp_abort_stream <peer_id>
-        """
-        cmd = "Send AVDTP abort stream to connected peer"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.abortStream(int(peer_id))
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_avdtp_remove_service(self, line):
-        """
-        Description: Removes the AVDTP service in use.
-
-        Usage:
-          Examples:
-            avdtp_establish_stream <peer_id>
-        """
-        cmd = "Remove AVDTP service"
-        try:
-            result = self.pri_dut.sl4f.avdtp_lib.removeService()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End AVDTP wrappers"""
-    """Begin Audio wrappers"""
-
-    def do_audio_start_output_save(self, line):
-        """
-        Description: Start audio output save
-
-        Usage:
-          Examples:
-            audio_start_output_save
-        """
-        cmd = "Start audio capture"
-        try:
-            result = self.pri_dut.sl4f.audio_lib.startOutputSave()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_audio_stop_output_save(self, line):
-        """
-        Description: Stop audio output save
-
-        Usage:
-          Examples:
-            audio_stop_output_save
-        """
-        cmd = "Stop audio capture"
-        try:
-            result = self.pri_dut.sl4f.audio_lib.stopOutputSave()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_audio_get_output_audio(self, line):
-        """
-        Description: Get the audio output saved to a local file
-
-        Usage:
-          Examples:
-            audio_get_output_audio
-        """
-        cmd = "Get audio capture"
-        try:
-            save_path = "{}/{}".format(self.pri_dut.log_path, "audio.raw")
-            result = self.pri_dut.sl4f.audio_lib.getOutputAudio(save_path)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_audio_5_min_test(self, line):
-        """
-        Description: Capture and anlyize sine audio waves played from a Bluetooth A2DP
-        Source device.
-
-        Pre steps:
-        1. Pair A2DP source device
-        2. Prepare generated SOX file over preferred codec on source device.
-            Quick way to generate necessary audio files:
-            sudo apt-get install sox
-            sox -b 16 -r 48000 -c 2 -n audio_file_2k1k_5_min.wav synth 300 sine 2000 sine 3000
-
-        Usage:
-          Examples:
-            audio_5_min_test
-        """
-        cmd = "5 min audio capture test"
-        input("Press Enter once Source device is streaming audio file")
-        try:
-            result = self.pri_dut.sl4f.audio_lib.startOutputSave()
-            self.log.info(result)
-            for i in range(5):
-                print("Minutes left: {}".format(10 - i))
-                time.sleep(60)
-            result = self.pri_dut.sl4f.audio_lib.stopOutputSave()
-            log_time = int(time.time())
-            save_path = "{}/{}".format(self.pri_dut.log_path,
-                                       "{}_audio.raw".format(log_time))
-            analysis_path = "{}/{}".format(
-                self.pri_dut.log_path,
-                "{}_audio_analysis.txt".format(log_time))
-            result = self.pri_dut.sl4f.audio_lib.getOutputAudio(save_path)
-
-            channels = 1
-            try:
-                quality_analysis(filename=save_path,
-                                 output_file=analysis_path,
-                                 bit_width=audio_bits_per_sample_32,
-                                 rate=audio_sample_rate_48000,
-                                 channel=channels,
-                                 spectral_only=False)
-
-            except Exception as err:
-                self.log.error("Failed to analyze raw audio: {}".format(err))
-                return False
-
-            self.log.info("Analysis output here: {}".format(analysis_path))
-            self.log.info("Analysis Results: {}".format(
-                open(analysis_path).readlines()))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End Audio wrappers"""
-    """Begin HFP wrappers"""
-
-    def do_hfp_init(self, line):
-        """
-        Description: Init the HFP component initiate.
-
-        Usage:
-          Examples:
-            hfp_init
-        """
-        cmd = "Initialize HFP proxy"
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.init()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_remove_service(self, line):
-        """
-        Description: Removes the HFP service in use.
-
-        Usage:
-          Examples:
-            hfp_remove_service
-        """
-        cmd = "Remove HFP service"
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.removeService()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_list_peers(self, line):
-        """
-        Description: List all HFP Hands-Free peers connected to the DUT.
-
-        Input(s):
-
-        Usage:
-          Examples:
-            hfp_list_peers
-        """
-        cmd = "Lists connected peers"
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.listPeers()
-            self.log.info(pprint.pformat(result))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_active_peer(self, line):
-        """
-        Description: Set the active HFP Hands-Free peer for the DUT.
-
-        Input(s):
-            peer_id: The id of the peer to be set active.
-
-        Usage:
-          Examples:
-            hfp_set_active_peer <peer_id>
-        """
-        cmd = "Set the active peer"
-        try:
-            peer_id = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setActivePeer(peer_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_list_calls(self, line):
-        """
-        Description: List all calls known to the sl4f component on the DUT.
-
-        Input(s):
-
-        Usage:
-          Examples:
-            hfp_list_calls
-        """
-        cmd = "Lists all calls"
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.listCalls()
-            self.log.info(pprint.pformat(result))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_new_call(self, line):
-        """
-        Description: Simulate a call on the call manager
-
-        Input(s):
-            remote: The number of the remote party on the simulated call
-            state: The state of the call. Must be one of "ringing", "waiting",
-                   "dialing", "alerting", "active", "held".
-            direction: The direction of the call. Must be one of "incoming", "outgoing".
-
-        Usage:
-          Examples:
-            hfp_new_call <remote> <state> <direction>
-            hfp_new_call 14085555555 active incoming
-            hfp_new_call 14085555555 held outgoing
-            hfp_new_call 14085555555 ringing incoming
-            hfp_new_call 14085555555 waiting incoming
-            hfp_new_call 14085555555 alerting outgoing
-            hfp_new_call 14085555555 dialing outgoing
-        """
-        cmd = "Simulates a call"
-        try:
-            info = line.strip().split()
-            if len(info) != 3:
-                raise ValueError(
-                    "Exactly three command line arguments required: <remote> <state> <direction>"
-                )
-            remote, state, direction = info[0], info[1], info[2]
-            result = self.pri_dut.sl4f.hfp_lib.newCall(remote, state,
-                                                       direction)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_incoming_call(self, line):
-        """
-        Description: Simulate an incoming call on the call manager
-
-        Input(s):
-            remote: The number of the remote party on the incoming call
-
-        Usage:
-          Examples:
-            hfp_incoming_call <remote>
-            hfp_incoming_call 14085555555
-        """
-        cmd = "Simulates an incoming call"
-        try:
-            remote = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.initiateIncomingCall(remote)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_waiting_call(self, line):
-        """
-        Description: Simulate an incoming call on the call manager when there is
-        an onging active call already.
-
-        Input(s):
-            remote: The number of the remote party on the incoming call
-
-        Usage:
-          Examples:
-            hfp_waiting_call <remote>
-            hfp_waiting_call 14085555555
-        """
-        cmd = "Simulates an incoming call"
-        try:
-            remote = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.initiateIncomingWaitingCall(
-                remote)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_outgoing_call(self, line):
-        """
-        Description: Simulate an outgoing call on the call manager
-
-        Input(s):
-            remote: The number of the remote party on the outgoing call
-
-        Usage:
-          Examples:
-            hfp_outgoing_call <remote>
-        """
-        cmd = "Simulates an outgoing call"
-        try:
-            remote = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.initiateOutgoingCall(remote)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_call_active(self, line):
-        """
-        Description: Set the specified call to the "OngoingActive" state.
-
-        Input(s):
-            call_id: The unique id of the call.
-
-        Usage:
-          Examples:
-            hfp_outgoing_call <call_id>
-        """
-        cmd = "Set the specified call to active"
-        try:
-            call_id = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setCallActive(call_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_call_held(self, line):
-        """
-        Description: Set the specified call to the "OngoingHeld" state.
-
-        Input(s):
-            call_id: The unique id of the call.
-
-        Usage:
-          Examples:
-            hfp_outgoing_call <call_id>
-        """
-        cmd = "Set the specified call to held"
-        try:
-            call_id = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setCallHeld(call_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_call_terminated(self, line):
-        """
-        Description: Set the specified call to the "Terminated" state.
-
-        Input(s):
-            call_id: The unique id of the call.
-
-        Usage:
-          Examples:
-            hfp_outgoing_call <call_id>
-        """
-        cmd = "Set the specified call to terminated"
-        try:
-            call_id = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setCallTerminated(call_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_call_transferred_to_ag(self, line):
-        """
-        Description: Set the specified call to the "TransferredToAg" state.
-
-        Input(s):
-            call_id: The unique id of the call.
-
-        Usage:
-          Examples:
-            hfp_outgoing_call <call_id>
-        """
-        cmd = "Set the specified call to TransferredToAg"
-        try:
-            call_id = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setCallTransferredToAg(call_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_speaker_gain(self, line):
-        """
-        Description: Set the active peer's speaker gain.
-
-        Input(s):
-            value: The gain value to set. Must be between 0-15 inclusive.
-
-        Usage:
-          Examples:
-            hfp_set_speaker_gain <value>
-        """
-        cmd = "Set the active peer's speaker gain"
-        try:
-            value = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setSpeakerGain(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_microphone_gain(self, line):
-        """
-        Description: Set the active peer's microphone gain.
-
-        Input(s):
-            value: The gain value to set. Must be between 0-15 inclusive.
-
-        Usage:
-          Examples:
-            hfp_set_microphone_gain <value>
-        """
-        cmd = "Set the active peer's microphone gain"
-        try:
-            value = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setMicrophoneGain(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_service_available(self, line):
-        """
-        Description: Sets the simulated network service status reported by the call manager.
-
-        Input(s):
-            value: "true" to set the network connection to available.
-
-        Usage:
-          Examples:
-            hfp_set_service_available <value>
-            hfp_set_service_available true
-            hfp_set_service_available false
-        """
-        cmd = "Sets the simulated network service status reported by the call manager"
-        try:
-            value = line.strip() == "true"
-            result = self.pri_dut.sl4f.hfp_lib.setServiceAvailable(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_roaming(self, line):
-        """
-        Description: Sets the simulated roaming status reported by the call manager.
-
-        Input(s):
-            value: "true" to set the network connection to roaming.
-
-        Usage:
-          Examples:
-            hfp_set_roaming <value>
-            hfp_set_roaming true
-            hfp_set_roaming false
-        """
-        cmd = "Sets the simulated roaming status reported by the call manager"
-        try:
-            value = line.strip() == "true"
-            result = self.pri_dut.sl4f.hfp_lib.setRoaming(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_signal_strength(self, line):
-        """
-        Description: Sets the simulated signal strength reported by the call manager.
-
-        Input(s):
-            value: The signal strength value to set. Must be between 0-5 inclusive.
-
-        Usage:
-          Examples:
-            hfp_set_signal_strength <value>
-            hfp_set_signal_strength 0
-            hfp_set_signal_strength 3
-            hfp_set_signal_strength 5
-        """
-        cmd = "Sets the simulated signal strength reported by the call manager"
-        try:
-            value = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setSignalStrength(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_subscriber_number(self, line):
-        """
-        Description: Sets the subscriber number reported by the call manager.
-
-        Input(s):
-            value: The subscriber number to set. Maximum length 128 characters.
-
-        Usage:
-          Examples:
-            hfp_set_subscriber_number <value>
-            hfp_set_subscriber_number 14085555555
-        """
-        cmd = "Sets the subscriber number reported by the call manager"
-        try:
-            value = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.setSubscriberNumber(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_operator(self, line):
-        """
-        Description: Sets the operator value reported by the call manager.
-
-        Input(s):
-            value: The operator value to set. Maximum length 16 characters.
-
-        Usage:
-          Examples:
-            hfp_set_operator <value>
-            hfp_set_operator GoogleFi
-        """
-        cmd = "Sets the operator value reported by the call manager"
-        try:
-            value = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.setOperator(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_nrec_support(self, line):
-        """
-        Description: Sets the noise reduction/echo cancelation support reported by the call manager.
-
-        Input(s):
-            value: The nrec support bool.
-
-        Usage:
-          Examples:
-            hfp_set_nrec_support <value>
-            hfp_set_nrec_support true
-            hfp_set_nrec_support false
-        """
-        cmd = "Sets the noise reduction/echo cancelation support reported by the call manager"
-        try:
-            value = line.strip() == "true"
-            result = self.pri_dut.sl4f.hfp_lib.setNrecSupport(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_battery_level(self, line):
-        """
-        Description: Sets the battery level reported by the call manager.
-
-        Input(s):
-            value: The integer battery level value. Must be 0-5 inclusive.
-
-        Usage:
-          Examples:
-            hfp_set_battery_level <value>
-            hfp_set_battery_level 0
-            hfp_set_battery_level 3
-        """
-        cmd = "Set the battery level reported by the call manager"
-        try:
-            value = int(line.strip())
-            result = self.pri_dut.sl4f.hfp_lib.setBatteryLevel(value)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_last_dialed(self, line):
-        """
-        Description: Sets the last dialed number in the call manager.
-
-        Input(s):
-            number: The number of the remote party.
-
-        Usage:
-          Examples:
-            hfp_set_last_dialed <number>
-            hfp_set_last_dialed 14085555555
-        """
-        cmd = "Sets the last dialed number in the call manager."
-        try:
-            number = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.setLastDialed(number)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_clear_last_dialed(self, line):
-        """
-        Description: Clears the last dialed number in the call manager.
-
-        Usage:
-          Examples:
-            hfp_clear_last_dialed
-        """
-        cmd = "Clears the last dialed number in the call manager."
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.clearLastDialed()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_memory_location(self, line):
-        """
-        Description: Sets a memory location to point to a remote number.
-
-        Input(s):
-            location: The memory location at which to store the number.
-            number: The number of the remote party to be stored.
-
-        Usage:
-          Examples:
-            hfp_set_memory_location <location> <number>
-            hfp_set_memory_location 0 14085555555
-        """
-        cmd = "Sets a memory location to point to a remote number."
-        try:
-            info = line.strip().split()
-            if len(info) != 2:
-                raise ValueError(
-                    "Exactly two command line arguments required: <location> <number>"
-                )
-            location, number = info[0], info[1]
-            result = self.pri_dut.sl4f.hfp_lib.setMemoryLocation(
-                location, number)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_clear_memory_location(self, line):
-        """
-        Description: Sets a memory location to point to a remote number.
-
-        Input(s):
-            localtion: The memory location to clear.
-
-        Usage:
-          Examples:
-            hfp_clear_memory_location <location>
-            hfp_clear_memory_location 0
-        """
-        cmd = "Sets a memory location to point to a remote number."
-        try:
-            location = line.strip()
-            result = self.pri_dut.sl4f.hfp_lib.clearMemoryLocation(location)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_dial_result(self, line):
-        """
-        Description: Sets the status result to be returned when the number is dialed.
-
-        Input(s):
-            number: The number of the remote party.
-            status: The status to be returned when an outgoing call is initiated to the number.
-
-        Usage:
-          Examples:
-            hfp_set_battery_level <value>
-        """
-        cmd = "Sets the status result to be returned when the number is dialed."
-        try:
-            info = line.strip().split()
-            if len(info) != 2:
-                raise ValueError(
-                    "Exactly two command line arguments required: <number> <status>"
-                )
-            number, status = info[0], int(info[1])
-            result = self.pri_dut.sl4f.hfp_lib.setDialResult(number, status)
-            self.log.info(pprint.pformat(result))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_get_state(self, line):
-        """
-        Description: Get the call manager's complete state
-
-        Usage:
-          Examples:
-            hfp_get_state
-        """
-        cmd = "Get the call manager's state"
-        try:
-            result = self.pri_dut.sl4f.hfp_lib.getState()
-            self.log.info(pprint.pformat(result))
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_hfp_set_connection_behavior(self, line):
-        """
-        Description: Set the Service Level Connection (SLC) behavior when a new peer connects.
-
-        Input(s):
-            autoconnect: Enable/Disable autoconnection of SLC.
-
-        Usage:
-          Examples:
-            hfp_set_connection_behavior <autoconnect>
-            hfp_set_connection_behavior true
-            hfp_set_connection_behavior false
-        """
-        cmd = "Set the Service Level Connection (SLC) behavior"
-        try:
-            autoconnect = line.strip().lower() == "true"
-            result = self.pri_dut.sl4f.hfp_lib.setConnectionBehavior(
-                autoconnect)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End HFP wrappers"""
-    """Begin RFCOMM wrappers"""
-
-    def do_rfcomm_init(self, line):
-        """
-        Description: Initialize the RFCOMM component services.
-
-        Usage:
-          Examples:
-            rfcomm_init
-        """
-        cmd = "Initialize RFCOMM proxy"
-        try:
-            result = self.pri_dut.sl4f.rfcomm_lib.init()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_remove_service(self, line):
-        """
-        Description: Removes the RFCOMM service in use.
-
-        Usage:
-          Examples:
-            rfcomm_remove_service
-        """
-        cmd = "Remove RFCOMM service"
-        try:
-            result = self.pri_dut.sl4f.rfcomm_lib.removeService()
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_disconnect_session(self, line):
-        """
-        Description: Closes the RFCOMM Session.
-
-        Usage:
-          Examples:
-            rfcomm_disconnect_session
-            rfcomm_disconnect_session
-        """
-        cmd = "Disconnect the RFCOMM Session"
-        try:
-            result = self.pri_dut.sl4f.rfcomm_lib.disconnectSession(
-                self.unique_mac_addr_id)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_connect_rfcomm_channel(self, line):
-        """
-        Description: Make an outgoing RFCOMM connection.
-
-        Usage:
-          Examples:
-            rfcomm_connect_rfcomm_channel <server_channel_number>
-            rfcomm_connect_rfcomm_channel 2
-        """
-        cmd = "Make an outgoing RFCOMM connection"
-        try:
-            server_channel_number = int(line.strip())
-            result = self.pri_dut.sl4f.rfcomm_lib.connectRfcommChannel(
-                self.unique_mac_addr_id, server_channel_number)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_disconnect_rfcomm_channel(self, line):
-        """
-        Description: Close the RFCOMM connection with the peer
-
-        Usage:
-          Examples:
-            rfcomm_disconnect_rfcomm_channel <server_channel_number>
-            rfcomm_disconnect_rfcomm_channel 2
-        """
-        cmd = "Close the RFCOMM channel"
-        try:
-            server_channel_number = int(line.strip())
-            result = self.pri_dut.sl4f.rfcomm_lib.disconnectRfcommChannel(
-                self.unique_mac_addr_id, server_channel_number)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_send_remote_line_status(self, line):
-        """
-        Description: Send a remote line status for the RFCOMM channel.
-
-        Usage:
-          Examples:
-            rfcomm_send_remote_line_status <server_channel_number>
-            rfcomm_send_remote_line_status 2
-        """
-        cmd = "Send a remote line status update for the RFCOMM channel"
-        try:
-            server_channel_number = int(line.strip())
-            result = self.pri_dut.sl4f.rfcomm_lib.sendRemoteLineStatus(
-                self.unique_mac_addr_id, server_channel_number)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    def do_rfcomm_write_rfcomm(self, line):
-        """
-        Description: Send data over the RFCOMM channel.
-
-        Usage:
-          Examples:
-            rfcomm_write_rfcomm <server_channel_number> <data>
-            rfcomm_write_rfcomm 2 foobar
-        """
-        cmd = "Send data using the RFCOMM channel"
-        try:
-            info = line.strip().split()
-            if len(info) != 2:
-                raise ValueError(
-                    "Exactly two command line arguments required: <server_channel_number> <data>"
-                )
-            server_channel_number = int(info[0])
-            data = info[1]
-            result = self.pri_dut.sl4f.rfcomm_lib.writeRfcomm(
-                self.unique_mac_addr_id, server_channel_number, data)
-            self.log.info(result)
-        except Exception as err:
-            self.log.error(FAILURE.format(cmd, err))
-
-    """End RFCOMM wrappers"""
diff --git a/src/antlion/tests/bt/ep/BtFuchsiaEPTest.py b/src/antlion/tests/bt/ep/BtFuchsiaEPTest.py
deleted file mode 100644
index 626c259..0000000
--- a/src/antlion/tests/bt/ep/BtFuchsiaEPTest.py
+++ /dev/null
@@ -1,310 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Setup:
-This test only requires two fuchsia devices.
-"""
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHError
-from antlion.test_decorators import test_tracker_info
-from antlion.test_utils.bt.bt_test_utils import generate_id_by_size
-from antlion.test_utils.fuchsia.bt_test_utils import bredr_scan_for_device_by_name
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-from antlion.test_utils.fuchsia.bt_test_utils import unbond_all_known_devices
-from antlion.test_utils.fuchsia.bt_test_utils import verify_device_state_by_name
-import time
-
-
-class BtFuchsiaEPTest(BaseTestClass):
-    ble_advertise_interval = 50
-    scan_timeout_seconds = 60
-    default_iterations = 1000
-    adv_name = generate_id_by_size(10)
-    test_adv_data = {
-        "name": adv_name,
-        "appearance": None,
-        "service_data": None,
-        "tx_power_level": None,
-        "service_uuids": None,
-        "manufacturer_data": None,
-        "uris": None,
-    }
-    test_connectable = True
-    test_scan_response = None
-
-    def setup_class(self):
-        super().setup_class()
-        for fd in self.fuchsia_devices:
-            fd.sl4f.bts_lib.initBluetoothSys()
-        self.pri_dut = self.fuchsia_devices[0]
-        self.sec_dut = self.fuchsia_devices[1]
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            fd.take_bug_report(test_name, begin_time)
-        self._unbond_all_known_devices()
-        self.sec_dut.sl4f.ble_lib.bleStopBleAdvertising()
-        self._kill_media_services()
-
-    def teardown_class(self):
-        self._kill_media_services()
-
-    def _kill_media_services(self):
-        """Kill any BT services related to A2DP/AVRCP on all Fuchsia devices.
-        """
-        ssh_timeout = 30
-        for fd in self.fuchsia_devices:
-            try:
-                fd.ssh.run("killall bt-a2dp*", timeout_sec=ssh_timeout)
-                fd.ssh.run("killall bt-avrcp*", timeout_sec=ssh_timeout)
-            except FuchsiaSSHError:
-                pass
-
-    def _unbond_all_known_devices(self):
-        """For all Fuchsia devices, unbond any known pairings.
-        """
-        time.sleep(5)
-        for fd in self.fuchsia_devices:
-            unbond_all_known_devices(fd, self.log)
-
-    def test_ble_awareness(self):
-        """Verify that Fuchsia devices can advertise and scan each other
-
-        Verify a Fuchsia device that starts a BLE advertisesement can be
-        found by a Fuchsia BLE scanner.
-
-        Steps:
-        1. On one Fuchsia device set an advertisement
-        2. On one Fuchsia device, scan for the advertisement by name
-
-        Expected Result:
-        Verify that there are no errors after each GATT connection.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: BLE
-        Priority: 0
-        """
-
-        self.sec_dut.sl4f.ble_lib.bleStartBleAdvertising(
-            self.test_adv_data, self.test_scan_response,
-            self.ble_advertise_interval, self.test_connectable)
-
-        device = le_scan_for_device_by_name(self.pri_dut, self.log,
-                                            self.adv_name,
-                                            self.scan_timeout_seconds)
-        self.sec_dut.sl4f.ble_lib.bleStopBleAdvertising()
-        if device is None:
-            raise signals.TestFailure("Scanner unable to find advertisement.")
-        raise signals.TestPass("Success")
-
-    def test_gatt_central_peripheral(self):
-        """Verify that Fuchsia devices can perform GATT operations
-
-        Verify a Fuchsia devices can perform GATT connections and interactions.
-
-        Steps:
-        1. On one Fuchsia device set an advertisement
-        2. On one Fuchsia device, scan for the advertisement by name
-        3. Perform GATT connection over LE
-        4. Pair both devices.
-        5. Perform GATT read/write operations.
-        6. Perform GATT disconnection.
-
-        Expected Result:
-        Verify that there are no errors after each GATT connection.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: BLE
-        Priority: 0
-        """
-        self._unbond_all_known_devices()
-
-        source_device_name = generate_id_by_size(10)
-        self.pri_dut.sl4f.bts_lib.setName(source_device_name)
-
-        self.sec_dut.sl4f.ble_lib.bleStartBleAdvertising(
-            self.test_adv_data, self.test_scan_response,
-            self.ble_advertise_interval, self.test_connectable)
-
-        device = le_scan_for_device_by_name(self.pri_dut, self.log,
-                                            self.adv_name,
-                                            self.scan_timeout_seconds)
-        if device is None:
-            raise signals.TestFailure("Scanner unable to find advertisement.")
-
-        connect_result = self.pri_dut.sl4f.gattc_lib.bleConnectToPeripheral(
-            device["id"])
-        if connect_result.get("error") is not None:
-            raise signals.TestFailure("GATT Connection failed with: {}".format(
-                connect_result.get("error")))
-
-        if not verify_device_state_by_name(self.pri_dut, self.log,
-                                           self.adv_name, "CONNECTED", None):
-            raise signals.TestFailure(
-                "Failed to connect to device {}.".format(target_device_name))
-
-        if not verify_device_state_by_name(
-                self.sec_dut, self.log, source_device_name, "CONNECTED", None):
-            raise signals.TestFailure(
-                "Failed to connect to device {}.".format(source_device_name))
-
-        security_level = "ENCRYPTED"
-        non_bondable = False
-        transport = 2  #LE
-        self.pri_dut.sl4f.bts_lib.pair(device["id"], security_level,
-                                       non_bondable, transport)
-
-        services = None
-        if not verify_device_state_by_name(self.pri_dut, self.log,
-                                           self.adv_name, "BONDED", services):
-            raise signals.TestFailure(
-                "Failed to pair device {}.".format(target_device_name))
-
-        if not verify_device_state_by_name(self.sec_dut, self.log,
-                                           source_device_name, "BONDED",
-                                           services):
-            raise signals.TestFailure(
-                "Failed to pair device {}.".format(source_device_name))
-
-        disconnect_result = self.pri_dut.sl4f.gattc_lib.bleDisconnectPeripheral(
-            device["id"])
-        if disconnect_result.get("error") is not None:
-            raise signals.TestFailure(
-                "GATT Disconnection failed with: {}".format(
-                    connect_result.get("error")))
-
-        self.sec_dut.sl4f.ble_lib.bleStopBleAdvertising()
-
-        # TODO: Setup Proper GATT server and verify services published are found
-
-        raise signals.TestPass("Success")
-
-    def test_pairing_a2dp(self):
-        """Verify that Fuchsia devices can pair to each other and establish
-            an A2DP connection
-
-            Verify that Fuchsia devices can pair to each other and establish
-            an A2DP connection
-
-            Steps:
-            1. Clear out all bonded devices
-            2. Stop any A2DP services running on the device
-                Needed to take ownership of the services
-            3. Init sink and source opposite devices
-            4. Start pairing delegate for all Fuchsia devices
-            5. Set sink device to be discoverable
-            6. Discover sink device from source device
-            7. Connect to sink device from source device
-            8. Pair to sink device
-            9. Validate paired devices and services present
-
-            Expected Result:
-            Verify devices are successfully paired and appropriate a2dp
-            services are running.
-
-            Returns:
-            signals.TestPass if no errors
-            signals.TestFailure if there are any errors during the test.
-
-            TAGS: BREDR, A2DP
-            Priority: 0
-        """
-        self._unbond_all_known_devices()
-        self._kill_media_services()
-
-        source_device_name = generate_id_by_size(10)
-        target_device_name = generate_id_by_size(10)
-
-        self.pri_dut.sl4f.bts_lib.setName(source_device_name)
-        self.sec_dut.sl4f.bts_lib.setName(target_device_name)
-
-        input_capabilities = "NONE"
-        output_capabilities = "NONE"
-
-        # Initialize a2dp on both devices.
-        self.pri_dut.sl4f.avdtp_lib.init()
-        self.sec_dut.sl4f.avdtp_lib.init()
-
-        self.pri_dut.sl4f.bts_lib.acceptPairing(input_capabilities,
-                                                output_capabilities)
-
-        self.sec_dut.sl4f.bts_lib.acceptPairing(input_capabilities,
-                                                output_capabilities)
-        self.sec_dut.sl4f.bts_lib.setDiscoverable(True)
-
-        unique_mac_addr_id = bredr_scan_for_device_by_name(
-            self.pri_dut, self.log, target_device_name,
-            self.scan_timeout_seconds)
-
-        if not unique_mac_addr_id:
-            raise signals.TestFailure(
-                "Failed to find device {}.".format(target_device_name))
-
-        connect_result = self.pri_dut.sl4f.bts_lib.connectDevice(
-            unique_mac_addr_id)
-        if connect_result.get("error") is not None:
-            raise signals.TestFailure("Failed to connect with {}.".format(
-                connect_result.get("error")))
-
-        # We pair before checking the CONNECTED status because BR/EDR semantics
-        # were recently changed such that if pairing is not confirmed, then bt
-        # does not report connected = True.
-        security_level = "NONE"
-        bondable = True
-        transport = 1  #BREDR
-        pair_result = self.pri_dut.sl4f.bts_lib.pair(unique_mac_addr_id,
-                                                     security_level, bondable,
-                                                     transport)
-        if pair_result.get("error") is not None:
-            raise signals.TestFailure("Failed to pair with {}.".format(
-                pair_result.get("error")))
-
-        if not verify_device_state_by_name(
-                self.pri_dut, self.log, target_device_name, "CONNECTED", None):
-            raise signals.TestFailure(
-                "Failed to connect to device {}.".format(target_device_name))
-
-        if not verify_device_state_by_name(
-                self.sec_dut, self.log, source_device_name, "CONNECTED", None):
-            raise signals.TestFailure(
-                "Failed to connect to device {}.".format(source_device_name))
-
-        #TODO: Validation of services and paired devices (b/175641870)
-        # A2DP sink: 0000110b-0000-1000-8000-00805f9b34fb
-        # A2DP source: 0000110a-0000-1000-8000-00805f9b34fb
-        #TODO: Make an easy function for checking/updating devices
-        services = None
-        if not verify_device_state_by_name(self.pri_dut, self.log,
-                                           target_device_name, "BONDED",
-                                           services):
-            raise signals.TestFailure(
-                "Failed to pair device {}.".format(target_device_name))
-
-        if not verify_device_state_by_name(self.sec_dut, self.log,
-                                           source_device_name, "BONDED",
-                                           services):
-            raise signals.TestFailure(
-                "Failed to pair device {}.".format(source_device_name))
-
-        raise signals.TestPass("Success")
diff --git a/src/antlion/tests/bt/gatt/GattConnectionStressTest.py b/src/antlion/tests/bt/gatt/GattConnectionStressTest.py
deleted file mode 100644
index 42e2f92..0000000
--- a/src/antlion/tests/bt/gatt/GattConnectionStressTest.py
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This is a stress test for Fuchsia GATT connections.
-
-Custom Params:
-gatt_connect_stress_test_iterations
-
-    Example:
-    "gatt_connect_stress_test_iterations": 10
-
-Setup:
-This test only requires two fuchsia devices as the purpose is to test
-the robusntess of GATT connections.
-"""
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.test_decorators import test_tracker_info
-from antlion.test_utils.bt.bt_test_utils import generate_id_by_size
-from antlion.test_utils.fuchsia.bt_test_utils import le_scan_for_device_by_name
-
-
-class GattConnectionStressTest(BaseTestClass):
-    gatt_connect_err_message = "Gatt connection failed with: {}"
-    gatt_disconnect_err_message = "Gatt disconnection failed with: {}"
-    ble_advertise_interval = 50
-    scan_timeout_seconds = 60
-    default_iterations = 1000
-
-    def setup_class(self):
-        super().setup_class()
-        self.fuchsia_client_dut = self.fuchsia_devices[0]
-        self.fuchsia_server_dut = self.fuchsia_devices[1]
-        self.default_iterations = self.user_params.get(
-            "gatt_connect_stress_test_iterations", self.default_iterations)
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            fd.take_bug_report(test_name, begin_time)
-
-    def _orchestrate_single_connect_disconnect(self):
-        adv_name = generate_id_by_size(10)
-        adv_data = {
-            "name": adv_name,
-            "appearance": None,
-            "service_data": None,
-            "tx_power_level": None,
-            "service_uuids": None,
-            "manufacturer_data": None,
-            "uris": None,
-        }
-        scan_response = None
-        connectable = True
-        self.fuchsia_server_dut.sl4f.ble_lib.bleStartBleAdvertising(
-            adv_data, scan_response, self.ble_advertise_interval, connectable)
-        device = le_scan_for_device_by_name(self.fuchsia_client_dut, self.log,
-                                            adv_name,
-                                            self.scan_timeout_seconds)
-        if device is None:
-            raise signals.TestFailure("Scanner unable to find advertisement.")
-        connect_result = self.fuchsia_client_dut.sl4f.gattc_lib.bleConnectToPeripheral(
-            device["id"])
-        if connect_result.get("error") is not None:
-            raise signals.TestFailure(
-                self.gatt_connect_err_message.format(
-                    connect_result.get("error")))
-        self.log.info("Connection Successful...")
-        disconnect_result = self.fuchsia_client_dut.sl4f.gattc_lib.bleDisconnectPeripheral(
-            device["id"])
-        if disconnect_result.get("error") is not None:
-            raise signals.TestFailure(
-                self.gatt_disconnect_err_message.format(
-                    connect_result.get("error")))
-        self.log.info("Disconnection Successful...")
-        self.fuchsia_server_dut.sl4f.ble_lib.bleStopBleAdvertising()
-
-    def test_connect_reconnect_n_iterations_over_le(self):
-        """Test GATT reconnection n times.
-
-        Verify that the GATT client device can discover and connect to
-        a perpheral n times. Default value is 1000.
-
-        Steps:
-        1. Setup Ble advertisement on peripheral with unique advertisement
-            name.
-        2. GATT client scans for peripheral advertisement.
-        3. Upon find the advertisement, send a connection request to
-            peripheral.
-
-        Expected Result:
-        Verify that there are no errors after each GATT connection.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: GATT
-        Priority: 1
-        """
-        for i in range(self.default_iterations):
-            self.log.info("Starting iteration {}".format(i + 1))
-            self._orchestrate_single_connect_disconnect()
-            self.log.info("Iteration {} successful".format(i + 1))
-        raise signals.TestPass("Success")
diff --git a/src/antlion/tests/bt/gatt/GattServerSetupTest.py b/src/antlion/tests/bt/gatt/GattServerSetupTest.py
deleted file mode 100644
index 035374a..0000000
--- a/src/antlion/tests/bt/gatt/GattServerSetupTest.py
+++ /dev/null
@@ -1,890 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This test exercises basic setup of various GATT server configurations.
-
-Setup:
-This test only requires one fuchsia device as the purpose is to test
-different configurations of valid GATT services.
-"""
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-
-import gatt_server_databases as database
-
-
-class GattServerSetupTest(BaseTestClass):
-    err_message = "Setting up database failed with: {}"
-
-    def setup_class(self):
-        super().setup_class()
-        self.fuchsia_dut = self.fuchsia_devices[0]
-
-    def setup_database(self, database):
-        setup_result = self.fuchsia_dut.sl4f.gatts_lib.publishServer(database)
-        if setup_result.get("error") is None:
-            signals.TestPass(setup_result.get("result"))
-        else:
-            raise signals.TestFailure(
-                self.err_message.format(setup_result.get("error")))
-
-    def test_teardown(self):
-        self.fuchsia_dut.sl4f.gatts_lib.closeServer()
-
-    def test_single_primary_service(self):
-        """Test GATT Server Setup: Single Primary Service
-
-        Test a single primary service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.SINGLE_PRIMARY_SERVICE)
-
-    def test_single_secondary_service(self):
-        """Test GATT Server Setup: Single Secondary Service
-
-        Test a single secondary service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.SINGLE_SECONDARY_SERVICE)
-
-    def test_primary_and_secondary_service(self):
-        """Test GATT Server Setup: Primary and secondary service
-
-        Test primary and secondary service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.PRIMARY_AND_SECONDARY_SERVICES)
-
-    def test_duplicate_services(self):
-        """Test GATT Server Setup: Duplicate service uuids
-
-        Test duplicate service uuids as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.DUPLICATE_SERVICES)
-
-    ### Begin SIG defined services ###
-
-    def test_alert_notification_service(self):
-        """Test GATT Server Setup: Alert Notification Service
-
-        Test Alert Notification Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.ALERT_NOTIFICATION_SERVICE)
-
-    def test_automation_io_service(self):
-        """Test GATT Server Setup: Automation IO
-
-        Test Automation IO as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.AUTOMATION_IO_SERVICE)
-
-    def test_battery_service(self):
-        """Test GATT Server Setup: Battery Service
-
-        Test Battery Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.BATTERY_SERVICE)
-
-    def test_blood_pressure_service(self):
-        """Test GATT Server Setup: Blood Pressure
-
-        Test Blood Pressure as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.BLOOD_PRESSURE_SERVICE)
-
-    def test_body_composition_service(self):
-        """Test GATT Server Setup: Body Composition
-
-        Test Body Composition as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.BODY_COMPOSITION_SERVICE)
-
-    def test_bond_management_service(self):
-        """Test GATT Server Setup: Bond Management Service
-
-        Test Bond Management Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.BOND_MANAGEMENT_SERVICE)
-
-    def test_continuous_glucose_monitoring_service(self):
-        """Test GATT Server Setup: Continuous Glucose Monitoring
-
-        Test Continuous Glucose Monitoring as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.CONTINUOUS_GLUCOSE_MONITORING_SERVICE)
-
-    def test_current_time_service(self):
-        """Test GATT Server Setup: Current Time Service
-
-        Test Current Time Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.CURRENT_TIME_SERVICE)
-
-    def test_cycling_power_service(self):
-        """Test GATT Server Setup: Cycling Power
-
-        Test Cycling Power as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.CYCLING_POWER_SERVICE)
-
-    def test_cycling_speed_and_cadence_service(self):
-        """Test GATT Server Setup: Cycling Speed and Cadence
-
-        Test Cycling Speed and Cadence as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.CYCLING_SPEED_AND_CADENCE_SERVICE)
-
-    def test_device_information_service(self):
-        """Test GATT Server Setup: Device Information
-
-        Test Device Information as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.DEVICE_INFORMATION_SERVICE)
-
-    def test_environmental_sensing_service(self):
-        """Test GATT Server Setup: Environmental Sensing
-
-        Test Environmental Sensing as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.ENVIRONMENTAL_SENSING_SERVICE)
-
-    def test_fitness_machine_service(self):
-        """Test GATT Server Setup: Fitness Machine
-
-        Test Fitness Machine as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.FITNESS_MACHINE_SERVICE)
-
-    def test_glucose_service(self):
-        """Test GATT Server Setup: Glucose
-
-        Test Glucose as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.GLUCOSE_SERVICE)
-
-    def test_health_thermometer_service(self):
-        """Test GATT Server Setup: Health Thermometer
-
-        Test Health Thermometer as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.HEALTH_THERMOMETER_SERVICE)
-
-    def test_heart_rate_service(self):
-        """Test GATT Server Setup: Heart Rate
-
-        Test Heart Rate as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.HEART_RATE_SERVICE)
-
-    def test_http_proxy_service(self):
-        """Test GATT Server Setup: HTTP Proxy
-
-        Test HTTP Proxy as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.HTTP_PROXY_SERVICE)
-
-    def test_human_interface_device_service(self):
-        """Test GATT Server Setup: Human Interface Device
-
-    Test Human Interface Device as a GATT server input.
-
-    Steps:
-    1. Setup input database
-
-    Expected Result:
-    Verify that there are no errors after setting up the input database.
-
-    Returns:
-      signals.TestPass if no errors
-      signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.HUMAN_INTERFACE_DEVICE_SERVICE)
-
-    def test_immediate_alert_service(self):
-        """Test GATT Server Setup: Immediate Alert
-
-        Test Immediate Alert as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.IMMEDIATE_ALERT_SERVICE)
-
-    def test_indoor_positioning_service(self):
-        """Test GATT Server Setup: Indoor Positioning
-
-        Test Indoor Positioning as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.INDOOR_POSITIONING_SERVICE)
-
-    def test_insulin_delivery_service(self):
-        """Test GATT Server Setup: Insulin Delivery
-
-        Test Insulin Delivery as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.INSULIN_DELIVERY_SERVICE)
-
-    def test_internet_protocol_support_service(self):
-        """Test GATT Server Setup: Internet Protocol Support Service
-
-        Test Internet Protocol Support Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.INTERNET_PROTOCOL_SUPPORT_SERVICE)
-
-    def test_link_loss_service(self):
-        """Test GATT Server Setup: Link Loss
-
-        Test Link Loss as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.LINK_LOSS_SERVICE)
-
-    def test_location_and_navigation_service(self):
-        """Test GATT Server Setup: Location and Navigation
-
-        Test Location and Navigation as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.LOCATION_AND_NAVIGATION_SERVICE)
-
-    def test_mesh_provisioning_service(self):
-        """Test GATT Server Setup: Mesh Provisioning Service
-
-        Test Mesh Provisioning Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.MESH_PROVISIONING_SERVICE)
-
-    def test_mesh_proxy_service(self):
-        """Test GATT Server Setup: Mesh Proxy Service
-
-        Test Mesh Proxy Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.MESH_PROXY_SERVICE)
-
-    def test_next_dst_change_service(self):
-        """Test GATT Server Setup: Next DST Change Service
-
-        Test Next DST Change Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.NEXT_DST_CHANGE_SERVICE)
-
-    def test_object_transfer_service(self):
-        """Test GATT Server Setup: Object Transfer Service
-
-        Test Object Transfer Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.OBJECT_TRANSFER_SERVICE)
-
-    def test_phone_alert_status_service(self):
-        """Test GATT Server Setup: Phone Alert Status Service
-
-        Test Phone Alert Status Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.PHONE_ALERT_STATUS_SERVICE)
-
-    def test_pulse_oximeter_service(self):
-        """Test GATT Server Setup: Pulse Oximeter Service
-
-        Test Pulse Oximeter Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.PULSE_OXIMETER_SERVICE)
-
-    def test_reconnection_configuration_service(self):
-        """Test GATT Server Setup: Reconnection Configuration
-
-        Test Reconnection Configuration as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.RECONNECTION_CONFIGURATION_SERVICE)
-
-    def test_reference_time_update_service(self):
-        """Test GATT Server Setup: Reference Time Update Service
-
-        Test Reference Time Update Service as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.REFERENCE_TIME_UPDATE_SERVICE)
-
-    def test_running_speed_and_cadence_service(self):
-        """Test GATT Server Setup: Running Speed and Cadence
-
-        Test Running Speed and Cadence as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.RUNNING_SPEED_AND_CADENCE_SERVICE)
-
-    def test_scan_parameters_service(self):
-        """Test GATT Server Setup: Scan Parameters
-
-        Test Scan Parameters as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.SCAN_PARAMETERS_SERVICE)
-
-    def test_transport_discovery_service(self):
-        """Test GATT Server Setup: Transport Discovery
-
-        Test Transport Discovery as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.TRANSPORT_DISCOVERY_SERVICE)
-
-    def test_tx_power_service(self):
-        """Test GATT Server Setup: Tx Power
-
-        Test Tx Power as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.TX_POWER_SERVICE)
-
-    def test_user_data_service(self):
-        """Test GATT Server Setup: User Data
-
-        Test User Data as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.USER_DATA_SERVICE)
-
-    def test_weight_scale_service(self):
-        """Test GATT Server Setup: Weight Scale
-
-        Test Weight Scale as a GATT server input.
-
-        Steps:
-        1. Setup input database
-
-        Expected Result:
-        Verify that there are no errors after setting up the input database.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during setup
-
-        TAGS: GATT
-        Priority: 1
-        """
-        self.setup_database(database.WEIGHT_SCALE_SERVICE)
-
-    ### End SIG defined services ###
diff --git a/src/antlion/tests/bt/gatt/gatt_server_databases.py b/src/antlion/tests/bt/gatt/gatt_server_databases.py
deleted file mode 100644
index ecdf40d..0000000
--- a/src/antlion/tests/bt/gatt/gatt_server_databases.py
+++ /dev/null
@@ -1,2491 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-GATT server dictionaries which will be setup in various tests.
-"""
-
-from antlion.test_utils.bt.bt_constants import gatt_characteristic
-from antlion.test_utils.bt.bt_constants import gatt_descriptor
-from antlion.test_utils.bt.bt_constants import gatt_service_types
-from antlion.test_utils.bt.bt_constants import gatt_char_types
-from antlion.test_utils.bt.bt_constants import gatt_characteristic_value_format
-from antlion.test_utils.bt.bt_constants import gatt_char_desc_uuids
-
-SINGLE_PRIMARY_SERVICE = {
-    'services': [{
-        'uuid': '00001802-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-    }]
-}
-
-SINGLE_SECONDARY_SERVICE = {
-    'services': [{
-        'uuid': '00001802-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['secondary'],
-    }]
-}
-
-PRIMARY_AND_SECONDARY_SERVICES = {
-    'services': [{
-        'uuid': '00001802-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-    }, {
-        'uuid': '00001803-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['secondary'],
-    }]
-}
-
-DUPLICATE_SERVICES = {
-    'services': [{
-        'uuid': '00001802-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-    }, {
-        'uuid': '00001802-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-    }]
-}
-
-### Begin SIG defined services ###
-# yapf: disable
-
-# TODO: Reconcile all the proper security parameters of each service.
-# Some are correct, others are not.
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.alert_notification.xml
-ALERT_NOTIFICATION_SERVICE = {
-    'services': [{
-        'uuid': '00001811-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a47-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a46-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a48-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }, {
-            'uuid': '00002a45-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a44-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.automation_io.xml
-AUTOMATION_IO_SERVICE = {
-    'services': [{
-        'uuid': '00001815-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a56-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }, {
-                'uuid': '00002904-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_ext_props']
-            }, {
-                'uuid': '0000290a-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290e-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '00002909-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'],
-            }]
-        }, {
-            'uuid': '00002a58-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'] |
-            gatt_characteristic['write_type_signed'] |
-            gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }, {
-                'uuid': '00002904-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_ext_props']
-            }, {
-                'uuid': '0000290a-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290e-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '00002909-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'],
-            }, {
-                'uuid': '00002906-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'],
-            }]
-        }, {
-            'uuid': '00002a5a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.battery_service.xml
-BATTERY_SERVICE = {
-    'services': [{
-        'uuid': '0000180f-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a19-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }, {
-                'uuid': '00002904-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.blood_pressure.xml
-BLOOD_PRESSURE_SERVICE = {
-    'services': [{
-        'uuid': '00001810-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a35-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a36-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }],
-        }, {
-            'uuid': '00002a49-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.body_composition.xml
-BODY_COMPOSITION_SERVICE = {
-    'services': [{
-        'uuid': '0000181b-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a9b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a9c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }],
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.bond_management.xml
-BOND_MANAGEMENT_SERVICE = {
-    'services': [{
-        'uuid': '0000181e-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002aac-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002aa4-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }
-
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.continuous_glucose_monitoring.xml
-CONTINUOUS_GLUCOSE_MONITORING_SERVICE = {
-    'services': [{
-        'uuid': '0000180f-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002aa7-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aa7-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002aa8-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002aa9-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002aaa-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aab-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a52-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aac-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.current_time.xml
-CURRENT_TIME_SERVICE = {
-    'services': [{
-        'uuid': '00001805-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a2b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a0f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }],
-        }, {
-            'uuid': '00002a14-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.cycling_power.xml
-CYCLING_POWER_SERVICE = {
-    'services': [{
-        'uuid': '00001818-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a63-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-            }, {
-                'uuid': gatt_char_desc_uuids['server_char_cfg'],
-            }]
-        }, {
-            'uuid': '00002a65-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a5d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a64-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a66-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.cycling_speed_and_cadence.xml
-CYCLING_SPEED_AND_CADENCE_SERVICE = {
-    'services': [{
-        'uuid': '00001816-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a5b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-            }]
-        }, {
-            'uuid': '00002a5c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a5d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a55-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.device_information.xml
-DEVICE_INFORMATION_SERVICE = {
-    'services': [{
-        'uuid': '00001816-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a29-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a24-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a25-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a27-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a26-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a28-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a23-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a2a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a50-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.environmental_sensing.xml
-ENVIRONMENTAL_SENSING_SERVICE = {
-    'services': [{
-        'uuid': '0000181a-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a7d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_extended_props'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a73-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        }, {
-            'uuid': '00002a72-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        }, {
-            'uuid': '00002a7b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a6c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a74-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a7a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a6f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a77-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a75-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a78-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a6d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a6e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a71-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a76-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a79-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002aa3-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002a2c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002aa0-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-        {
-            'uuid': '00002aa1-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_extended_props'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': '0000290c-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290d-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': '0000290b-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_user_desc'],
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            },
-            ]
-        },
-
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.fitness_machine.xml
-FITNESS_MACHINE_SERVICE = {
-    'services': [{
-        'uuid': '00001826-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002acc-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002acd-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ace-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002acf-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ad0-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ad1-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ad2-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ad3-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ad4-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002ad5-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002ad6-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002ad8-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002ad7-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test'
-        }, {
-            'uuid': '00002ad9-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ada-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }
-
-
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.glucose.xml
-GLUCOSE_SERVICE = {
-    'services': [{
-        'uuid': '00001808-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a18-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-            }]
-        }, {
-            'uuid': '00002a34-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a51-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a52-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.health_thermometer.xml
-HEALTH_THERMOMETER_SERVICE = {
-    'services': [{
-        'uuid': '00001809-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a1c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg'],
-            }]
-        }, {
-            'uuid': '00002a1d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a1e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a21-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_write'] |
-            gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }, {
-                'uuid': gatt_char_desc_uuids['char_valid_range'],
-                'permissions': gatt_descriptor['permission_read'],
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.heart_rate.xml
-HEART_RATE_SERVICE = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a37-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a38-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-        }, {
-            'uuid': '00002a39-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.http_proxy.xml
-HTTP_PROXY_SERVICE = {
-    'services': [{
-        'uuid': '00001823-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002ab6-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['property_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002ab7-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-        }, {
-            'uuid': '00002ab9-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 8,
-        }, {
-            'uuid': '00002aba-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 8,
-        }, {
-            'uuid': '00002ab8-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 2,
-        }, {
-            'uuid': '00002abb-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        },
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.human_interface_device.xml
-HUMAN_INTERFACE_DEVICE_SERVICE = {
-    'services': [{
-        'uuid': '00001812-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a4e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a4d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }, {
-                'uuid': '00002908-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'] |
-                gatt_descriptor['permission_write'],
-            }]
-        }, {
-            'uuid': '00002a4b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': '00002907-0000-1000-8000-00805f9b34fb',
-                'permissions': gatt_descriptor['permission_read'],
-            }]
-        }, {
-            'uuid': '00002a22-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        },{
-            'uuid': '00002a32-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_write'] |
-            gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }, {
-            'uuid': '00002a33-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a4a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }, {
-            'uuid': '00002a4c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.immediate_alert.xml
-IMMEDIATE_ALERT_SERVICE = {
-    'services': [{
-        'uuid': '0000180d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a06-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.indoor_positioning.xml
-INDOOR_POSITIONING_SERVICE = {
-    'services': [{
-        'uuid': '00001821-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a06-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a38-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aad-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aae-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aaf-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab0-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab1-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab2-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab3-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab4-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }, {
-            'uuid': '00002ab5-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'] |
-            gatt_characteristic['property_read'] | gatt_characteristic['property_broadcast'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write_signed_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['server_char_cfg']
-            }]
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.insulin_delivery.xml
-INSULIN_DELIVERY_SERVICE = {
-    'services': [{
-        'uuid': '0000183a-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002b20-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b21-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b22-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b23-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1
-        }, {
-            'uuid': '00002b24-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b25-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b26-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b27-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b28-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.internet_protocol_support.xml
-INTERNET_PROTOCOL_SUPPORT_SERVICE = {
-    'services': [{
-        'uuid': '00001820-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.link_loss.xml
-LINK_LOSS_SERVICE = {
-    'services': [{
-        'uuid': '00001803-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a06-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'] |
-            gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.location_and_navigation.xml
-LOCATION_AND_NAVIGATION_SERVICE = {
-    'services': [{
-        'uuid': '00001819-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a6a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a67-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'body',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a69-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-        }, {
-            'uuid': '00002a6b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a68-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        },
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.mesh_provisioning.xml
-MESH_PROVISIONING_SERVICE = {
-    'services': [{
-        'uuid': '00001827-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002adb-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002adc-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        },
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.mesh_proxy.xml
-MESH_PROXY_SERVICE = {
-    'services': [{
-        'uuid': '00001828-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002add-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002ade-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        },
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.next_dst_change.xml
-NEXT_DST_CHANGE_SERVICE = {
-    'services': [{
-        'uuid': '00001807-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a11-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint32'],
-            'value': 1549903904,
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.object_transfer.xml
-OBJECT_TRANSFER_SERVICE = {
-    'services': [{
-        'uuid': '00001825-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002abd-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002abe-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002abf-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac0-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac1-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac2-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac3-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac4-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac5-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac6-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac7-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_read'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'] |
-            gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }, {
-            'uuid': '00002ac8-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 0,
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.phone_alert_status.xml
-PHONE_ALERT_STATUS_SERVICE = {
-    'services': [{
-        'uuid': '0000180e-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a3f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a41-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a40-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        },
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.pulse_oximeter.xml
-PULSE_OXIMETER_SERVICE = {
-    'services': [{
-        'uuid': '00001822-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a5e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a5f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a60-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        }, {
-            'uuid': '00002a52-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.reconnection_configuration.xml
-RECONNECTION_CONFIGURATION_SERVICE = {
-    'services': [{
-        'uuid': '00001829-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002b1d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002b1e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'] |
-            gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002b1f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.reference_time_update.xml
-REFERENCE_TIME_UPDATE_SERVICE = {
-    'services': [{
-        'uuid': '00001806-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a16-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-        }, {
-            'uuid': '00002a17-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.running_speed_and_cadence.xml
-RUNNING_SPEED_AND_CADENCE_SERVICE = {
-    'services': [{
-        'uuid': '00001814-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a53-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['string'],
-            'value': 'test',
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a54-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        }, {
-            'uuid': '00002a5d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        }, {
-            'uuid': '00002a55-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write'] |
-            gatt_characteristic['property_indicate'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.scan_parameters.xml
-SCAN_PARAMETERS_SERVICE = {
-    'services': [{
-        'uuid': '00001813-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a4f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_write_no_response'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-        }, {
-            'uuid': '00002a31-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_notify'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.transport_discovery.xml
-TRANSPORT_DISCOVERY_SERVICE = {
-    'services': [{
-        'uuid': '00001824-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002abc-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_write'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }
-        ]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.tx_power.xml
-TX_POWER_SERVICE = {
-    'services': [{
-        'uuid': '00001804-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a07-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['uint8'],
-            'value': -24,
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.user_data.xml
-USER_DATA_SERVICE = {
-    'services': [{
-        'uuid': '0000181c-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a8a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a90-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a87-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a80-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a85-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a8c-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a98-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a8e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a96-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a92-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a91-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a7f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a83-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a93-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a86-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a97-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a8f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a88-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a89-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a7e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a84-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a81-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a82-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a8b-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a94-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a95-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a99-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'] |
-            gatt_characteristic['property_notify'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002a9a-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }, {
-            'uuid': '00002a9f-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }, {
-            'uuid': '00002aa2-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'] |
-            gatt_characteristic['property_write'],
-            'permissions': gatt_characteristic['permission_read_encrypted_mitm'] |
-            gatt_characteristic['permission_write_encrypted_mitm'],
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 10,
-        }]
-    }]
-}
-
-# https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.weight_scale.xml
-WEIGHT_SCALE_SERVICE = {
-    'services': [{
-        'uuid': '0000181d-0000-1000-8000-00805f9b34fb',
-        'type': gatt_service_types['primary'],
-        'characteristics': [{
-            'uuid': '00002a9e-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_read'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 1
-        }, {
-            'uuid': '00002a9d-0000-1000-8000-00805f9b34fb',
-            'properties': gatt_characteristic['property_indicate'],
-            'permissions': 0x0,
-            'value_type': gatt_characteristic_value_format['sint8'],
-            'value': 100,
-            'descriptors': [{
-                'uuid': gatt_char_desc_uuids['client_char_cfg']
-            }]
-        }
-        ]
-    }]
-}
-
-
-# yapf: enable
-### End SIG defined services ###
diff --git a/src/antlion/tests/bt/pts/GATT_PTS_INSTRUCTIONS b/src/antlion/tests/bt/pts/GATT_PTS_INSTRUCTIONS
deleted file mode 100644
index fcfccf4..0000000
--- a/src/antlion/tests/bt/pts/GATT_PTS_INSTRUCTIONS
+++ /dev/null
@@ -1,198 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-GATT
-=================================================================
-Note: Bug in PTS forces GATT operations to be over BR/EDR. To run tests over LE disable BR/EDR in ICS when running tests (ICS Name TSCP_GATT_2_1). To Run over BR/EDR re-enable the same ICS value.
-
-Note: While using ACTS cmd line tools, if there is ever an issue with connecting to PTS make sure the
-unique ID is properly set by running these commands:
-     tool_set_target_device_name PTS
-     tool_refresh_unique_id
-
-Cmd Line Tools in use:
-    ACTS:
-        FuchsiaCmdLineTest
-    Fuchsia CLI:
-        ...
-
-GATT/CL/GAC/BV-01-C
-    TBD
-
-GATT/CL/GAD/BV-01-C
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_list_services
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
-GATT/CL/GAD/BV-02-C
-    Bug: BT-764
-
-GATT/CL/GAD/BV-03-C
-    Note: Bug BT-764 would simplify this testcase.
-    Note: If device is already paired, pairing tool instructions are not needed.
-    Fuchsia cmd-line-tool: bt-pairing-tool
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify confirmation
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    [Fuchsia interaction] Type 'y' on the bt-pairing-tool
-    [PTS Interaction] Enter pin from bt-pairing-tool to PTS
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
-GATT/CL/GAD/BV-04-C
-    Note: Bug BT-764 would simplify this testcase.
-    Note: If device is already paired, pairing tool instructions are not needed.
-    Fuchsia cmd-line-tool: bt-pairing-tool
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify confirmation
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    [Fuchsia interaction] Type 'y' on the bt-pairing-tool
-    [PTS Interaction] Enter pin from bt-pairing-tool to PTS
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
-GATT/CL/GAD/BV-05-C
-    Note: Bug BT-764 would simplify this testcase.
-    Note: If device is already paired, pairing tool instructions are not needed.
-    Fuchsia cmd-line-tool: bt-pairing-tool
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify confirmation
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    [Fuchsia interaction] Type 'y' on the bt-pairing-tool
-    [PTS Interaction] Enter pin from bt-pairing-tool to PTS
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_chars
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
-GATT/CL/GAD/BV-06-C
-    Note: Bug BT-764 would simplify this testcase.
-    Note: If device is already paired, pairing tool instructions are not needed.
-    Fuchsia cmd-line-tool: bt-pairing-tool
-    gattc_connect
-    gattc_real_all_desc
-    [PTS Interaction] Verify confirmation
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_desc
-    [PTS Interaction] Verify values
-    [Fuchsia interaction] Type 'y' on the bt-pairing-tool
-    [PTS Interaction] Enter pin from bt-pairing-tool to PTS
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_desc
-    [PTS Interaction] Verify values
-    gattc_disconnect
-    gattc_connect
-    gattc_real_all_desc
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
-GATT/CL/GAD/BV-07-C
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-
-GATT/CL/GAD/BV-08-C
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-    [PTS Interaction] Verify values
-
-GATTT/CL/GAR/BV-01-C
-    Note: Bug BT-451 would simplify this testcase.
-    Note: If device is already paired, pairing tool instructions are not needed.
-    Fuchsia cmd-line-tool: bt-pairing-tool
-    gattc_connect
-    gattc_read_all_chars
-    Fuchsia interaction] Type 'y' on the bt-pairing-tool
-    [PTS Interaction] Enter pin from bt-pairing-tool to PTS
-    [PTS Interaction] Verify values
-    gattc_disconnect
-
diff --git a/src/antlion/tests/dhcp/Dhcpv4InteropTest.py b/src/antlion/tests/dhcp/Dhcpv4InteropTest.py
deleted file mode 100644
index 89aef29..0000000
--- a/src/antlion/tests/dhcp/Dhcpv4InteropTest.py
+++ /dev/null
@@ -1,515 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import random
-import time
-import re
-
-from antlion import asserts
-from antlion import utils
-from antlion.controllers.access_point import setup_ap, AccessPoint
-from antlion.controllers.ap_lib import dhcp_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.controllers.utils_lib.commands import ip
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-
-class Dhcpv4InteropFixture(WifiBaseTest):
-    """Test helpers for validating DHCPv4 Interop
-
-    Test Bed Requirement:
-    * One Android device or Fuchsia device
-    * One Access Point
-    """
-    access_point: AccessPoint
-
-    def setup_class(self):
-        super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
-
-        self.access_point = self.access_points[0]
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockAcquireBright()
-                ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockRelease()
-                ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.access_point.stop_all_aps()
-
-    def connect(self, ap_params):
-        asserts.assert_true(
-            self.dut.associate(ap_params['ssid'],
-                               target_pwd=ap_params['password'],
-                               target_security=ap_params['target_security']),
-            'Failed to connect.')
-
-    def setup_ap(self):
-        """Generates a hostapd config and sets up the AP with that config.
-        Does not run a DHCP server.
-
-        Returns: A dictionary of information about the AP.
-        """
-        ssid = utils.rand_ascii_str(20)
-        security_mode = hostapd_constants.WPA2_STRING
-        security_profile = Security(
-            security_mode=security_mode,
-            password=generate_random_password(length=20),
-            wpa_cipher='CCMP',
-            wpa2_cipher='CCMP')
-        password = security_profile.password
-        target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-            security_mode)
-
-        ap_ids = setup_ap(access_point=self.access_point,
-                          profile_name='whirlwind',
-                          mode=hostapd_constants.MODE_11N_MIXED,
-                          channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                          n_capabilities=[],
-                          ac_capabilities=[],
-                          force_wmm=True,
-                          ssid=ssid,
-                          security=security_profile,
-                          password=password)
-
-        if len(ap_ids) > 1:
-            raise Exception("Expected only one SSID on AP")
-
-        configured_subnets = self.access_point.get_configured_subnets()
-        if len(configured_subnets) > 1:
-            raise Exception("Expected only one subnet on AP")
-        router_ip = configured_subnets[0].router
-        network = configured_subnets[0].network
-
-        self.access_point.stop_dhcp()
-
-        return {
-            'ssid': ssid,
-            'password': password,
-            'target_security': target_security,
-            'ip': router_ip,
-            'network': network,
-            'id': ap_ids[0],
-        }
-
-    def device_can_ping(self, dest_ip):
-        """Checks if the DUT can ping the given address.
-
-        Returns: True if can ping, False otherwise"""
-        self.log.info('Attempting to ping %s...' % dest_ip)
-        ping_result = self.dut.can_ping(dest_ip, count=2)
-        if ping_result:
-            self.log.info('Success pinging: %s' % dest_ip)
-        else:
-            self.log.info('Failure pinging: %s' % dest_ip)
-        return ping_result
-
-    def get_device_ipv4_addr(self, interface=None, timeout=20):
-        """Checks if device has an ipv4 private address. Sleeps 1 second between
-        retries.
-
-        Args:
-            interface: string, name of interface from which to get ipv4 address.
-
-        Raises:
-            ConnectionError, if DUT does not have an ipv4 address after all
-            timeout.
-
-        Returns:
-            The device's IP address
-
-        """
-        self.log.debug('Fetching updated WLAN interface list')
-        if interface is None:
-            interface = self.dut.device.wlan_client_test_interface_name
-        self.log.info(
-            'Checking if DUT has received an ipv4 addr on iface %s. Will retry for %s '
-            'seconds.' % (interface, timeout))
-        timeout = time.time() + timeout
-        while time.time() < timeout:
-            ip_addrs = self.dut.device.get_interface_ip_addresses(interface)
-
-            if len(ip_addrs['ipv4_private']) > 0:
-                ip = ip_addrs['ipv4_private'][0]
-                self.log.info('DUT has an ipv4 address: %s' % ip)
-                return ip
-            else:
-                self.log.debug(
-                    'DUT does not yet have an ipv4 address...retrying in 1 '
-                    'second.')
-                time.sleep(1)
-        else:
-            raise ConnectionError('DUT failed to get an ipv4 address.')
-
-    def run_test_case_expect_dhcp_success(self, settings):
-        """Starts the AP and DHCP server, and validates that the client
-        connects and obtains an address.
-
-        Args:
-            settings: a dictionary containing:
-                dhcp_parameters: a dictionary of DHCP parameters
-                dhcp_options: a dictionary of DHCP options
-        """
-        ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(
-            subnet=ap_params['network'],
-            router=ap_params['ip'],
-            additional_parameters=settings['dhcp_parameters'],
-            additional_options=settings['dhcp_options'])
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-
-        self.log.debug('DHCP Configuration:\n' +
-                       dhcp_conf.render_config_file() + "\n")
-
-        dhcp_logs_before = self.access_point.get_dhcp_logs().split('\n')
-        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-        self.connect(ap_params=ap_params)
-
-        # Typical log lines look like:
-        # dhcpd[26695]: DHCPDISCOVER from f8:0f:f9:3d:ce:d1 via wlan1
-        # dhcpd[26695]: DHCPOFFER on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
-        # dhcpd[26695]: DHCPREQUEST for 192.168.9.2 (192.168.9.1) from f8:0f:f9:3d:ce:d1 via wlan1
-        # dhcpd[26695]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
-
-        try:
-            ip = self.get_device_ipv4_addr()
-        except ConnectionError:
-            self.log.warn(dhcp_logs)
-            asserts.fail(f'DUT failed to get an IP address')
-
-        # Get updates to DHCP logs
-        dhcp_logs = self.access_point.get_dhcp_logs()
-        for line in dhcp_logs_before:
-            dhcp_logs = dhcp_logs.replace(line, '')
-
-        expected_string = f'DHCPDISCOVER from'
-        asserts.assert_equal(
-            dhcp_logs.count(expected_string), 1,
-            f'Incorrect count of DHCP Discovers ("{expected_string}") in logs:\n'
-            + dhcp_logs + "\n")
-
-        expected_string = f'DHCPOFFER on {ip}'
-        asserts.assert_equal(
-            dhcp_logs.count(expected_string), 1,
-            f'Incorrect count of DHCP Offers ("{expected_string}") in logs:\n'
-            + dhcp_logs + "\n")
-
-        expected_string = f'DHCPREQUEST for {ip}'
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 1,
-            f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
-            + dhcp_logs + "\n")
-
-        expected_string = f'DHCPACK on {ip}'
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 1,
-            f'Incorrect count of DHCP Acks ("{expected_string}") in logs: ' +
-            dhcp_logs + "\n")
-
-        asserts.assert_true(self.device_can_ping(ap_params['ip']),
-                            f'DUT failed to ping router at {ap_params["ip"]}')
-
-
-class Dhcpv4InteropFixtureTest(Dhcpv4InteropFixture):
-    """Tests which validate the behavior of the Dhcpv4InteropFixture.
-
-    In theory, these are more similar to unit tests than ACTS tests, but
-    since they interact with hardware (specifically, the AP), we have to
-    write and run them like the rest of the ACTS tests."""
-
-    def test_invalid_options_not_accepted(self):
-        """Ensures the DHCP server doesn't accept invalid options"""
-        ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(subnet=ap_params['network'],
-                                         router=ap_params['ip'],
-                                         additional_options={'foo': 'bar'})
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-        with asserts.assert_raises_regex(Exception, r'failed to start'):
-            self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
-    def test_invalid_parameters_not_accepted(self):
-        """Ensures the DHCP server doesn't accept invalid parameters"""
-        ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(subnet=ap_params['network'],
-                                         router=ap_params['ip'],
-                                         additional_parameters={'foo': 'bar'})
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-        with asserts.assert_raises_regex(Exception, r'failed to start'):
-            self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
-    def test_no_dhcp_server_started(self):
-        """Validates that the test fixture does not start a DHCP server."""
-        ap_params = self.setup_ap()
-        self.connect(ap_params=ap_params)
-        with asserts.assert_raises(ConnectionError):
-            self.get_device_ipv4_addr()
-
-
-class Dhcpv4InteropBasicTest(Dhcpv4InteropFixture):
-    """DhcpV4 tests which validate basic DHCP client/server interactions."""
-
-    def test_basic_dhcp_assignment(self):
-        self.run_test_case_expect_dhcp_success(settings={
-            'dhcp_options': {},
-            'dhcp_parameters': {}
-        })
-
-    def test_pool_allows_unknown_clients(self):
-        self.run_test_case_expect_dhcp_success(settings={
-            'dhcp_options': {},
-            'dhcp_parameters': {
-                'allow': 'unknown-clients'
-            }
-        })
-
-    def test_pool_disallows_unknown_clients(self):
-        ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(
-            subnet=ap_params['network'],
-            router=ap_params['ip'],
-            additional_parameters={'deny': 'unknown-clients'})
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
-        self.connect(ap_params=ap_params)
-        with asserts.assert_raises(ConnectionError):
-            self.get_device_ipv4_addr()
-
-        dhcp_logs = self.access_point.get_dhcp_logs()
-        asserts.assert_true(
-            re.search(r'DHCPDISCOVER from .*no free leases', dhcp_logs),
-            "Did not find expected message in dhcp logs: " + dhcp_logs + "\n")
-
-    def test_lease_renewal(self):
-        """Validates that a client renews their DHCP lease."""
-        LEASE_TIME = 30
-        ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(subnet=ap_params['network'],
-                                         router=ap_params['ip'])
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf],
-                                           default_lease_time=LEASE_TIME,
-                                           max_lease_time=LEASE_TIME)
-        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-        self.connect(ap_params=ap_params)
-        ip = self.get_device_ipv4_addr()
-
-        dhcp_logs_before = self.access_point.get_dhcp_logs()
-        SLEEP_TIME = LEASE_TIME + 3
-        self.log.info(f'Sleeping {SLEEP_TIME}s to await DHCP renewal')
-        time.sleep(SLEEP_TIME)
-
-        dhcp_logs_after = self.access_point.get_dhcp_logs()
-        dhcp_logs = dhcp_logs_after.replace(dhcp_logs_before, '')
-        # Fuchsia renews at LEASE_TIME / 2, so there should be at least 2 DHCPREQUESTs in logs.
-        # The log lines look like:
-        # INFO dhcpd[17385]: DHCPREQUEST for 192.168.9.2 from f8:0f:f9:3d:ce:d1 via wlan1
-        # INFO dhcpd[17385]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
-        expected_string = f'DHCPREQUEST for {ip}'
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 2,
-            f'Not enough DHCP renewals ("{expected_string}") in logs: ' +
-            dhcp_logs + "\n")
-
-
-class Dhcpv4DuplicateAddressTest(Dhcpv4InteropFixture):
-
-    def setup_test(self):
-        super().setup_test()
-        self.extra_addresses = []
-        self.ap_params = self.setup_ap()
-        self.ap_ip_cmd = ip.LinuxIpCommand(self.access_point.ssh)
-
-    def teardown_test(self):
-        super().teardown_test()
-        for ip in self.extra_addresses:
-            self.ap_ip_cmd.remove_ipv4_address(self.ap_params['id'], ip)
-
-    def test_duplicate_address_assignment(self):
-        """It's possible for a DHCP server to assign an address that already exists on the network.
-        DHCP clients are expected to perform a "gratuitous ARP" of the to-be-assigned address, and
-        refuse to assign that address. Clients should also recover by asking for a different
-        address.
-        """
-        # Modify subnet to hold fewer addresses.
-        # A '/29' has 8 addresses (6 usable excluding router / broadcast)
-        subnet = next(self.ap_params['network'].subnets(new_prefix=29))
-        subnet_conf = dhcp_config.Subnet(
-            subnet=subnet,
-            router=self.ap_params['ip'],
-            # When the DHCP server is considering dynamically allocating an IP address to a client,
-            # it first sends an ICMP Echo request (a ping) to the address being assigned. It waits
-            # for a second, and if no ICMP Echo response has been heard, it assigns the address.
-            # If a response is heard, the lease is abandoned, and the server does not respond to
-            # the client.
-            # The ping-check configuration parameter can be used to control checking - if its value
-            # is false, no ping check is done.
-            additional_parameters={'ping-check': 'false'})
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-
-        # Add each of the usable IPs as an alias for the router's interface, such that the router
-        # will respond to any pings on it.
-        for ip in subnet.hosts():
-            self.ap_ip_cmd.add_ipv4_address(self.ap_params['id'], ip)
-            # Ensure we remove the address in self.teardown_test() even if the test fails
-            self.extra_addresses.append(ip)
-
-        self.connect(ap_params=self.ap_params)
-        with asserts.assert_raises(ConnectionError):
-            self.get_device_ipv4_addr()
-
-        # Per spec, the flow should be:
-        # Discover -> Offer -> Request -> Ack -> client optionally performs DAD
-        dhcp_logs = self.access_point.get_dhcp_logs()
-        for expected_message in [
-                r'DHCPDISCOVER from \S+',
-                r'DHCPOFFER on [0-9.]+ to \S+',
-                r'DHCPREQUEST for [0-9.]+',
-                r'DHCPACK on [0-9.]+',
-                r'DHCPDECLINE of [0-9.]+ from \S+ via .*: abandoned',
-                r'Abandoning IP address [0-9.]+: declined',
-        ]:
-            asserts.assert_true(
-                re.search(expected_message, dhcp_logs),
-                f'Did not find expected message ({expected_message}) in dhcp logs: {dhcp_logs}'
-                + "\n")
-
-        # Remove each of the IP aliases.
-        # Note: this also removes the router's address (e.g. 192.168.1.1), so pinging the
-        # router after this will not work.
-        while self.extra_addresses:
-            self.ap_ip_cmd.remove_ipv4_address(self.ap_params['id'],
-                                               self.extra_addresses.pop())
-
-        # Now, we should get an address successfully
-        ip = self.get_device_ipv4_addr()
-        dhcp_logs = self.access_point.get_dhcp_logs()
-
-        expected_string = f'DHCPREQUEST for {ip}'
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 1,
-            f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
-            + dhcp_logs + "\n")
-
-        expected_string = f'DHCPACK on {ip}'
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 1,
-            f'Incorrect count of DHCP Acks ("{expected_string}") in logs: ' +
-            dhcp_logs + "\n")
-
-
-class Dhcpv4InteropCombinatorialOptionsTest(Dhcpv4InteropFixture):
-    """DhcpV4 tests which validate combinations of DHCP options."""
-    OPT_NUM_DOMAIN_SEARCH = 119
-    OPT_NUM_DOMAIN_NAME = 15
-
-    def setup_generated_tests(self):
-        self._generate_dhcp_options()
-
-        test_args = []
-        for test in self.DHCP_OPTIONS:
-            for option_list in self.DHCP_OPTIONS[test]:
-                test_args.append(({
-                    'dhcp_options': option_list,
-                    'dhcp_parameters': {}
-                }, ))
-
-        self.generate_tests(test_logic=self.run_test_case_expect_dhcp_success,
-                            name_func=self.generate_test_name,
-                            arg_sets=test_args)
-
-    def generate_test_name(self, settings):
-        return settings["dhcp_options"]["test_name"]
-
-    def _generate_dhcp_options(self):
-        self.DHCP_OPTIONS = {
-            'domain-name-tests': [{
-                'domain-name': '"example.invalid"',
-                'dhcp-parameter-request-list': self.OPT_NUM_DOMAIN_NAME,
-                'test_name': "test_domain_name_invalid_tld"
-            }, {
-                'domain-name': '"example.test"',
-                'dhcp-parameter-request-list': self.OPT_NUM_DOMAIN_NAME,
-                'test_name': "test_domain_name_valid_tld"
-            }],
-            'domain-search-tests': [{
-                'domain-search':
-                '"example.invalid"',
-                'dhcp-parameter-request-list':
-                self.OPT_NUM_DOMAIN_SEARCH,
-                'test_name':
-                "test_domain_search_invalid_tld"
-            }, {
-                'domain-search': '"example.test"',
-                'dhcp-parameter-request-list': self.OPT_NUM_DOMAIN_SEARCH,
-                'test_name': "test_domain_search_valid_tld"
-            }]
-        }
-
-        # The RFC limits DHCP payloads to 576 bytes unless the client signals it can handle larger
-        # payloads, which it does by sending DHCP option 57, "Maximum DHCP Message Size". Despite
-        # being able to accept larger payloads, clients typically don't advertise this.
-        # The test verifies that the client accepts a large message split across multiple ethernet
-        # frames.
-        # The test is created by sending many bytes of options through the domain-name-servers
-        # option, which is of unbounded length (though is compressed per RFC1035 section 4.1.4).
-        typical_ethernet_mtu = 1500
-        self.DHCP_OPTIONS['max-message-size-tests'] = []
-
-        long_dns_setting = ', '.join(
-            f'"ns{num}.example"'
-            for num in random.sample(range(100_000, 1_000_000), 250))
-        # RFC1035 compression means any shared suffix ('.example' in this case) will
-        # be deduplicated. Calculate approximate length by removing that suffix.
-        long_dns_setting_len = len(
-            long_dns_setting.replace(', ', '').replace('"', '').replace(
-                '.example', '').encode('utf-8'))
-        asserts.assert_true(
-            long_dns_setting_len > typical_ethernet_mtu,
-            "Expected to generate message greater than ethernet mtu")
-        self.DHCP_OPTIONS['max-message-size-tests'].append({
-            'dhcp-max-message-size':
-            long_dns_setting_len * 2,
-            'domain-search':
-            long_dns_setting,
-            'dhcp-parameter-request-list':
-            self.OPT_NUM_DOMAIN_SEARCH,
-            'test_name':
-            "test_max_sized_message",
-        })
diff --git a/src/antlion/tests/examples/Sl4fSanityTest.py b/src/antlion/tests/examples/Sl4fSanityTest.py
deleted file mode 100644
index b8429af..0000000
--- a/src/antlion/tests/examples/Sl4fSanityTest.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for verifying SL4F is running on a Fuchsia device and
-can communicate to ACTS successfully.
-
-"""
-from typing import List
-
-from antlion import asserts
-from antlion.base_test import BaseTestClass
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-
-
-class Sl4fSanityTest(BaseTestClass):
-    fuchsia_devices: List[FuchsiaDevice]
-
-    def setup_class(self):
-        super().setup_class()
-
-        asserts.abort_class_if(
-            len(self.fuchsia_devices) == 0,
-            "Sorry, please try verifying FuchsiaDevice is in your config file and try again."
-        )
-
-        self.log.info(
-            "Congratulations! Fuchsia controllers have been initialized successfully!"
-        )
-
-    def test_example(self):
-        for fuchsia_device in self.fuchsia_devices:
-            res = fuchsia_device.sl4f.netstack_lib.netstackListInterfaces()
-            self.log.info(res)
-        self.log.info("Congratulations! You've run your first test.")
-        return True
diff --git a/src/antlion/tests/flash/FlashTest.py b/src/antlion/tests/flash/FlashTest.py
deleted file mode 100644
index 32676d4..0000000
--- a/src/antlion/tests/flash/FlashTest.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for to flash Fuchsia devices and reports the DUT's version of Fuchsia in
-the Sponge test result properties. Uses the built in flashing tool for
-fuchsia_devices.
-"""
-from antlion import asserts
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion.utils import get_device
-
-MAX_FLASH_ATTEMPTS = 3
-
-
-class FlashTest(BaseTestClass):
-
-    def setup_class(self):
-        super().setup_class()
-        self.failed_to_get_version = False
-
-    def teardown_class(self):
-        # Verify that FlashTest successfully reported the DUT version. This is
-        # working around a flaw in ACTS where signals.TestAbortAll does not
-        # report any errors.
-        #
-        # TODO(http://b/253515812): This has been fixed in Mobly already. Remove
-        # teardown_class and change "TestError" to "abort_all" in
-        # test_flash_devices once we move to Mobly.
-        if self.failed_to_get_version:
-            asserts.abort_all('Failed to get DUT version')
-
-        return super().teardown_class()
-
-    def test_flash_devices(self):
-        for device in self.fuchsia_devices:
-            flash_counter = 0
-            while True:
-                try:
-                    device.reboot(reboot_type='flash',
-                                  use_ssh=True,
-                                  unreachable_timeout=120,
-                                  ping_timeout=120)
-                    self.log.info(f'{device.orig_ip} has been flashed.')
-                    break
-                except Exception as err:
-                    self.log.error(
-                        f'Failed to flash {device.orig_ip} with error:\n{err}')
-
-                    if not device.device_pdu_config:
-                        asserts.abort_all(
-                            f'Failed to flash {device.orig_ip} and no PDU available for hard reboot'
-                        )
-
-                    flash_counter = flash_counter + 1
-                    if flash_counter == MAX_FLASH_ATTEMPTS:
-                        asserts.abort_all(
-                            f'Failed to flash {device.orig_ip} after {MAX_FLASH_ATTEMPTS} attempts'
-                        )
-
-                    self.log.info(
-                        f'Hard rebooting {device.orig_ip} and retrying flash.')
-                    device.reboot(reboot_type='hard',
-                                  testbed_pdus=self.pdu_devices)
-
-        # Report the new Fuchsia version
-        try:
-            dut = get_device(self.fuchsia_devices, 'DUT')
-            version = dut.version()
-            self.record_data({'sponge_properties': {
-                'DUT_VERSION': version,
-            }})
-            self.log.info("DUT version found: {}".format(version))
-        except Exception as e:
-            self.failed_to_get_version = True
-            raise signals.TestError(f'Failed to get DUT version: {e}') from e
diff --git a/src/antlion/tests/logging/FuchsiaLoggingTest.py b/src/antlion/tests/logging/FuchsiaLoggingTest.py
deleted file mode 100644
index 782a733..0000000
--- a/src/antlion/tests/logging/FuchsiaLoggingTest.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import signals
-from antlion.base_test import BaseTestClass
-from antlion import asserts
-
-
-class FuchsiaLoggingTest(BaseTestClass):
-
-    def setup_class(self):
-        super().setup_class()
-        self.dut = self.fuchsia_devices[0]
-        self.message = "Logging Test"
-
-    def test_log_err(self):
-        result = self.dut.sl4f.logging_lib.logE(self.message)
-        if result.get("error") is None:
-            signals.TestPass(result.get("result"))
-        else:
-            signals.TestFailure(result.get("error"))
-
-    def test_log_info(self):
-        result = self.dut.sl4f.logging_lib.logI(self.message)
-        if result.get("error") is None:
-            signals.TestPass(result.get("result"))
-        else:
-            signals.TestFailure(result.get("error"))
-
-    def test_log_warn(self):
-        result = self.dut.sl4f.logging_lib.logW(self.message)
-        if result.get("error") is None:
-            signals.TestPass(result.get("result"))
-        else:
-            signals.TestFailure(result.get("error"))
diff --git a/src/antlion/tests/netstack/NetstackIfaceTest.py b/src/antlion/tests/netstack/NetstackIfaceTest.py
deleted file mode 100644
index 5df07c6..0000000
--- a/src/antlion/tests/netstack/NetstackIfaceTest.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import signals
-
-from antlion.base_test import BaseTestClass
-from antlion import asserts
-
-
-class NetstackIfaceTest(BaseTestClass):
-    default_timeout = 10
-    active_scan_callback_list = []
-    active_adv_callback_list = []
-    droid = None
-
-    def setup_class(self):
-        super().setup_class()
-        if (len(self.fuchsia_devices) < 1):
-            self.log.error(
-                "NetstackFuchsiaTest Init: Not enough fuchsia devices.")
-        self.log.info("Running testbed setup with one fuchsia devices")
-        self.dut = self.fuchsia_devices[0]
-
-    def _enable_all_interfaces(self):
-        interfaces = self.dut.sl4f.netstack_lib.netstackListInterfaces()
-        for item in interfaces.get("result"):
-            identifier = item.get('id')
-            self.dut.sl4f.netstack_lib.enableInterface(identifier)
-
-    def setup_test(self):
-        # Always make sure all interfaces listed are in an up state.
-        self._enable_all_interfaces()
-
-    def teardown_test(self):
-        # Always make sure all interfaces listed are in an up state.
-        self._enable_all_interfaces()
-
-    def test_list_interfaces(self):
-        """Test listing all interfaces.
-
-        Steps:
-        1. Call ListInterfaces FIDL api.
-        2. Verify there is at least one interface returned.
-
-        Expected Result:
-        There were no errors in retrieving the list of interfaces.
-        There was at least one interface in the list.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: Netstack
-        Priority: 1
-        """
-        interfaces = self.dut.sl4f.netstack_lib.netstackListInterfaces()
-        if interfaces.get('error') is not None:
-            raise signals.TestFailure("Failed with {}".format(
-                interfaces.get('error')))
-        if len(interfaces.get('result')) < 1:
-            raise signals.TestFailure("No interfaces found.")
-        self.log.info("Interfaces found: {}".format(interfaces.get('result')))
-        raise signals.TestPass("Success")
-
-    def test_toggle_wlan_interface(self):
-        """Test toggling the wlan interface if it exists.
-
-        Steps:
-        1. Call ListInterfaces FIDL api.
-        2. Find the wlan interface.
-        3. Disable the interface.
-        4. Verify interface attributes in a down state.
-        5. Enable the interface.
-        6. Verify interface attributes in an up state.
-
-        Expected Result:
-        WLAN interface was successfully brought down and up again.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-          signals.TestSkip if there are no wlan interfaces.
-
-        TAGS: Netstack
-        Priority: 1
-        """
-
-        def get_wlan_interfaces():
-            result = self.dut.sl4f.netstack_lib.netstackListInterfaces()
-            if (error := result.get('error')):
-                raise signals.TestFailure(
-                    f'unable to list interfaces: {error}')
-            return [
-                interface for interface in result.get('result')
-                if 'wlan' in interface.get('name')
-            ]
-
-        def get_ids(interfaces):
-            return [get_id(interface) for interface in interfaces]
-
-        wlan_interfaces = get_wlan_interfaces()
-        if not wlan_interfaces:
-            raise signals.TestSkip('no wlan interface found')
-        interface_ids = get_ids(wlan_interfaces)
-
-        # Disable the interfaces.
-        for identifier in interface_ids:
-            result = self.dut.sl4f.netstack_lib.disableInterface(identifier)
-            if (error := result.get('error')):
-                raise signals.TestFailure(
-                    f'failed to disable wlan interface {identifier}: {error}')
-
-        # Retrieve the interfaces again.
-        disabled_wlan_interfaces = get_wlan_interfaces()
-        disabled_interface_ids = get_ids(wlan_interfaces)
-
-        if not disabled_interface_ids == interface_ids:
-            raise signals.TestFailure(
-                f'disabled interface IDs do not match original interface IDs: original={interface_ids} disabled={disabled_interface_ids}'
-            )
-
-        # Check the current state of the interfaces.
-        for interface in disabled_interfaces:
-            if len(interface_info.get('ipv4_addresses')) > 0:
-                raise signals.TestFailure(
-                    f'no Ipv4 Address should be present: {interface}')
-
-            # TODO (35981): Verify other values when interface down.
-
-        # Re-enable the interfaces.
-        for identifier in disabled_interface_ids:
-            result = self.dut.sl4f.netstack_lib.enableInterface(identifier)
-            if (error := result.get('error')):
-                raise signals.TestFailure(
-                    f'failed to enable wlan interface {identifier}: {error}')
-
-        # TODO (35981): Verify other values when interface up.
-        raise signals.TestPass("Success")
diff --git a/src/antlion/tests/netstack/NetstackIxiaTest.py b/src/antlion/tests/netstack/NetstackIxiaTest.py
deleted file mode 100644
index bbe90a6..0000000
--- a/src/antlion/tests/netstack/NetstackIxiaTest.py
+++ /dev/null
@@ -1,170 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib import hostapd_bss_settings
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-
-from antlion.test_utils.net.NetstackBaseTest import NetstackBaseTest
-
-from antlion.utils import rand_ascii_str
-
-
-class NetstackIxiaTest(NetstackBaseTest):
-    def __init__(self, controllers):
-        NetstackBaseTest.__init__(self, controllers)
-
-    def setup_class(self):
-        self.log.info('Setup {cls}'.format(cls=type(self)))
-
-        if not self.fuchsia_devices:
-            self.log.error(
-                "NetstackFuchsiaTest Init: Not enough fuchsia devices.")
-        self.log.info("Running testbed setup with one fuchsia devices")
-        self.fuchsia_dev = self.fuchsia_devices[0]
-
-        # We want to bring up several 2GHz and 5GHz BSSes.
-        wifi_bands = ['2g', '5g']
-
-        # Currently AP_DEFAULT_CHANNEL_2G is 6
-        # and AP_DEFAULT_CHANNEL_5G is 36.
-        wifi_channels = [
-            hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            hostapd_constants.AP_DEFAULT_CHANNEL_5G
-        ]
-
-        # Each band will start up an Open BSS (security_mode=None)
-        # and a WPA2 BSS (security_mode=hostapd_constants.WPA2_STRING)
-        security_modes = [None, hostapd_constants.WPA2_STRING]
-
-        # All secure BSSes will use the same password.
-        wifi_password = rand_ascii_str(10)
-        self.log.info('Wi-Fi password for this test: {wifi_password}'.format(
-            wifi_password=wifi_password))
-        hostapd_configs = []
-        wifi_interfaces = {}
-        bss_settings = {}
-
-        # Build a configuration for each sub-BSSID
-        for band_index, wifi_band in enumerate(wifi_bands):
-            ssid_name = 'Ixia_{wifi_band}_#{bss_number}_{security_mode}'
-            bss_settings[wifi_band] = []
-
-            # Prepare the extra SSIDs.
-            for mode_index, security_mode in enumerate(security_modes):
-
-                # Skip the first SSID because we configure that separately.
-                # due to the way the APIs work.  This loop is only concerned
-                # with the sub-BSSIDs.
-                if mode_index == 0:
-                    continue
-
-                bss_name = ssid_name.format(wifi_band=wifi_band,
-                                            security_mode=security_mode,
-                                            bss_number=mode_index + 1)
-
-                bss_setting = hostapd_bss_settings.BssSettings(
-                    name=bss_name,
-                    ssid=bss_name,
-                    security=hostapd_security.Security(
-                        security_mode=security_mode, password=wifi_password))
-                bss_settings[wifi_band].append(bss_setting)
-
-            # This is the configuration for the first SSID.
-            ssid_name = ssid_name.format(wifi_band=wifi_band,
-                                         security_mode=security_modes[0],
-                                         bss_number=1)
-
-            hostapd_configs.append(
-                hostapd_ap_preset.create_ap_preset(
-                    profile_name='whirlwind',
-                    iface_wlan_2g='wlan0',
-                    iface_wlan_5g='wlan1',
-                    ssid=ssid_name,
-                    channel=wifi_channels[band_index],
-                    security=hostapd_security.Security(
-                        security_mode=security_modes[0],
-                        password=wifi_password),
-                    bss_settings=bss_settings[wifi_band]))
-
-            access_point = self.access_points[band_index]
-
-            # Now bring up the AP and track the interfaces we're using for
-            # each BSSID.  All BSSIDs are now beaconing.
-            wifi_interfaces[wifi_band] = access_point.start_ap(
-                hostapd_configs[band_index])
-
-            # Disable DHCP on this Wi-Fi band.
-            # Note: This also disables DHCP on each sub-BSSID due to how
-            # the APIs are built.
-            #
-            # We need to do this in order to enable IxANVL testing across
-            # Wi-Fi, which needs to configure the IP addresses per-interface
-            # on the client device.
-            access_point.stop_dhcp()
-
-            # Disable NAT.
-            # NAT config in access_point.py is global at the moment, but
-            # calling it twice (once per band) won't hurt anything.  This is
-            # easier than trying to conditionalize per band.
-            #
-            # Note that we could make this per-band, but it would require
-            # refactoring the access_point.py code that turns on NAT, however
-            # if that ever does happen then this code will work as expected
-            # without modification.
-            #
-            # This is also required for IxANVL testing.  NAT would interfere
-            # with IxANVL because IxANVL needs to see the raw frames
-            # sourcing/sinking from/to the DUT for protocols such as ARP and
-            # DHCP, but it also needs the MAC/IP of the source and destination
-            # frames and packets to be from the DUT, so we want the AP to act
-            # like a bridge for these tests.
-            access_point.stop_nat()
-
-        # eth1 is the LAN port, which will always be a part of the bridge.
-        bridge_interfaces = ['eth1']
-
-        # This adds each bssid interface to the bridge.
-        for wifi_band in wifi_bands:
-            for wifi_interface in wifi_interfaces[wifi_band]:
-                bridge_interfaces.append(wifi_interface)
-
-        # Each interface can only be a member of 1 bridge, so we're going to use
-        # the last access_point object to set the bridge up for all interfaces.
-        access_point.create_bridge(bridge_name='ixia_bridge0',
-                                   interfaces=bridge_interfaces)
-
-    def setup_test(self):
-        pass
-
-    def teardown_test(self):
-        pass
-
-    def teardown_class(self):
-        self.log.info('Teardown {cls}'.format(cls=type(self)))
-
-        import pdb
-        pdb.set_trace()
-
-        for access_point in self.access_points:
-            access_point.remove_bridge(bridge_name='ixia_bridge0')
-
-    """Tests"""
-
-    def test_do_nothing(self):
-        return True
diff --git a/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py b/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py
deleted file mode 100644
index 5ce5fda..0000000
--- a/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import signals
-import time
-from antlion.base_test import BaseTestClass
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-
-class ToggleWlanInterfaceStressTest(BaseTestClass):
-
-    def setup_class(self):
-        dut = self.user_params.get('dut', None)
-        if dut:
-            if dut == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif dut == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an Fuchsia device
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-
-    def test_iface_toggle_and_ping(self):
-        """Test that we don't error out when toggling WLAN interfaces.
-
-        Steps:
-        1. Find a WLAN interface
-        2. Destroy it
-        3. Create a new WLAN interface
-        4. Ping after association
-        5. Repeat 1-4 1,000 times
-
-        Expected Result:
-        Verify there are no errors in destroying the wlan interface.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: WLAN, Stability
-        Priority: 1
-        """
-
-        # Test assumes you've already connected to some AP.
-
-        for i in range(1000):
-            wlan_interfaces = self.dut.get_wlan_interface_id_list()
-            print(wlan_interfaces)
-            if len(wlan_interfaces) < 1:
-                raise signals.TestFailure(
-                    "Not enough wlan interfaces for test")
-            if not self.dut.destroy_wlan_interface(wlan_interfaces[0]):
-                raise signals.TestFailure("Failed to destroy WLAN interface")
-            # Really make sure it is dead
-            self.fuchsia_devices[0].ssh.run(
-                f"wlan iface del {wlan_interfaces[0]}")
-            # Grace period
-            time.sleep(2)
-            self.fuchsia_devices[0].ssh.run(
-                'wlan iface new --phy 0 --role Client')
-            end_time = time.time() + 300
-            while time.time() < end_time:
-                time.sleep(1)
-                if self.dut.is_connected():
-                    try:
-                        ping_result = self.dut.ping("8.8.8.8", 10, 1000, 1000,
-                                                    25)
-                        print(ping_result)
-                    except Exception as err:
-                        # TODO: Once we gain more stability, fail test when pinging fails
-                        print("some err {}".format(err))
-                    time.sleep(2)  #give time for some traffic
-                    break
-            if not self.dut.is_connected():
-                raise signals.TestFailure("Failed at iteration {}".format(i +
-                                                                          1))
-            self.log.info("Iteration {} successful".format(i + 1))
-        raise signals.TestPass("Success")
diff --git a/src/antlion/tests/wlan/compliance/VapeInteropTest.py b/src/antlion/tests/wlan/compliance/VapeInteropTest.py
deleted file mode 100644
index 86bc3e3..0000000
--- a/src/antlion/tests/wlan/compliance/VapeInteropTest.py
+++ /dev/null
@@ -1,732 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-
-class VapeInteropTest(WifiBaseTest):
-    """Tests interoperability with mock third party AP profiles.
-
-    Test Bed Requirement:
-    * One Android or Fuchsia Device
-    * One Whirlwind Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
-
-        self.access_point = self.access_points[0]
-
-        # Same for both 2g and 5g
-        self.ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        self.password = utils.rand_ascii_str(
-            hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
-        self.security_profile_wpa2 = Security(
-            security_mode=hostapd_constants.WPA2_STRING,
-            password=self.password,
-            wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER)
-
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockAcquireBright()
-                ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockRelease()
-                ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def test_associate_actiontec_pk5000_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='actiontec_pk5000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_actiontec_pk5000_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='actiontec_pk5000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_actiontec_mi424wr_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='actiontec_mi424wr',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_actiontec_mi424wr_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='actiontec_mi424wr',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_asus_rtac66u_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_asus_rtac66u_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_asus_rtac66u_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_asus_rtac66u_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_asus_rtac86u_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac86u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_asus_rtac86u_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac86u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_asus_rtac86u_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac86u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_asus_rtac86u_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac86u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_asus_rtac5300_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac5300',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_asus_rtac5300_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac5300',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_asus_rtac5300_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac5300',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_asus_rtac5300_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtac5300',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_asus_rtn56u_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn56u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_asus_rtn56u_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn56u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_asus_rtn56u_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn56u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_asus_rtn56u_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn56u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_asus_rtn66u_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_asus_rtn66u_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_asus_rtn66u_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_asus_rtn66u_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='asus_rtn66u',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_belkin_f9k1001v5_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='belkin_f9k1001v5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_belkin_f9k1001v5_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='belkin_f9k1001v5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_linksys_ea4500_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea4500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_linksys_ea4500_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea4500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_linksys_ea4500_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea4500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_linksys_ea4500_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea4500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_linksys_ea9500_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea9500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_linksys_ea9500_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea9500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_linksys_ea9500_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea9500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_linksys_ea9500_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_ea9500',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_linksys_wrt1900acv2_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_wrt1900acv2',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_linksys_wrt1900acv2_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_wrt1900acv2',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_linksys_wrt1900acv2_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_wrt1900acv2',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_linksys_wrt1900acv2_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='linksys_wrt1900acv2',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_netgear_r7000_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_r7000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_netgear_r7000_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_r7000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_netgear_r7000_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_r7000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_netgear_r7000_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_r7000',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_netgear_wndr3400_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_wndr3400',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_netgear_wndr3400_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_wndr3400',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_netgear_wndr3400_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_wndr3400',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_netgear_wndr3400_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='netgear_wndr3400',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_securifi_almond_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='securifi_almond',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_securifi_almond_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='securifi_almond',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_tplink_archerc5_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_tplink_archerc5_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_tplink_archerc5_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_tplink_archerc5_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc5',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_tplink_archerc7_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc7',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_tplink_archerc7_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc7',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_tplink_archerc7_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc7',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_tplink_archerc7_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_archerc7',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_tplink_c1200_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_c1200',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_tplink_c1200_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_c1200',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_tplink_c1200_5ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_c1200',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_tplink_c1200_5ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_c1200',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
-
-    def test_associate_tplink_tlwr940n_24ghz_open(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_tlwr940n',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        asserts.assert_true(self.dut.associate(self.ssid),
-                            'Failed to connect.')
-
-    def test_associate_tplink_tlwr940n_24ghz_wpa2(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='tplink_tlwr940n',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile_wpa2,
-                 password=self.password)
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_pwd=self.password,
-                               target_security=hostapd_constants.WPA2_STRING),
-            'Failed to connect.')
diff --git a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py b/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py
deleted file mode 100644
index 5efe1f4..0000000
--- a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py
+++ /dev/null
@@ -1,262 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-
-from antlion import asserts
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.utils import rand_ascii_str
-
-# AC Capabilities
-"""
-Capabilities Not Supported on Whirlwind:
-    - Supported Channel Width ([VHT160], [VHT160-80PLUS80]): 160mhz and 80+80
-        unsupported
-    - SU Beamformer [SU-BEAMFORMER]
-    - SU Beamformee [SU-BEAMFORMEE]
-    - MU Beamformer [MU-BEAMFORMER]
-    - MU Beamformee [MU-BEAMFORMEE]
-    - BF Antenna ([BF-ANTENNA-2], [BF-ANTENNA-3], [BF-ANTENNA-4])
-    - Rx STBC 2, 3, & 4 ([RX-STBC-12],[RX-STBC-123],[RX-STBC-124])
-    - VHT Link Adaptation ([VHT-LINK-ADAPT2],[VHT-LINK-ADAPT3])
-    - VHT TXOP Power Save [VHT-TXOP-PS]
-    - HTC-VHT [HTC-VHT]
-"""
-VHT_MAX_MPDU_LEN = [
-    hostapd_constants.AC_CAPABILITY_MAX_MPDU_7991,
-    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454, ''
-]
-RXLDPC = [hostapd_constants.AC_CAPABILITY_RXLDPC, '']
-SHORT_GI_80 = [hostapd_constants.AC_CAPABILITY_SHORT_GI_80, '']
-TX_STBC = [hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, '']
-RX_STBC = [hostapd_constants.AC_CAPABILITY_RX_STBC_1, '']
-MAX_A_MPDU = [
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7, ''
-]
-RX_ANTENNA = [hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, '']
-TX_ANTENNA = [hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, '']
-
-# Default 11N Capabilities
-N_CAPABS_40MHZ = [
-    hostapd_constants.N_CAPABILITY_LDPC, hostapd_constants.N_CAPABILITY_SGI20,
-    hostapd_constants.N_CAPABILITY_RX_STBC1,
-    hostapd_constants.N_CAPABILITY_SGI20, hostapd_constants.N_CAPABILITY_SGI40,
-    hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-    hostapd_constants.N_CAPABILITY_HT40_PLUS
-]
-
-N_CAPABS_20MHZ = [
-    hostapd_constants.N_CAPABILITY_LDPC, hostapd_constants.N_CAPABILITY_SGI20,
-    hostapd_constants.N_CAPABILITY_RX_STBC1,
-    hostapd_constants.N_CAPABILITY_SGI20,
-    hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-    hostapd_constants.N_CAPABILITY_HT20
-]
-
-# Default wpa2 profile.
-WPA2_SECURITY = Security(security_mode=hostapd_constants.WPA2_STRING,
-                         password=rand_ascii_str(20),
-                         wpa_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
-                         wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER)
-
-SECURITIES = [None, WPA2_SECURITY]
-
-
-def generate_test_name(settings):
-    """Generates a test name string based on the ac_capabilities for
-    a test case.
-
-    Args:
-        settings: a dict with the test settings (bandwidth, security, ac_capabs)
-
-    Returns:
-        A string test case name
-    """
-    chbw = settings['chbw']
-    sec = 'wpa2' if settings['security'] else 'open'
-    ret = []
-    for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys():
-        if cap in settings['ac_capabilities']:
-            ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap])
-    return 'test_11ac_%smhz_%s_%s' % (chbw, sec, ''.join(ret))
-
-
-# 6912 test cases
-class WlanPhyCompliance11ACTest(WifiBaseTest):
-    """Tests for validating 11ac PHYS.
-
-    Test Bed Requirement:
-    * One Android device or Fuchsia device
-    * One Access Point
-    """
-
-    def __init__(self, controllers):
-        super().__init__(controllers)
-
-    def setup_generated_tests(self):
-        test_args = self._generate_20mhz_test_args() + \
-            self._generate_40mhz_test_args() + \
-            self._generate_80mhz_test_args()
-        self.generate_tests(test_logic=self.setup_and_connect,
-                            name_func=generate_test_name,
-                            arg_sets=test_args)
-
-    def setup_class(self):
-        super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            self.dut = create_wlan_device(self.android_devices[0])
-
-        self.access_point = self.access_points[0]
-        self.android_devices = getattr(self, 'android_devices', [])
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        for ad in self.android_devices:
-            ad.droid.wakeLockAcquireBright()
-            ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        for ad in self.android_devices:
-            ad.droid.wakeLockRelease()
-            ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def setup_and_connect(self, ap_settings):
-        """Uses ap_settings to set up ap and then attempts to associate a DUT.
-
-        Args:
-            ap_settings: a dict containing test case settings, including
-                bandwidth, security, n_capabilities, and ac_capabilities
-
-        """
-        ssid = rand_ascii_str(20)
-        security = ap_settings['security']
-        chbw = ap_settings['chbw']
-        password = None
-        target_security = None
-        if security:
-            password = security.password
-            target_security = security.security_mode_string
-        n_capabilities = ap_settings['n_capabilities']
-        ac_capabilities = ap_settings['ac_capabilities']
-
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 mode=hostapd_constants.MODE_11AC_MIXED,
-                 channel=36,
-                 n_capabilities=n_capabilities,
-                 ac_capabilities=ac_capabilities,
-                 force_wmm=True,
-                 ssid=ssid,
-                 security=security,
-                 vht_bandwidth=chbw,
-                 password=password)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_pwd=password,
-                               target_security=target_security),
-            'Failed to associate.')
-
-    # 1728 tests
-    def _generate_20mhz_test_args(self):
-        test_args = []
-
-        # 864 test cases for open security
-        # 864 test cases for wpa2 security
-        for combination in itertools.product(SECURITIES, VHT_MAX_MPDU_LEN,
-                                             RXLDPC, RX_STBC, TX_STBC,
-                                             MAX_A_MPDU, RX_ANTENNA,
-                                             TX_ANTENNA):
-            security = combination[0]
-            ac_capabilities = combination[1:]
-            test_args.append(({
-                'chbw': 20,
-                'security': security,
-                'n_capabilities': N_CAPABS_20MHZ,
-                'ac_capabilities': ac_capabilities
-            }, ))
-
-        return test_args
-
-    # 1728 tests
-    def _generate_40mhz_test_args(self):
-        test_args = []
-
-        # 864 test cases for open security
-        # 864 test cases for wpa2 security
-        for combination in itertools.product(SECURITIES, VHT_MAX_MPDU_LEN,
-                                             RXLDPC, RX_STBC, TX_STBC,
-                                             MAX_A_MPDU, RX_ANTENNA,
-                                             TX_ANTENNA):
-            security = combination[0]
-            ac_capabilities = combination[1:]
-            test_args.append(({
-                'chbw': 40,
-                'security': security,
-                'n_capabilities': N_CAPABS_40MHZ,
-                'ac_capabilities': ac_capabilities
-            }, ))
-
-        return test_args
-
-    # 3456 tests
-    def _generate_80mhz_test_args(self):
-        test_args = []
-
-        # 1728 test cases for open security
-        # 1728 test cases for wpa2 security
-        for combination in itertools.product(SECURITIES, VHT_MAX_MPDU_LEN,
-                                             RXLDPC, SHORT_GI_80, RX_STBC,
-                                             TX_STBC, MAX_A_MPDU, RX_ANTENNA,
-                                             TX_ANTENNA):
-            security = combination[0]
-            ac_capabilities = combination[1:]
-            test_args.append(({
-                'chbw': 80,
-                'security': security,
-                'n_capabilities': N_CAPABS_40MHZ,
-                'ac_capabilities': ac_capabilities
-            }, ))
-        return test_args
diff --git a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py b/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py
deleted file mode 100644
index d30982c..0000000
--- a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py
+++ /dev/null
@@ -1,437 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-
-from antlion import asserts
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-FREQUENCY_24 = ['2.4GHz']
-FREQUENCY_5 = ['5GHz']
-CHANNEL_BANDWIDTH_20 = ['HT20']
-CHANNEL_BANDWIDTH_40_LOWER = ['HT40-']
-CHANNEL_BANDWIDTH_40_UPPER = ['HT40+']
-SECURITY_OPEN = 'open'
-SECURITY_WPA2 = 'wpa2'
-N_MODE = [hostapd_constants.MODE_11N_PURE, hostapd_constants.MODE_11N_MIXED]
-LDPC = [hostapd_constants.N_CAPABILITY_LDPC, '']
-TX_STBC = [hostapd_constants.N_CAPABILITY_TX_STBC, '']
-RX_STBC = [hostapd_constants.N_CAPABILITY_RX_STBC1, '']
-SGI_20 = [hostapd_constants.N_CAPABILITY_SGI20, '']
-SGI_40 = [hostapd_constants.N_CAPABILITY_SGI40, '']
-DSSS_CCK = [hostapd_constants.N_CAPABILITY_DSSS_CCK_40, '']
-INTOLERANT_40 = [hostapd_constants.N_CAPABILITY_40_INTOLERANT, '']
-MAX_AMPDU_7935 = [hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, '']
-SMPS = [hostapd_constants.N_CAPABILITY_SMPS_STATIC, '']
-
-
-def generate_test_name(settings):
-    """Generates a string based on the n_capabilities for a test case
-
-    Args:
-        settings: A dictionary of hostapd constant n_capabilities.
-
-    Returns:
-        A string that represents a test case name.
-    """
-    ret = []
-    for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
-        if cap in settings['n_capabilities']:
-            ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap])
-    # '+' is used by Mobile Harness as special character, don't use it in test names
-    if settings['chbw'] == 'HT40-':
-        chbw = "HT40Lower"
-    elif settings['chbw'] == 'HT40+':
-        chbw = "HT40Upper"
-    else:
-        chbw = settings['chbw']
-    return 'test_11n_%s_%s_%s_%s_%s' % (settings['frequency'], chbw,
-                                        settings['security'],
-                                        settings['n_mode'], ''.join(ret))
-
-
-class WlanPhyCompliance11NTest(WifiBaseTest):
-    """Tests for validating 11n PHYS.
-
-    Test Bed Requirement:
-    * One Android device or Fuchsia device
-    * One Access Point
-    """
-
-    def __init__(self, controllers):
-        super().__init__(controllers)
-
-    def setup_generated_tests(self):
-        test_args = self._generate_24_HT20_test_args() + \
-            self._generate_24_HT40_lower_test_args() + \
-            self._generate_24_HT40_upper_test_args() + \
-            self._generate_5_HT20_test_args() + \
-            self._generate_5_HT40_lower_test_args() + \
-            self._generate_5_HT40_upper_test_args() + \
-            self._generate_24_HT20_wpa2_test_args() + \
-            self._generate_24_HT40_lower_wpa2_test_args() + \
-            self._generate_24_HT40_upper_wpa2_test_args() + \
-            self._generate_5_HT20_wpa2_test_args() + \
-            self._generate_5_HT40_lower_wpa2_test_args() + \
-            self._generate_5_HT40_upper_wpa2_test_args()
-
-        self.generate_tests(test_logic=self.setup_and_connect,
-                            name_func=generate_test_name,
-                            arg_sets=test_args)
-
-    def setup_class(self):
-        super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
-
-        self.access_point = self.access_points[0]
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockAcquireBright()
-                ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockRelease()
-                ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def setup_and_connect(self, ap_settings):
-        """Generates a hostapd config, setups up the AP with that config, then
-           attempts to associate a DUT
-
-        Args:
-               ap_settings: A dictionary of hostapd constant n_capabilities.
-        """
-        ssid = utils.rand_ascii_str(20)
-        security_profile = None
-        password = None
-        temp_n_capabilities = list(ap_settings['n_capabilities'])
-        n_capabilities = []
-        for n_capability in temp_n_capabilities:
-            if n_capability in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
-                n_capabilities.append(n_capability)
-
-        if ap_settings['chbw'] == 'HT20' or ap_settings['chbw'] == 'HT40+':
-            if ap_settings['frequency'] == '2.4GHz':
-                channel = 1
-            elif ap_settings['frequency'] == '5GHz':
-                channel = 36
-            else:
-                raise ValueError('Invalid frequence: %s' %
-                                 ap_settings['frequency'])
-
-        elif ap_settings['chbw'] == 'HT40-':
-            if ap_settings['frequency'] == '2.4GHz':
-                channel = 11
-            elif ap_settings['frequency'] == '5GHz':
-                channel = 60
-            else:
-                raise ValueError('Invalid frequency: %s' %
-                                 ap_settings['frequency'])
-
-        else:
-            raise ValueError('Invalid channel bandwidth: %s' %
-                             ap_settings['chbw'])
-
-        if ap_settings['chbw'] == 'HT40-' or ap_settings['chbw'] == 'HT40+':
-            if hostapd_config.ht40_plus_allowed(channel):
-                extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
-            elif hostapd_config.ht40_minus_allowed(channel):
-                extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
-            else:
-                raise ValueError('Invalid channel: %s' % channel)
-            n_capabilities.append(extended_channel)
-
-        if ap_settings['security'] == 'wpa2':
-            security_profile = Security(
-                security_mode=SECURITY_WPA2,
-                password=generate_random_password(length=20),
-                wpa_cipher='CCMP',
-                wpa2_cipher='CCMP')
-            password = security_profile.password
-        target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-            ap_settings['security'], None)
-
-        mode = ap_settings['n_mode']
-        if mode not in N_MODE:
-            raise ValueError('Invalid n-mode: %s' % ap_settings['n-mode'])
-
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 mode=mode,
-                 channel=channel,
-                 n_capabilities=n_capabilities,
-                 ac_capabilities=[],
-                 force_wmm=True,
-                 ssid=ssid,
-                 security=security_profile,
-                 password=password)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_pwd=password,
-                               target_security=target_security),
-            'Failed to connect.')
-
-    def _generate_24_HT20_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_20, N_MODE,
-                                             LDPC, TX_STBC, RX_STBC, SGI_20,
-                                             INTOLERANT_40, MAX_AMPDU_7935,
-                                             SMPS):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_mode = combination[2]
-            n_capabilities = combination[3:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': n_mode,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities,
-            }, ))
-        return test_args
-
-    def _generate_24_HT40_lower_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_40_LOWER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities
-            }, ))
-        return test_args
-
-    def _generate_24_HT40_upper_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_40_UPPER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities
-            }, ))
-        return test_args
-
-    def _generate_5_HT20_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_5, CHANNEL_BANDWIDTH_20,
-                                             LDPC, TX_STBC, RX_STBC, SGI_20,
-                                             INTOLERANT_40, MAX_AMPDU_7935,
-                                             SMPS):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities
-            }, ))
-        return test_args
-
-    def _generate_5_HT40_lower_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_5,
-                                             CHANNEL_BANDWIDTH_40_LOWER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities
-            }, ))
-        return test_args
-
-    def _generate_5_HT40_upper_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_5,
-                                             CHANNEL_BANDWIDTH_40_UPPER,
-                                             N_MODE, LDPC, TX_STBC, RX_STBC,
-                                             SGI_20, SGI_40, MAX_AMPDU_7935,
-                                             SMPS, DSSS_CCK):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_mode = combination[2]
-            n_capabilities = combination[3:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': n_mode,
-                'security': SECURITY_OPEN,
-                'n_capabilities': n_capabilities
-            }, ))
-        return test_args
-
-    def _generate_24_HT20_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_20, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20,
-                                             INTOLERANT_40, MAX_AMPDU_7935,
-                                             SMPS):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
-        return test_args
-
-    def _generate_24_HT40_lower_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_40_LOWER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
-        return test_args
-
-    def _generate_24_HT40_upper_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_24,
-                                             CHANNEL_BANDWIDTH_40_UPPER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
-        return test_args
-
-    def _generate_5_HT20_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_5, CHANNEL_BANDWIDTH_20,
-                                             LDPC, TX_STBC, RX_STBC, SGI_20,
-                                             INTOLERANT_40, MAX_AMPDU_7935,
-                                             SMPS):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
-        return test_args
-
-    def _generate_5_HT40_lower_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_5,
-                                             CHANNEL_BANDWIDTH_40_LOWER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
-        return test_args
-
-    def _generate_5_HT40_upper_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(FREQUENCY_5,
-                                             CHANNEL_BANDWIDTH_40_UPPER, LDPC,
-                                             TX_STBC, RX_STBC, SGI_20, SGI_40,
-                                             MAX_AMPDU_7935, SMPS, DSSS_CCK):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(({
-                'frequency': test_frequency,
-                'chbw': test_chbw,
-                'n_mode': hostapd_constants.MODE_11N_MIXED,
-                'security': SECURITY_WPA2,
-                'n_capabilities': n_capabilities
-            }, ))
-        return test_args
diff --git a/src/antlion/tests/wlan/compliance/WlanPhyComplianceABGTest.py b/src/antlion/tests/wlan/compliance/WlanPhyComplianceABGTest.py
deleted file mode 100644
index dc2116e..0000000
--- a/src/antlion/tests/wlan/compliance/WlanPhyComplianceABGTest.py
+++ /dev/null
@@ -1,1498 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion import utils
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-
-class WlanPhyComplianceABGTest(WifiBaseTest):
-    """Tests for validating 11a, 11b, and 11g PHYS.
-
-    Test Bed Requirement:
-    * One Android device or Fuchsia device
-    * One Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
-
-        self.access_point = self.access_points[0]
-        open_network = self.get_open_network(False, [])
-        open_network_min_len = self.get_open_network(
-            False, [],
-            ssid_length_2g=hostapd_constants.AP_SSID_MIN_LENGTH_2G,
-            ssid_length_5g=hostapd_constants.AP_SSID_MIN_LENGTH_5G)
-        open_network_max_len = self.get_open_network(
-            False, [],
-            ssid_length_2g=hostapd_constants.AP_SSID_MAX_LENGTH_2G,
-            ssid_length_5g=hostapd_constants.AP_SSID_MAX_LENGTH_5G)
-        self.open_network_2g = open_network['2g']
-        self.open_network_5g = open_network['5g']
-        self.open_network_max_len_2g = open_network_max_len['2g']
-        self.open_network_max_len_2g['SSID'] = (
-            self.open_network_max_len_2g['SSID'][3:])
-        self.open_network_max_len_5g = open_network_max_len['5g']
-        self.open_network_max_len_5g['SSID'] = (
-            self.open_network_max_len_5g['SSID'][3:])
-        self.open_network_min_len_2g = open_network_min_len['2g']
-        self.open_network_min_len_2g['SSID'] = (
-            self.open_network_min_len_2g['SSID'][3:])
-        self.open_network_min_len_5g = open_network_min_len['5g']
-        self.open_network_min_len_5g['SSID'] = (
-            self.open_network_min_len_5g['SSID'][3:])
-
-        self.utf8_ssid_2g = '2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢'
-        self.utf8_ssid_5g = '5𝔤_𝔊𝔬𝔬𝔤𝔩𝔢'
-
-        self.utf8_ssid_2g_french = 'Château du Feÿ'
-        self.utf8_password_2g_french = 'du Feÿ Château'
-
-        self.utf8_ssid_2g_german = 'Rat für Straßenatlas'
-        self.utf8_password_2g_german = 'für Straßenatlas Rat'
-
-        self.utf8_ssid_2g_dutch = 'Die niet óúd, is níéuw!'
-        self.utf8_password_2g_dutch = 'niet óúd, is níéuw! Die'
-
-        self.utf8_ssid_2g_swedish = 'Det är femtioåtta'
-        self.utf8_password_2g_swedish = 'femtioåtta Det är'
-
-        self.utf8_ssid_2g_norwegian = 'Curaçao ØÆ æ å å å'
-        self.utf8_password_2g_norwegian = 'ØÆ Curaçao æ å å å'
-
-        #Danish and Norwegian has the same alphabet
-        self.utf8_ssid_2g_danish = self.utf8_ssid_2g_norwegian
-        self.utf8_password_2g_danish = self.utf8_password_2g_norwegian
-
-        self.utf8_ssid_2g_japanese = 'あなた はお母さん'
-        self.utf8_password_2g_japanese = 'そっくりね。あな'
-
-        self.utf8_ssid_2g_spanish = '¡No á,é,í,ó,ú,ü,ñ,¿,¡'
-        self.utf8_password_2g_spanish = 'á,é,í,ó,ú,ü,ñ,¿,¡ ¡No'
-
-        self.utf8_ssid_2g_italian = 'caffè Pinocchio è italiano?'
-        self.utf8_password_2g_italian = 'Pinocchio è italiano? caffè'
-
-        self.utf8_ssid_2g_korean = 'ㅘㅙㅚㅛㅜㅝㅞㅟㅠ'
-        self.utf8_password_2g_korean = 'ㅜㅝㅞㅟㅠㅘㅙㅚㅛ'
-
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockAcquireBright()
-                ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockRelease()
-                ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def test_associate_11b_only_long_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=False)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_short_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=True)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_minimal_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=15)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_maximum_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=1024)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_frag_threshold_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_rts_threshold_256(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_rts_256_frag_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256,
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_high_dtim_low_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=3,
-                 beacon_interval=100)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_low_dtim_high_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=1,
-                 beacon_interval=300)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_WMM_with_default_values(self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name='whirlwind_11ab_legacy',
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.open_network_2g['SSID'],
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_WMM_with_non_default_values(self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name='whirlwind_11ab_legacy',
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.open_network_2g['SSID'],
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_WMM_ACM_on_BK(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_WMM_ACM_on_BE(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_WMM_ACM_on_VI(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_WMM_ACM_on_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_WMM_ACM_on_BK_BE_VI(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_WMM_ACM_on_BK_BE_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_WMM_ACM_on_BK_VI_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_WMM_ACM_on_BE_VI_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_country_code(self):
-        country_info = utils.merge_dicts(
-            hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['UNITED_STATES'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_non_country_code(self):
-        country_info = utils.merge_dicts(
-            hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['NON_COUNTRY'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_hidden_ssid(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 hidden=True)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_vendor_ie_in_beacon_correct_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_vendor_ie_in_beacon_zero_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_vendor_ie_in_assoc_correct_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_association_response'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11b_only_with_vendor_ie_in_assoc_zero_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.VENDOR_IE[
-                     'zero_length_association_'
-                     'response_without_data'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_long_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 preamble=False)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_short_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 preamble=True)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_minimal_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 beacon_interval=15)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_maximum_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 beacon_interval=1024)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_frag_threshold_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_rts_threshold_256(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 rts_threshold=256)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_rts_256_frag_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 rts_threshold=256,
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_high_dtim_low_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 dtim_period=3,
-                 beacon_interval=100)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_low_dtim_high_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 dtim_period=1,
-                 beacon_interval=300)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_WMM_with_default_values(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_WMM_with_non_default_values(self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name='whirlwind_11ab_legacy',
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=self.open_network_5g['SSID'],
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_WMM_ACM_on_BK(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_WMM_ACM_on_BE(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_WMM_ACM_on_VI(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_WMM_ACM_on_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_WMM_ACM_on_BK_BE_VI(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_WMM_ACM_on_BK_BE_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_WMM_ACM_on_BK_VI_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_WMM_ACM_on_BE_VI_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_country_code(self):
-        country_info = utils.merge_dicts(
-            hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['UNITED_STATES'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_non_country_code(self):
-        country_info = utils.merge_dicts(
-            hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['NON_COUNTRY'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_hidden_ssid(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 hidden=True)
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_vendor_ie_in_beacon_correct_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'])
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_vendor_ie_in_beacon_zero_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'])
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_vendor_ie_in_assoc_correct_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_association_response'])
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11a_only_with_vendor_ie_in_assoc_zero_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_5g['SSID'],
-                 additional_ap_parameters=hostapd_constants.VENDOR_IE[
-                     'zero_length_association_'
-                     'response_without_data'])
-        asserts.assert_true(self.dut.associate(self.open_network_5g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_long_preamble(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=False,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_short_preamble(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=True,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_minimal_beacon_interval(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=15,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_maximum_beacon_interval(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=1024,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_frag_threshold_430(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 frag_threshold=430,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_rts_threshold_256(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_rts_256_frag_430(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_high_dtim_low_beacon_interval(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=3,
-                 beacon_interval=100,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_low_dtim_high_beacon_interval(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=1,
-                 beacon_interval=300,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_WMM_with_default_values(self):
-        data_rates = utils.merge_dicts(
-            hostapd_constants.OFDM_DATA_RATES,
-            hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_WMM_with_non_default_values(self):
-        data_rates = utils.merge_dicts(
-            hostapd_constants.OFDM_DATA_RATES,
-            hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.WMM_NON_DEFAULT_PARAMS)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_WMM_ACM_on_BK(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_WMM_ACM_on_BE(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_WMM_ACM_on_VI(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_WMM_ACM_on_VO(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VO, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_WMM_ACM_on_BK_BE_VI(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VI, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_WMM_ACM_on_BK_BE_VO(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VO, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_WMM_ACM_on_BK_VI_VO(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_WMM_ACM_on_BE_VI_VO(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO, data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_country_code(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        country_info = utils.merge_dicts(
-            hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['UNITED_STATES'], data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_non_country_code(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        country_info = utils.merge_dicts(
-            hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['NON_COUNTRY'], data_rates)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_hidden_ssid(self):
-        data_rates = utils.merge_dicts(hostapd_constants.OFDM_DATA_RATES,
-                                       hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 hidden=True,
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_vendor_ie_in_beacon_correct_length(self):
-        data_rates = utils.merge_dicts(
-            hostapd_constants.OFDM_DATA_RATES,
-            hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['correct_length_beacon'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_vendor_ie_in_beacon_zero_length(self):
-        data_rates = utils.merge_dicts(
-            hostapd_constants.OFDM_DATA_RATES,
-            hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['zero_length_beacon_without_data'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_vendor_ie_in_assoc_correct_length(self):
-        data_rates = utils.merge_dicts(
-            hostapd_constants.OFDM_DATA_RATES,
-            hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['correct_length_association_response'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_vendor_ie_in_assoc_zero_length(self):
-        data_rates = utils.merge_dicts(
-            hostapd_constants.OFDM_DATA_RATES,
-            hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['correct_length_association_response'],
-            hostapd_constants.VENDOR_IE['zero_length_association_'
-                                        'response_without_data'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_only_long_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=False)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_short_preamble(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 preamble=True)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_minimal_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=15)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_maximum_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 beacon_interval=1024)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_frag_threshold_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_rts_threshold_256(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_rts_256_frag_430(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 rts_threshold=256,
-                 frag_threshold=430)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_high_dtim_low_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=3,
-                 beacon_interval=100)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_low_dtim_high_beacon_interval(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 dtim_period=1,
-                 beacon_interval=300)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_WMM_with_default_values(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_WMM_with_non_default_values(self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name='whirlwind_11ag_legacy',
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.open_network_2g['SSID'],
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_WMM_ACM_on_BK(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_WMM_ACM_on_BE(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_WMM_ACM_on_VI(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_WMM_ACM_on_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_WMM_ACM_on_BK_BE_VI(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VI)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_WMM_ACM_on_BK_BE_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_BE,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_WMM_ACM_on_BK_VI_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BK, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_WMM_ACM_on_BE_VI_VO(self):
-        wmm_acm_bits_enabled = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_BE, hostapd_constants.WMM_ACM_VI,
-            hostapd_constants.WMM_ACM_VO)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 force_wmm=True,
-                 additional_ap_parameters=wmm_acm_bits_enabled)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_country_code(self):
-        country_info = utils.merge_dicts(
-            hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['UNITED_STATES'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_non_country_code(self):
-        country_info = utils.merge_dicts(
-            hostapd_constants.ENABLE_IEEE80211D,
-            hostapd_constants.COUNTRY_STRING['ALL'],
-            hostapd_constants.COUNTRY_CODE['NON_COUNTRY'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=country_info)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_only_with_hidden_ssid(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 hidden=True)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'])
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_vendor_ie_in_assoc_correct_length(self):
-        data_rates = utils.merge_dicts(
-            hostapd_constants.OFDM_DATA_RATES,
-            hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['correct_length_association_response'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_associate_11g_only_with_vendor_ie_in_assoc_zero_length(self):
-        data_rates = utils.merge_dicts(
-            hostapd_constants.OFDM_DATA_RATES,
-            hostapd_constants.OFDM_ONLY_BASIC_RATES,
-            hostapd_constants.VENDOR_IE['correct_length_association_response'],
-            hostapd_constants.VENDOR_IE['zero_length_association_'
-                                        'response_without_data'])
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ag_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_2g['SSID'],
-                 additional_ap_parameters=data_rates)
-        asserts.assert_true(self.dut.associate(self.open_network_2g['SSID']),
-                            'Failed to associate.')
-
-    def test_minimum_ssid_length_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_min_len_2g['SSID'])
-        asserts.assert_true(
-            self.dut.associate(self.open_network_min_len_2g['SSID']),
-            'Failed to associate.')
-
-    def test_minimum_ssid_length_5g_11ac_80mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_min_len_5g['SSID'])
-        asserts.assert_true(
-            self.dut.associate(self.open_network_min_len_5g['SSID']),
-            'Failed to associate.')
-
-    def test_maximum_ssid_length_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.open_network_max_len_2g['SSID'])
-        asserts.assert_true(
-            self.dut.associate(self.open_network_max_len_2g['SSID']),
-            'Failed to associate.')
-
-    def test_maximum_ssid_length_5g_11ac_80mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.open_network_max_len_5g['SSID'])
-        asserts.assert_true(
-            self.dut.associate(self.open_network_max_len_5g['SSID']),
-            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g),
-                            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_5g_11ac_80mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.utf8_ssid_5g)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_5g),
-                            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_french_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_french)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_french),
-                            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_german_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_german)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_german),
-                            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_dutch_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_dutch)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_dutch),
-                            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_swedish_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_swedish)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_swedish),
-                            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_norwegian_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_norwegian)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_norwegian),
-                            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_danish_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_danish)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_danish),
-                            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_japanese_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_japanese)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_japanese),
-                            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_spanish_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_spanish)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_spanish),
-                            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_italian_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_italian)
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_italian),
-                            'Failed to associate.')
-
-    def test_ssid_with_UTF8_characters_korean_2g_11n_20mhz(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind_11ab_legacy',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.utf8_ssid_2g_korean)
-
-        asserts.assert_true(self.dut.associate(self.utf8_ssid_2g_korean),
-                            'Failed to associate.')
diff --git a/src/antlion/tests/wlan/compliance/WlanSecurityComplianceABGTest.py b/src/antlion/tests/wlan/compliance/WlanSecurityComplianceABGTest.py
deleted file mode 100644
index ade2036..0000000
--- a/src/antlion/tests/wlan/compliance/WlanSecurityComplianceABGTest.py
+++ /dev/null
@@ -1,6475 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import re
-
-from antlion import asserts
-from antlion import utils
-from functools import wraps
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-AP_11ABG_PROFILE_NAME = 'whirlwind_11ag_legacy'
-SSID_LENGTH_DEFAULT = 15
-
-
-def create_security_profile(test_func):
-    """Decorator for generating hostapd security profile object based on the
-    test name.
-    Args:
-        test_func: The test function
-    Returns:
-        security_profile_generator: The function that generates the security
-            profile object
-    """
-
-    @wraps(test_func)
-    def security_profile_generator(self, *args, **kwargs):
-        """Function that looks at the name of the function and determines what
-        the security profile should be based on what items are in the name
-
-        Example: A function with the name sec_wpa_wpa2_ptk_ccmp_tkip would
-            return a security profile that has wpa and wpa2 configure with a
-            ptk cipher of ccmp or tkip. Removing one of those options would
-            drop it from the config.
-
-        Args:
-            self: The object of the WlanSecurityComplianceABGTest class.
-            *args: args that were sent to the original test function
-            **kwargs: kwargs that were sent to the original test function
-        Returns:
-            The original function that was called
-        """
-        utf8_password_2g = '2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢'
-        utf8_password_2g_french = 'du Feÿ Château'
-        utf8_password_2g_german = 'für Straßenatlas Rat'
-        utf8_password_2g_dutch = 'niet óúd, is níéuw! Die'
-        utf8_password_2g_swedish = 'femtioåtta Det är'
-        utf8_password_2g_norwegian = 'ØÆ Curaçao æ å å å'
-        #Danish and Norwegian has the same alphabet
-        utf8_password_2g_danish = utf8_password_2g_norwegian
-        utf8_password_2g_japanese = 'そっくりね。あな'
-        utf8_password_2g_spanish = 'á,é,í,ó,ú,ü,ñ,¿,¡ ¡No'
-        utf8_password_2g_italian = 'Pinocchio è italiano? caffè'
-        utf8_password_2g_korean = 'ㅜㅝㅞㅟㅠㅘㅙㅚㅛ'
-
-        security = re.search(r'sec(.*?)ptk_(.*)', test_func.__name__)
-        security_mode = security.group(1)
-        ptk_type = security.group(2)
-        wpa_cipher = None
-        wpa2_cipher = None
-        if '_wpa_wpa2_wpa3_' in security_mode:
-            security_mode = hostapd_constants.WPA_WPA2_WPA3_MIXED_STRING
-        elif '_wpa_wpa2_' in security_mode:
-            security_mode = hostapd_constants.WPA_MIXED_STRING
-        elif '_wpa2_wpa3_' in security_mode:
-            security_mode = hostapd_constants.WPA2_WPA3_MIXED_STRING
-        elif '_wep_' in security_mode:
-            security_mode = hostapd_constants.WEP_STRING
-        elif '_wpa_' in security_mode:
-            security_mode = hostapd_constants.WPA_STRING
-        elif '_wpa2_' in security_mode:
-            security_mode = hostapd_constants.WPA2_STRING
-        elif '_wpa3_' in security_mode:
-            security_mode = hostapd_constants.WPA3_STRING
-        if 'tkip' in ptk_type and 'ccmp' in ptk_type:
-            wpa_cipher = 'TKIP CCMP'
-            wpa2_cipher = 'TKIP CCMP'
-        elif 'tkip' in ptk_type:
-            wpa_cipher = 'TKIP'
-            wpa2_cipher = 'TKIP'
-        elif 'ccmp' in ptk_type:
-            wpa_cipher = 'CCMP'
-            wpa2_cipher = 'CCMP'
-        if 'max_length_password' in test_func.__name__:
-            password = generate_random_password(
-                length=hostapd_constants.MAX_WPA_PASSWORD_LENGTH)
-        elif 'max_length_psk' in test_func.__name__:
-            password = str(
-                generate_random_password(
-                    length=hostapd_constants.MAX_WPA_PSK_LENGTH,
-                    hex=True)).lower()
-        elif 'wep_5_chars' in test_func.__name__:
-            password = generate_random_password(length=5)
-        elif 'wep_13_chars' in test_func.__name__:
-            password = generate_random_password(length=13)
-        elif 'wep_10_hex' in test_func.__name__:
-            password = str(generate_random_password(length=10,
-                                                    hex=True)).lower()
-        elif 'wep_26_hex' in test_func.__name__:
-            password = str(generate_random_password(length=26,
-                                                    hex=True)).lower()
-        elif 'utf8' in test_func.__name__:
-            if 'french' in test_func.__name__:
-                password = utf8_password_2g_french
-            elif 'german' in test_func.__name__:
-                password = utf8_password_2g_german
-            elif 'dutch' in test_func.__name__:
-                password = utf8_password_2g_dutch
-            elif 'swedish' in test_func.__name__:
-                password = utf8_password_2g_swedish
-            elif 'norwegian' in test_func.__name__:
-                password = utf8_password_2g_norwegian
-            elif 'danish' in test_func.__name__:
-                password = utf8_password_2g_danish
-            elif 'japanese' in test_func.__name__:
-                password = utf8_password_2g_japanese
-            elif 'spanish' in test_func.__name__:
-                password = utf8_password_2g_spanish
-            elif 'italian' in test_func.__name__:
-                password = utf8_password_2g_italian
-            elif 'korean' in test_func.__name__:
-                password = utf8_password_2g_korean
-            else:
-                password = utf8_password_2g
-        else:
-            password = generate_random_password()
-        target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-            security_mode, None)
-
-        self.security_profile = Security(security_mode=security_mode,
-                                         password=password,
-                                         wpa_cipher=wpa_cipher,
-                                         wpa2_cipher=wpa2_cipher)
-        self.client_password = password
-        self.target_security = target_security
-        self.ssid = utils.rand_ascii_str(SSID_LENGTH_DEFAULT)
-        return test_func(self, *args, *kwargs)
-
-    return security_profile_generator
-
-
-class WlanSecurityComplianceABGTest(WifiBaseTest):
-    """Tests for validating 11a, 11b, and 11g PHYS.
-
-    Test Bed Requirement:
-    * One Android device or Fuchsia device
-    * One Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
-
-        self.access_point = self.access_points[0]
-
-        self.ssid = None
-        self.security_profile = None
-        self.client_password = None
-
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockAcquireBright()
-                ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockRelease()
-                ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    @create_security_profile
-    def test_associate_11a_sec_open_wep_5_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_open_wep_13_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_open_wep_10_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_open_wep_26_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_shared_wep_5_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_shared_wep_13_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_shared_wep_10_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_shared_wep_26_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_WMM_with_default_values_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=AP_11ABG_PROFILE_NAME,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=self.ssid,
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            security=self.security_profile,
-            password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=AP_11ABG_PROFILE_NAME,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=self.ssid,
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            security=self.security_profile,
-            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-            password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_WMM_with_default_values_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=AP_11ABG_PROFILE_NAME,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=self.ssid,
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            security=self.security_profile,
-            password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_WMM_with_default_values_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_open_wep_5_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_open_wep_13_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_open_wep_10_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_open_wep_26_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['open'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_shared_wep_5_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_shared_wep_13_chars_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_shared_wep_10_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_shared_wep_26_hex_ptk_none(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False,
-                 additional_ap_parameters=hostapd_constants.WEP_AUTH['shared'])
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_WMM_with_default_values_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=AP_11ABG_PROFILE_NAME,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.ssid,
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            security=self.security_profile,
-            password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=AP_11ABG_PROFILE_NAME,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.ssid,
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            security=self.security_profile,
-            password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_false(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Expected failure to associate. This device must support TKIP and '
-            'PMF, which is not supported on Fuchsia. If this device is a '
-            'mainstream device, we need to reconsider adding support for TKIP '
-            'and PMF on Fuchsia.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=AP_11ABG_PROFILE_NAME,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.ssid,
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            security=self.security_profile,
-            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-            password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_tkip(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_WMM_with_default_values_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 force_wmm=True,
-                 additional_ap_parameters=hostapd_constants.
-                 WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa3_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_WMM_with_default_values_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=AP_11ABG_PROFILE_NAME,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.ssid,
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            security=self.security_profile,
-            password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa3_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=AP_11ABG_PROFILE_NAME,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.ssid,
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            security=self.security_profile,
-            password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=AP_11ABG_PROFILE_NAME,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.ssid,
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            security=self.security_profile,
-            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-            password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=AP_11ABG_PROFILE_NAME,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.ssid,
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            security=self.security_profile,
-            password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 rts_threshold=256,
-                 frag_threshold=430,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.HIGH_DTIM,
-                 beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 dtim_period=hostapd_constants.LOW_DTIM,
-                 beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=AP_11ABG_PROFILE_NAME,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.ssid,
-            force_wmm=True,
-            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
-            security=self.security_profile,
-            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-            password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['correct_length_beacon'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['zero_length_beacon_without_data'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 additional_ap_parameters=hostapd_constants.
-                 VENDOR_IE['simliar_to_wpa'],
-                 security=self.security_profile,
-                 pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
-                 password=self.client_password)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_utf8_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_utf8_french_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_utf8_german_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_utf8_dutch_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_utf8_swedish_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_utf8_norwegian_password_11bg_sec_wpa2_psk_ptk_ccmp(
-            self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_utf8_danish_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_utf8_japanese_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_utf8_spanish_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_utf8_italian_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
-
-    @create_security_profile
-    def test_associate_utf8_korean_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
-        setup_ap(access_point=self.access_point,
-                 profile_name=AP_11ABG_PROFILE_NAME,
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 security=self.security_profile,
-                 password=self.client_password,
-                 force_wmm=False)
-
-        asserts.assert_true(
-            self.dut.associate(self.ssid,
-                               target_security=self.target_security,
-                               target_pwd=self.client_password),
-            'Failed to associate.')
diff --git a/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py b/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py
deleted file mode 100644
index 843d18f..0000000
--- a/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py
+++ /dev/null
@@ -1,189 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion import utils
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-AP_ROLE = 'Ap'
-DEFAULT_SSID = 'testssid'
-DEFAULT_SECURITY = 'none'
-DEFAULT_PASSWORD = ''
-DEFAULT_CONNECTIVITY_MODE = 'local_only'
-DEFAULT_OPERATING_BAND = 'any'
-TEST_MAC_ADDR = '12:34:56:78:9a:bc'
-TEST_MAC_ADDR_SECONDARY = 'bc:9a:78:56:34:12'
-
-
-class WlanDeprecatedConfigurationTest(WifiBaseTest):
-    """Tests for WlanDeprecatedConfigurationFacade"""
-
-    def setup_class(self):
-        super().setup_class()
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-
-    def setup_test(self):
-        self._stop_soft_aps()
-
-    def teardown_test(self):
-        self._stop_soft_aps()
-
-    def _get_ap_interface_mac_address(self):
-        """Retrieves mac address from wlan interface with role ap
-
-        Returns:
-            string, the mac address of the AP interface
-
-        Raises:
-            ConnectionError, if SL4F calls fail
-            AttributeError, if no interface has role 'Ap'
-        """
-        wlan_ifaces = self.dut.device.sl4f.wlan_lib.wlanGetIfaceIdList()
-        if wlan_ifaces.get('error'):
-            raise ConnectionError('Failed to get wlan interface IDs: %s' %
-                                  wlan_ifaces['error'])
-
-        for wlan_iface in wlan_ifaces['result']:
-            iface_info = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(
-                wlan_iface)
-            if iface_info.get('error'):
-                raise ConnectionError('Failed to query wlan iface: %s' %
-                                      iface_info['error'])
-
-            if iface_info['result']['role'] == AP_ROLE:
-                if 'mac_addr' in iface_info['result']:
-                    return utils.mac_address_list_to_str(
-                            iface_info['result']['mac_addr'])
-                elif 'sta_addr' in iface_info['result']:
-                    return utils.mac_address_list_to_str(
-                            iface_info['result']['sta_addr'])
-                raise AttributeError(
-                    'AP iface info does not contain MAC address.')
-        raise AttributeError(
-            'Failed to get ap interface mac address. No AP interface found.')
-
-    def _start_soft_ap(self):
-        """Starts SoftAP on DUT.
-
-        Raises:
-            ConnectionError, if SL4F call fails.
-        """
-        self.log.info('Starting SoftAP on Fuchsia device (%s).' %
-                      self.dut.device.ip)
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-            DEFAULT_SSID, DEFAULT_SECURITY, DEFAULT_PASSWORD,
-            DEFAULT_CONNECTIVITY_MODE, DEFAULT_OPERATING_BAND)
-        if response.get('error'):
-            raise ConnectionError('Failed to setup SoftAP: %s' %
-                                  response['error'])
-
-    def _stop_soft_aps(self):
-        """Stops SoftAP on DUT.
-
-        Raises:
-            ConnectionError, if SL4F call fails.
-        """
-        self.log.info('Stopping SoftAP.')
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint(
-        )
-        if response.get('error'):
-            raise ConnectionError('Failed to stop SoftAP: %s' %
-                                  response['error'])
-
-    def _suggest_ap_mac_addr(self, mac_addr):
-        """Suggests mac address for AP interface.
-        Args:
-            mac_addr: string, mac address to suggest.
-
-        Raises:
-            TestFailure, if SL4F call fails.
-        """
-        self.log.info(
-            'Suggesting AP mac addr (%s) via wlan_deprecated_configuration_lib.'
-            % mac_addr)
-        response = (self.dut.device.sl4f.wlan_deprecated_configuration_lib.
-                    wlanSuggestAccessPointMacAddress(mac_addr))
-        if response.get('error'):
-            asserts.fail('Failed to suggest AP mac address (%s): %s' %
-                         (mac_addr, response['error']))
-
-    def _verify_mac_addr(self, expected_addr):
-        """ Verifies mac address of ap interface is set to expected mac address.
-
-        Args:
-            Args:
-                expected_addr: string, expected mac address
-
-            Raises:
-                TestFailure, if actual mac address is not expected mac address.
-        """
-        set_mac_addr = self._get_ap_interface_mac_address()
-        if set_mac_addr != expected_addr:
-            asserts.fail(
-                'Failed to set AP mac address '
-                'via wlan_deprecated_configuration_lib. Expected mac addr: %s,'
-                ' Actual mac addr: %s' % (expected_addr, set_mac_addr))
-        else:
-            self.log.info('AP mac address successfully set to %s' %
-                          expected_addr)
-
-    def test_suggest_ap_mac_address(self):
-        """Tests suggest ap mac address SL4F call
-
-        1. Get initial mac address
-        2. Suggest new mac address
-        3. Verify new mac address is set successfully
-        4. Reset to initial mac address
-        5. Verify initial mac address is reset successfully
-
-
-        Raises:
-            TestFailure, if wlanSuggestAccessPointMacAddress call fails or
-                of mac address is not the suggest value
-            ConnectionError, if other SL4F calls fail
-        """
-        # Retrieve initial ap mac address
-        self._start_soft_ap()
-
-        self.log.info('Getting initial mac address.')
-        initial_mac_addr = self._get_ap_interface_mac_address()
-        self.log.info('Initial mac address: %s' % initial_mac_addr)
-
-        if initial_mac_addr != TEST_MAC_ADDR:
-            suggested_mac_addr = TEST_MAC_ADDR
-        else:
-            suggested_mac_addr = TEST_MAC_ADDR_SECONDARY
-
-        self._stop_soft_aps()
-
-        # Suggest and verify new mac address
-        self._suggest_ap_mac_addr(suggested_mac_addr)
-
-        self._start_soft_ap()
-
-        self._verify_mac_addr(suggested_mac_addr)
-
-        self._stop_soft_aps()
-
-        # Reset to initial mac address and verify
-        self.log.info('Resetting to initial mac address (%s).' %
-                      initial_mac_addr)
-        self._suggest_ap_mac_addr(initial_mac_addr)
-
-        self._start_soft_ap()
-
-        self._verify_mac_addr(initial_mac_addr)
diff --git a/src/antlion/tests/wlan/facade/WlanFacadeTest.py b/src/antlion/tests/wlan/facade/WlanFacadeTest.py
deleted file mode 100644
index 5a1d133..0000000
--- a/src/antlion/tests/wlan/facade/WlanFacadeTest.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for verifying that we can invoke methods of the WlanFacade.
-
-"""
-import array
-
-from antlion import asserts, signals
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-
-class WlanFacadeTest(WifiBaseTest):
-
-    def setup_class(self):
-        super().setup_class()
-        if len(self.fuchsia_devices) < 1:
-            raise signals.TestAbortClass(
-                "Sorry, please try verifying FuchsiaDevice is in your "
-                "config file and try again.")
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-
-    def test_get_phy_id_list(self):
-        result = self.dut.device.sl4f.wlan_lib.wlanPhyIdList()
-        error = result['error']
-        asserts.assert_true(error is None, error)
-
-        self.log.info('Got Phy IDs %s' % result['result'])
-        return True
-
-    def test_get_country(self):
-        wlan_lib = self.dut.device.sl4f.wlan_lib
-
-        result = wlan_lib.wlanPhyIdList()
-        error = result['error']
-        asserts.assert_true(error is None, error)
-        phy_id = result['result'][0]
-
-        result = wlan_lib.wlanGetCountry(phy_id)
-        error = result['error']
-        asserts.assert_true(error is None, error)
-
-        country_bytes = result['result']
-        country_string = str(array.array('b', country_bytes),
-                             encoding='us-ascii')
-        self.log.info('Got country %s (%s)', country_string, country_bytes)
-        return True
-
-    def test_get_dev_path(self):
-        wlan_lib = self.dut.device.sl4f.wlan_lib
-
-        result = wlan_lib.wlanPhyIdList()
-        error = result['error']
-        asserts.assert_true(error is None, error)
-        phy_id = result['result'][0]
-
-        result = wlan_lib.wlanGetDevPath(phy_id)
-        error = result['error']
-        asserts.assert_true(error is None, error)
-
-        dev_path = result['result']
-        self.log.info('Got device path: %s', dev_path)
-        return True
diff --git a/src/antlion/tests/wlan/facade/WlanStatusTest.py b/src/antlion/tests/wlan/facade/WlanStatusTest.py
deleted file mode 100644
index 4c35c1e..0000000
--- a/src/antlion/tests/wlan/facade/WlanStatusTest.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Test to verify that a DUT's client interface's status can be queried.
-"""
-
-from antlion import signals
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-
-class WlanStatusTest(WifiBaseTest):
-    """WLAN status test class.
-
-    Test Bed Requirements:
-    * One or more Fuchsia devices with WLAN client capabilities.
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            super().on_device_fail(fd, test_name, begin_time)
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
-
-    def test_wlan_stopped_client_status(self):
-        """Queries WLAN status on DUTs with no WLAN ifaces.
-
-        Tests that DUTs without WLAN interfaces have empty results and return
-        an error when queried for status.
-        """
-        for fd in self.fuchsia_devices:
-            fd.deconfigure_wlan()
-
-            status = fd.sl4f.wlan_lib.wlanStatus()
-            self.log.debug(status)
-            if not status["error"] or status["result"]:
-                raise signals.TestFailure(
-                    "DUT's WLAN client status should be empty")
-
-        raise signals.TestPass("Success")
-
-    def test_wlan_started_client_status(self):
-        """Queries WLAN status on DUTs with WLAN ifaces.
-
-        Tests that, once WLAN client interfaces have been created, each one
-        returns a result and that none of them return errors when queried for
-        status.
-        """
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
-
-            status = fd.sl4f.wlan_lib.wlanStatus()
-            self.log.debug(status)
-            if status["error"] or not status["result"]:
-                raise signals.TestFailure(
-                    "DUT's WLAN client status should be populated")
-
-        raise signals.TestPass("Success")
diff --git a/src/antlion/tests/wlan/functional/BeaconLossTest.py b/src/antlion/tests/wlan/functional/BeaconLossTest.py
deleted file mode 100644
index 6c8b74a..0000000
--- a/src/antlion/tests/wlan/functional/BeaconLossTest.py
+++ /dev/null
@@ -1,142 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for testing WiFi recovery after rebooting the AP.
-
-Override default number of iterations using the following
-parameter in the test config file.
-
-"beacon_loss_test_iterations": "5"
-"""
-
-import time
-
-from antlion import asserts
-from antlion import signals
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.utils import rand_ascii_str
-
-
-class BeaconLossTest(WifiBaseTest):
-    # Default number of test iterations here.
-    # Override using parameter in config file.
-    # Eg: "beacon_loss_test_iterations": "10"
-    num_of_iterations = 5
-
-    # Time to wait for AP to startup
-    wait_ap_startup_s = 15
-
-    # Default wait time in seconds for the AP radio to turn back on
-    wait_to_connect_after_ap_txon_s = 5
-
-    # Time to wait for device to disconnect after AP radio of
-    wait_after_ap_txoff_s = 15
-
-    # Time to wait for device to complete connection setup after
-    # given an associate command
-    wait_client_connection_setup_s = 15
-
-    def setup_class(self):
-        super().setup_class()
-        self.ssid = rand_ascii_str(10)
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
-        self.access_point = self.access_points[0]
-        self.num_of_iterations = int(
-            self.user_params.get("beacon_loss_test_iterations",
-                                 self.num_of_iterations))
-        self.in_use_interface = None
-
-    def teardown_test(self):
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        # ensure radio is on, in case the test failed while the radio was off
-        self.access_point.iwconfig.ap_iwconfig(self.in_use_interface,
-                                               "txpower on")
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def beacon_loss(self, channel):
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=channel,
-                 ssid=self.ssid)
-        time.sleep(self.wait_ap_startup_s)
-        if channel > 14:
-            self.in_use_interface = self.access_point.wlan_5g
-        else:
-            self.in_use_interface = self.access_point.wlan_2g
-
-        # TODO(b/144505723): [ACTS] update BeaconLossTest.py to handle client
-        # roaming, saved networks, etc.
-        self.log.info("sending associate command for ssid %s", self.ssid)
-        self.dut.associate(target_ssid=self.ssid)
-
-        asserts.assert_true(self.dut.is_connected(), 'Failed to connect.')
-
-        time.sleep(self.wait_client_connection_setup_s)
-
-        for _ in range(0, self.num_of_iterations):
-            # Turn off AP radio
-            self.log.info("turning off radio")
-            self.access_point.iwconfig.ap_iwconfig(self.in_use_interface,
-                                                   "txpower off")
-            time.sleep(self.wait_after_ap_txoff_s)
-
-            # Did we disconnect from AP?
-            asserts.assert_false(self.dut.is_connected(),
-                                 'Failed to disconnect.')
-
-            # Turn on AP radio
-            self.log.info("turning on radio")
-            self.access_point.iwconfig.ap_iwconfig(self.in_use_interface,
-                                                   "txpower on")
-            time.sleep(self.wait_to_connect_after_ap_txon_s)
-
-            # Tell the client to connect
-            self.log.info("sending associate command for ssid %s" % self.ssid)
-            self.dut.associate(target_ssid=self.ssid)
-            time.sleep(self.wait_client_connection_setup_s)
-
-            # Did we connect back to WiFi?
-            asserts.assert_true(self.dut.is_connected(),
-                                'Failed to connect back.')
-
-        return True
-
-    def test_beacon_loss_2g(self):
-        self.beacon_loss(channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G)
-
-    def test_beacon_loss_5g(self):
-        self.beacon_loss(channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G)
diff --git a/src/antlion/tests/wlan/functional/ChannelSwitchTest.py b/src/antlion/tests/wlan/functional/ChannelSwitchTest.py
deleted file mode 100644
index 1c62919..0000000
--- a/src/antlion/tests/wlan/functional/ChannelSwitchTest.py
+++ /dev/null
@@ -1,379 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Tests STA handling of channel switch announcements.
-"""
-
-import random
-import time
-
-from antlion import asserts
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.utils import rand_ascii_str
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from typing import Sequence
-
-
-class ChannelSwitchTest(WifiBaseTest):
-    # Time to wait between issuing channel switches
-    WAIT_BETWEEN_CHANNEL_SWITCHES_S = 15
-
-    # For operating class 115 tests.
-    GLOBAL_OPERATING_CLASS_115_CHANNELS = [36, 40, 44, 48]
-    # A channel outside the operating class.
-    NON_GLOBAL_OPERATING_CLASS_115_CHANNEL = 52
-
-    # For operating class 124 tests.
-    GLOBAL_OPERATING_CLASS_124_CHANNELS = [149, 153, 157, 161]
-    # A channel outside the operating class.
-    NON_GLOBAL_OPERATING_CLASS_124_CHANNEL = 52
-
-    def setup_class(self) -> None:
-        super().setup_class()
-        self.ssid = rand_ascii_str(10)
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
-        self.access_point = self.access_points[0]
-        self._stop_all_soft_aps()
-        self.in_use_interface = None
-
-    def teardown_test(self) -> None:
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    # TODO(fxbug.dev/85738): Change band type to an enum.
-    def channel_switch(self,
-                       band: str,
-                       starting_channel: int,
-                       channel_switches: Sequence[int],
-                       test_with_soft_ap: bool = False) -> None:
-        """Setup and run a channel switch test with the given parameters.
-
-        Creates an AP, associates to it, and then issues channel switches
-        through the provided channels. After each channel switch, the test
-        checks that the DUT is connected for a period of time before considering
-        the channel switch successful. If directed to start a SoftAP, the test
-        will also check that the SoftAP is on the expected channel after each
-        channel switch.
-
-        Args:
-            band: band that AP will use, must be a valid band (e.g.
-                hostapd_constants.BAND_2G)
-            starting_channel: channel number that AP will use at startup
-            channel_switches: ordered list of channels that the test will
-                attempt to switch to
-            test_with_soft_ap: whether to start a SoftAP before beginning the
-                channel switches (default is False); note that if a SoftAP is
-                started, the test will also check that the SoftAP handles
-                channel switches correctly
-        """
-        asserts.assert_true(
-            band in [hostapd_constants.BAND_2G, hostapd_constants.BAND_5G],
-            'Failed to setup AP, invalid band {}'.format(band))
-
-        self.current_channel_num = starting_channel
-        if band == hostapd_constants.BAND_5G:
-            self.in_use_interface = self.access_point.wlan_5g
-        elif band == hostapd_constants.BAND_2G:
-            self.in_use_interface = self.access_point.wlan_2g
-        asserts.assert_true(
-            self._channels_valid_for_band([self.current_channel_num], band),
-            'starting channel {} not a valid channel for band {}'.format(
-                self.current_channel_num, band))
-
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=self.current_channel_num,
-                 ssid=self.ssid)
-        if test_with_soft_ap:
-            self._start_soft_ap()
-        self.log.info('sending associate command for ssid %s', self.ssid)
-        self.dut.associate(target_ssid=self.ssid)
-        asserts.assert_true(self.dut.is_connected(), 'Failed to connect.')
-
-        asserts.assert_true(channel_switches,
-                            'Cannot run test, no channels to switch to')
-        asserts.assert_true(
-            self._channels_valid_for_band(channel_switches, band),
-            'channel_switches {} includes invalid channels for band {}'.format(
-                channel_switches, band))
-
-        for channel_num in channel_switches:
-            if channel_num == self.current_channel_num:
-                continue
-            self.log.info('channel switch: {} -> {}'.format(
-                self.current_channel_num, channel_num))
-            self.access_point.channel_switch(self.in_use_interface,
-                                             channel_num)
-            channel_num_after_switch = self.access_point.get_current_channel(
-                self.in_use_interface)
-            asserts.assert_equal(channel_num_after_switch, channel_num,
-                                 'AP failed to channel switch')
-            self.current_channel_num = channel_num
-
-            # Check periodically to see if DUT stays connected. Sometimes
-            # CSA-induced disconnects occur seconds after last channel switch.
-            for _ in range(self.WAIT_BETWEEN_CHANNEL_SWITCHES_S):
-                asserts.assert_true(
-                    self.dut.is_connected(),
-                    'Failed to stay connected after channel switch.')
-                client_channel = self._client_channel()
-                asserts.assert_equal(
-                    client_channel, channel_num,
-                    'Client interface on wrong channel ({})'.format(
-                        client_channel))
-                if test_with_soft_ap:
-                    soft_ap_channel = self._soft_ap_channel()
-                    asserts.assert_equal(
-                        soft_ap_channel, channel_num,
-                        'SoftAP interface on wrong channel ({})'.format(
-                            soft_ap_channel))
-                time.sleep(1)
-
-    def test_channel_switch_2g(self) -> None:
-        """Channel switch through all (US only) channels in the 2 GHz band."""
-        self.channel_switch(
-            band=hostapd_constants.BAND_2G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            channel_switches=hostapd_constants.US_CHANNELS_2G)
-
-    def test_channel_switch_2g_with_soft_ap(self) -> None:
-        """Channel switch through (US only) 2 Ghz channels with SoftAP up."""
-        self.channel_switch(
-            band=hostapd_constants.BAND_2G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            channel_switches=hostapd_constants.US_CHANNELS_2G,
-            test_with_soft_ap=True)
-
-    def test_channel_switch_2g_shuffled_with_soft_ap(self) -> None:
-        """Switch through shuffled (US only) 2 Ghz channels with SoftAP up."""
-        channels = hostapd_constants.US_CHANNELS_2G
-        random.shuffle(channels)
-        self.log.info('Shuffled channel switch sequence: {}'.format(channels))
-        self.channel_switch(
-            band=hostapd_constants.BAND_2G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            channel_switches=channels,
-            test_with_soft_ap=True)
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_5g(self) -> None:
-        """Channel switch through all (US only) channels in the 5 GHz band."""
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_switches=hostapd_constants.US_CHANNELS_5G)
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_5g_with_soft_ap(self) -> None:
-        """Channel switch through (US only) 5 GHz channels with SoftAP up."""
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_switches=hostapd_constants.US_CHANNELS_5G,
-            test_with_soft_ap=True)
-
-    def test_channel_switch_5g_shuffled_with_soft_ap(self) -> None:
-        """Switch through shuffled (US only) 5 Ghz channels with SoftAP up."""
-        channels = hostapd_constants.US_CHANNELS_5G
-        random.shuffle(channels)
-        self.log.info('Shuffled channel switch sequence: {}'.format(channels))
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_switches=channels,
-            test_with_soft_ap=True)
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_regression_global_operating_class_115(
-            self) -> None:
-        """Channel switch into, through, and out of global op. class 115 channels.
-
-        Global operating class 115 is described in IEEE 802.11-2016 Table E-4.
-        Regression test for fxbug.dev/84777.
-        """
-        channels = self.GLOBAL_OPERATING_CLASS_115_CHANNELS + [
-            self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL
-        ]
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL,
-            channel_switches=channels)
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_regression_global_operating_class_115_with_soft_ap(
-            self) -> None:
-        """Test global operating class 124 channel switches, with SoftAP.
-
-        Regression test for fxbug.dev/84777.
-        """
-        channels = self.GLOBAL_OPERATING_CLASS_115_CHANNELS + [
-            self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL
-        ]
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL,
-            channel_switches=channels,
-            test_with_soft_ap=True)
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_regression_global_operating_class_124(
-            self) -> None:
-        """Switch into, through, and out of global op. class 124 channels.
-
-        Global operating class 124 is described in IEEE 802.11-2016 Table E-4.
-        Regression test for fxbug.dev/64279.
-        """
-        channels = self.GLOBAL_OPERATING_CLASS_124_CHANNELS + [
-            self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL
-        ]
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL,
-            channel_switches=channels)
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_regression_global_operating_class_124_with_soft_ap(
-            self) -> None:
-        """Test global operating class 124 channel switches, with SoftAP.
-
-        Regression test for fxbug.dev/64279.
-        """
-        channels = self.GLOBAL_OPERATING_CLASS_124_CHANNELS + [
-            self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL
-        ]
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL,
-            channel_switches=channels,
-            test_with_soft_ap=True)
-
-    def _channels_valid_for_band(self, channels: Sequence[int],
-                                 band: str) -> bool:
-        """Determine if the channels are valid for the band (US only).
-
-        Args:
-            channels: channel numbers
-            band: a valid band (e.g. hostapd_constants.BAND_2G)
-        """
-        if band == hostapd_constants.BAND_2G:
-            band_channels = frozenset(hostapd_constants.US_CHANNELS_2G)
-        elif band == hostapd_constants.BAND_5G:
-            band_channels = frozenset(hostapd_constants.US_CHANNELS_5G)
-        else:
-            asserts.fail('Invalid band {}'.format(band))
-        channels_set = frozenset(channels)
-        if channels_set <= band_channels:
-            return True
-        return False
-
-    def _start_soft_ap(self) -> None:
-        """Start a SoftAP on the DUT.
-
-        Raises:
-            EnvironmentError: if the SoftAP does not start
-        """
-        ssid = rand_ascii_str(10)
-        security_type = 'none'
-        password = ''
-        connectivity_mode = 'local_only'
-        operating_band = 'any'
-
-        self.log.info('Starting SoftAP on DUT')
-
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-            ssid, security_type, password, connectivity_mode, operating_band)
-        if response.get('error'):
-            raise EnvironmentError('SL4F: Failed to setup SoftAP. Err: %s' %
-                                   response['error'])
-        self.log.info('SoftAp network (%s) is up.' % ssid)
-
-    def _stop_all_soft_aps(self) -> None:
-        """Stops all SoftAPs on Fuchsia Device.
-
-        Raises:
-            EnvironmentError: if SoftAP stop call fails
-        """
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint(
-        )
-        if response.get('error'):
-            raise EnvironmentError(
-                'SL4F: Failed to stop all SoftAPs. Err: %s' %
-                response['error'])
-
-    def _client_channel(self) -> int:
-        """Determine the channel of the DUT client interface.
-
-        If the interface is not connected, the method will assert a test
-        failure.
-
-        Returns: channel number
-
-        Raises:
-            EnvironmentError: if client interface channel cannot be
-                determined
-        """
-        status = self.dut.status()
-        if status['error']:
-            raise EnvironmentError('Could not determine client channel')
-
-        result = status['result']
-        if isinstance(result, dict):
-            if result.get('Connected'):
-                return result['Connected']['channel']['primary']
-            asserts.fail('Client interface not connected')
-        raise EnvironmentError('Could not determine client channel')
-
-    def _soft_ap_channel(self) -> int:
-        """Determine the channel of the DUT SoftAP interface.
-
-        If the interface is not connected, the method will assert a test
-        failure.
-
-        Returns: channel number
-
-        Raises:
-            EnvironmentError: if SoftAP interface channel cannot be determined.
-        """
-        iface_ids = self.dut.get_wlan_interface_id_list()
-        for iface_id in iface_ids:
-            query = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(iface_id)
-            if query['error']:
-                continue
-            query_result = query['result']
-            if type(query_result) is dict and query_result.get('role') == 'Ap':
-                status = self.dut.device.sl4f.wlan_lib.wlanStatus(iface_id)
-                if status['error']:
-                    continue
-                status_result = status['result']
-                if isinstance(status_result, dict):
-                    if status_result.get('Connected'):
-                        return status_result['Connected']['channel']['primary']
-                    asserts.fail('SoftAP interface not connected')
-        raise EnvironmentError('Could not determine SoftAP channel')
diff --git a/src/antlion/tests/wlan/functional/ConnectionStressTest.py b/src/antlion/tests/wlan/functional/ConnectionStressTest.py
deleted file mode 100644
index 635c902..0000000
--- a/src/antlion/tests/wlan/functional/ConnectionStressTest.py
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for testing WiFi connection and disconnection in a loop
-
-"""
-
-import time
-
-from antlion import signals
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.utils import rand_ascii_str
-
-
-class ConnectionStressTest(WifiBaseTest):
-    # Default number of test iterations here.
-    # Override using parameter in config file.
-    # Eg: "connection_stress_test_iterations": "50"
-    num_of_iterations = 10
-    channel_2G = hostapd_constants.AP_DEFAULT_CHANNEL_2G
-    channel_5G = hostapd_constants.AP_DEFAULT_CHANNEL_5G
-
-    def setup_class(self):
-        super().setup_class()
-        self.ssid = rand_ascii_str(10)
-        self.fd = self.fuchsia_devices[0]
-        self.dut = create_wlan_device(self.fd)
-        self.access_point = self.access_points[0]
-        self.num_of_iterations = int(
-            self.user_params.get("connection_stress_test_iterations",
-                                 self.num_of_iterations))
-        self.log.info('iterations: %d' % self.num_of_iterations)
-
-    def teardown_test(self):
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def start_ap(self, profile, channel, security=None):
-        """Starts an Access Point
-
-        Args:
-            profile: Profile name such as 'whirlwind'
-            channel: Channel to operate on
-        """
-        self.log.info('Profile: %s, Channel: %d' % (profile, channel))
-        setup_ap(access_point=self.access_point,
-                 profile_name=profile,
-                 channel=channel,
-                 ssid=self.ssid,
-                 security=security)
-
-    def connect_disconnect(self,
-                           ap_config,
-                           ssid=None,
-                           password=None,
-                           negative_test=False):
-        """Helper to start an AP, connect DUT to it and disconnect
-
-        Args:
-            ap_config: Dictionary contaning profile name and channel
-            ssid: ssid to connect to
-            password: password for the ssid to connect to
-        """
-        security_mode = ap_config.get('security_mode', None)
-        target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-            security_mode, None)
-
-        if security_mode:
-            security_profile = hostapd_security.Security(
-                security_mode=ap_config['security_mode'],
-                password=ap_config['password'])
-        else:
-            security_profile = None
-
-        # Start AP
-        self.start_ap(ap_config['profile'],
-                      ap_config['channel'],
-                      security=security_profile)
-
-        failed = False
-        # Connect and Disconnect several times
-        for x in range(0, self.num_of_iterations):
-            if not ssid:
-                ssid = self.ssid
-            if negative_test:
-                if not self.dut.associate(ssid,
-                                          target_pwd=password,
-                                          target_security=target_security):
-                    self.log.info(
-                        'Attempt %d. Did not associate as expected.' % x)
-                else:
-                    self.log.error('Attempt %d. Negative test successfully '
-                                   'associated. Fail.' % x)
-                    failed = True
-            else:
-                # Connect
-                if self.dut.associate(ssid, target_pwd=password):
-                    self.log.info('Attempt %d. Successfully associated' % x)
-                else:
-                    self.log.error('Attempt %d. Failed to associate.' % x)
-                    failed = True
-                # Disconnect
-                self.dut.disconnect()
-
-            # Wait a second before trying again
-            time.sleep(1)
-
-        # Stop AP
-        self.access_point.stop_all_aps()
-        if failed:
-            raise signals.TestFailure(
-                'One or more association attempt failed.')
-
-    def test_whirlwind_2g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind',
-            'channel': self.channel_2G,
-            'security_mode': None
-        })
-
-    def test_whirlwind_5g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind',
-            'channel': self.channel_5G,
-            'security_mode': None
-        })
-
-    def test_whirlwind_11ab_2g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind_11ab_legacy',
-            'channel': self.channel_2G,
-            'security_mode': None
-        })
-
-    def test_whirlwind_11ab_5g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind_11ab_legacy',
-            'channel': self.channel_5G,
-            'security_mode': None
-        })
-
-    def test_whirlwind_11ag_2g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind_11ag_legacy',
-            'channel': self.channel_2G,
-            'security_mode': None
-        })
-
-    def test_whirlwind_11ag_5g(self):
-        self.connect_disconnect({
-            'profile': 'whirlwind_11ag_legacy',
-            'channel': self.channel_5G,
-            'security_mode': None
-        })
-
-    def test_wrong_ssid_whirlwind_2g(self):
-        self.connect_disconnect(
-            {
-                'profile': 'whirlwind',
-                'channel': self.channel_2G,
-                'security_mode': None
-            },
-            ssid=rand_ascii_str(20),
-            negative_test=True)
-
-    def test_wrong_ssid_whirlwind_5g(self):
-        self.connect_disconnect(
-            {
-                'profile': 'whirlwind',
-                'channel': self.channel_5G,
-                'security_mode': None
-            },
-            ssid=rand_ascii_str(20),
-            negative_test=True)
-
-    def test_wrong_password_whirlwind_2g(self):
-        self.connect_disconnect(
-            {
-                'profile': 'whirlwind',
-                'channel': self.channel_2G,
-                'security_mode': hostapd_constants.WPA2_STRING,
-                'password': rand_ascii_str(10)
-            },
-            password=rand_ascii_str(20),
-            negative_test=True)
-
-    def test_wrong_password_whirlwind_5g(self):
-        self.connect_disconnect(
-            {
-                'profile': 'whirlwind',
-                'channel': self.channel_5G,
-                'security_mode': hostapd_constants.WPA2_STRING,
-                'password': rand_ascii_str(10)
-            },
-            password=rand_ascii_str(20),
-            negative_test=True)
diff --git a/src/antlion/tests/wlan/functional/DownloadStressTest.py b/src/antlion/tests/wlan/functional/DownloadStressTest.py
deleted file mode 100644
index e376539..0000000
--- a/src/antlion/tests/wlan/functional/DownloadStressTest.py
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for testing various download stress scenarios.
-
-"""
-import threading
-
-from antlion import signals
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.fuchsia import utils
-from antlion.utils import rand_ascii_str
-
-
-class DownloadStressTest(WifiBaseTest):
-    # Default number of test iterations here.
-    # Override using parameter in config file.
-    # Eg: "download_stress_test_iterations": "10"
-    num_of_iterations = 3
-
-    # Timeout for download thread in seconds
-    download_timeout_s = 60 * 5
-
-    # Download urls
-    url_20MB = 'http://ipv4.download.thinkbroadband.com/20MB.zip'
-    url_40MB = 'http://ipv4.download.thinkbroadband.com/40MB.zip'
-    url_60MB = 'http://ipv4.download.thinkbroadband.com/60MB.zip'
-    url_512MB = 'http://ipv4.download.thinkbroadband.com/512MB.zip'
-
-    # Constants used in test_one_large_multiple_small_downloads
-    download_small_url = url_20MB
-    download_large_url = url_512MB
-    num_of_small_downloads = 5
-    download_threads_result = []
-
-    def setup_class(self):
-        super().setup_class()
-        self.ssid = rand_ascii_str(10)
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-        self.access_point = self.access_points[0]
-        self.num_of_iterations = int(
-            self.user_params.get("download_stress_test_iterations",
-                                 self.num_of_iterations))
-
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid)
-        self.dut.associate(self.ssid)
-
-    def teardown_test(self):
-        self.download_threads_result.clear()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def test_download_small(self):
-        self.log.info("Downloading small file")
-        return self.download_file(self.url_20MB)
-
-    def test_download_large(self):
-        return self.download_file(self.url_512MB)
-
-    def test_continuous_download(self):
-        for x in range(0, self.num_of_iterations):
-            if not self.download_file(self.url_512MB):
-                return False
-        return True
-
-    def download_file(self, url):
-        self.log.info("Start downloading: %s" % url)
-        return utils.http_file_download_by_curl(
-            self.dut.device,
-            url,
-            additional_args='--max-time %d --silent' % self.download_timeout_s)
-
-    def download_thread(self, url):
-        download_status = self.download_file(url)
-        if download_status:
-            self.log.info("Success downloading: %s" % url)
-        else:
-            self.log.info("Failure downloading: %s" % url)
-
-        self.download_threads_result.append(download_status)
-        return download_status
-
-    def test_multi_downloads(self):
-        download_urls = [self.url_20MB, self.url_40MB, self.url_60MB]
-        download_threads = []
-
-        try:
-            # Start multiple downloads at the same time
-            for index, url in enumerate(download_urls):
-                self.log.info('Create and start thread %d.' % index)
-                t = threading.Thread(target=self.download_thread, args=(url, ))
-                download_threads.append(t)
-                t.start()
-
-            # Wait for all threads to complete or timeout
-            for t in download_threads:
-                t.join(self.download_timeout_s)
-
-        finally:
-            is_alive = False
-
-            for index, t in enumerate(download_threads):
-                if t.isAlive():
-                    t = None
-                    is_alive = True
-
-            if is_alive:
-                raise signals.TestFailure('Thread %d timedout' % index)
-
-        for index in range(0, len(self.download_threads_result)):
-            if not self.download_threads_result[index]:
-                self.log.info("Download failed for %d" % index)
-                raise signals.TestFailure('Thread %d failed to download' %
-                                          index)
-                return False
-
-        return True
-
-    def test_one_large_multiple_small_downloads(self):
-        for index in range(self.num_of_iterations):
-            download_threads = []
-            try:
-                large_thread = threading.Thread(
-                    target=self.download_thread,
-                    args=(self.download_large_url, ))
-                download_threads.append(large_thread)
-                large_thread.start()
-
-                for i in range(self.num_of_small_downloads):
-                    # Start small file download
-                    t = threading.Thread(target=self.download_thread,
-                                         args=(self.download_small_url, ))
-                    download_threads.append(t)
-                    t.start()
-                    # Wait for thread to exit before starting the next iteration
-                    t.join(self.download_timeout_s)
-
-                # Wait for the large file download thread to complete
-                large_thread.join(self.download_timeout_s)
-
-            finally:
-                is_alive = False
-
-                for index, t in enumerate(download_threads):
-                    if t.isAlive():
-                        t = None
-                        is_alive = True
-
-                if is_alive:
-                    raise signals.TestFailure('Thread %d timedout' % index)
-
-            for index in range(0, len(self.download_threads_result)):
-                if not self.download_threads_result[index]:
-                    self.log.info("Download failed for %d" % index)
-                    raise signals.TestFailure('Thread %d failed to download' %
-                                              index)
-                    return False
-
-            # Clear results before looping again
-            self.download_threads_result.clear()
-
-        return True
diff --git a/src/antlion/tests/wlan/functional/PingStressTest.py b/src/antlion/tests/wlan/functional/PingStressTest.py
deleted file mode 100644
index 1653205..0000000
--- a/src/antlion/tests/wlan/functional/PingStressTest.py
+++ /dev/null
@@ -1,224 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-PingStressTest exercises sending ICMP and ICMPv6 pings to a wireless access
-router and another device behind the AP. Note, this does not reach out to the
-internet. The DUT is only responsible for sending a routable packet; any
-communication past the first-hop is not the responsibility of the DUT.
-"""
-
-import threading
-
-from collections import namedtuple
-
-from antlion import signals
-from antlion import utils
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.utils import rand_ascii_str
-
-LOOPBACK_IPV4 = '127.0.0.1'
-LOOPBACK_IPV6 = '::1'
-PING_RESULT_TIMEOUT_SEC = 60 * 5
-
-Test = namedtuple(
-    typename='Args',
-    field_names=['name', 'dest_ip', 'count', 'interval', 'timeout', 'size'],
-    defaults=[3, 1000, 1000, 25])
-
-Addrs = namedtuple(
-    typename='Addrs',
-    field_names=['gateway_ipv4', 'gateway_ipv6', 'remote_ipv4', 'remote_ipv6'])
-
-
-class PingStressTest(WifiBaseTest):
-
-    def setup_generated_tests(self):
-        self.generate_tests(
-            self.send_ping, lambda test_name, *_: f'test_{test_name}', [
-                Test("loopback_ipv4", LOOPBACK_IPV4),
-                Test("loopback_ipv6", LOOPBACK_IPV6),
-                Test("gateway_ipv4", lambda addrs: addrs.gateway_ipv4),
-                Test("gateway_ipv6", lambda addrs: addrs.gateway_ipv6),
-                Test("remote_ipv4_small_packet",
-                     lambda addrs: addrs.remote_ipv4),
-                Test("remote_ipv6_small_packet",
-                     lambda addrs: addrs.remote_ipv6),
-                Test("remote_ipv4_small_packet_long",
-                     lambda addrs: addrs.remote_ipv4,
-                     count=50),
-                Test("remote_ipv6_small_packet_long",
-                     lambda addrs: addrs.remote_ipv6,
-                     count=50),
-                Test("remote_ipv4_medium_packet",
-                     lambda addrs: addrs.remote_ipv4,
-                     size=64),
-                Test("remote_ipv6_medium_packet",
-                     lambda addrs: addrs.remote_ipv6,
-                     size=64),
-                Test("remote_ipv4_medium_packet_long",
-                     lambda addrs: addrs.remote_ipv4,
-                     count=50,
-                     timeout=1500,
-                     size=64),
-                Test("remote_ipv6_medium_packet_long",
-                     lambda addrs: addrs.remote_ipv6,
-                     count=50,
-                     timeout=1500,
-                     size=64),
-                Test("remote_ipv4_large_packet",
-                     lambda addrs: addrs.remote_ipv4,
-                     size=500),
-                Test("remote_ipv6_large_packet",
-                     lambda addrs: addrs.remote_ipv6,
-                     size=500),
-                Test("remote_ipv4_large_packet_long",
-                     lambda addrs: addrs.remote_ipv4,
-                     count=50,
-                     timeout=5000,
-                     size=500),
-                Test("remote_ipv6_large_packet_long",
-                     lambda addrs: addrs.remote_ipv6,
-                     count=50,
-                     timeout=5000,
-                     size=500),
-            ])
-
-    def setup_class(self):
-        super().setup_class()
-        self.ssid = rand_ascii_str(10)
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-        self.access_point = self.access_points[0]
-        self.iperf_server = self.iperf_servers[0]
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=self.ssid,
-                 setup_bridge=True,
-                 is_ipv6_enabled=True,
-                 is_nat_enabled=False)
-
-        ap_bridges = self.access_point.interfaces.get_bridge_interface()
-        if len(ap_bridges) != 1:
-            raise signals.TestAbortClass(
-                f'Expected one bridge interface on the AP, got {ap_bridges}')
-        self.ap_ipv4 = utils.get_addr(self.access_point.ssh, ap_bridges[0])
-        self.ap_ipv6 = utils.get_addr(self.access_point.ssh,
-                                      ap_bridges[0],
-                                      addr_type='ipv6_link_local')
-        self.log.info(
-            f"Gateway finished setup ({self.ap_ipv4} | {self.ap_ipv6})")
-
-        self.iperf_server.renew_test_interface_ip_address()
-        self.iperf_server_ipv4 = self.iperf_server.get_addr()
-        self.iperf_server_ipv6 = self.iperf_server.get_addr(
-            addr_type='ipv6_private_local')
-        self.log.info(
-            f"Remote finished setup ({self.iperf_server_ipv4} | {self.iperf_server_ipv6})"
-        )
-
-        self.dut.associate(self.ssid)
-
-        # Wait till the DUT has valid IP addresses after connecting.
-        self.dut.device.wait_for_ipv4_addr(
-            self.dut.device.wlan_client_test_interface_name)
-        self.dut.device.wait_for_ipv6_addr(
-            self.dut.device.wlan_client_test_interface_name)
-        self.log.info("DUT has valid IP addresses on test network")
-
-    def teardown_class(self):
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def send_ping(self,
-                  _,
-                  get_addr_fn,
-                  count=3,
-                  interval=1000,
-                  timeout=1000,
-                  size=25):
-        dest_ip = get_addr_fn(
-            Addrs(
-                gateway_ipv4=self.ap_ipv4,
-                # IPv6 link-local addresses require specification of the
-                # outgoing interface as the scope ID when sending packets.
-                gateway_ipv6=
-                f'{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}',
-                remote_ipv4=self.iperf_server_ipv4,
-                # IPv6 global addresses do not require scope IDs.
-                remote_ipv6=self.iperf_server_ipv6)) if callable(
-                    get_addr_fn) else get_addr_fn
-
-        self.log.info(f'Attempting to ping {dest_ip}...')
-        ping_result = self.dut.can_ping(dest_ip, count, interval, timeout,
-                                        size)
-        if ping_result:
-            self.log.info('Ping was successful.')
-        else:
-            raise signals.TestFailure('Ping was unsuccessful.')
-
-    def test_simultaneous_pings(self):
-        ping_urls = [
-            self.iperf_server_ipv4,
-            self.ap_ipv4,
-            self.iperf_server_ipv6,
-            f'{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}',
-        ]
-        ping_threads = []
-        ping_results = []
-
-        def ping_thread(self, dest_ip, ping_results):
-            self.log.info('Attempting to ping %s...' % dest_ip)
-            ping_result = self.dut.can_ping(dest_ip, count=10, size=50)
-            if ping_result:
-                self.log.info('Success pinging: %s' % dest_ip)
-            else:
-                self.log.info('Failure pinging: %s' % dest_ip)
-            ping_results.append(ping_result)
-
-        try:
-            # Start multiple ping at the same time
-            for index, url in enumerate(ping_urls):
-                t = threading.Thread(target=ping_thread,
-                                     args=(self, url, ping_results))
-                ping_threads.append(t)
-                t.start()
-
-            # Wait for all threads to complete or timeout
-            for t in ping_threads:
-                t.join(PING_RESULT_TIMEOUT_SEC)
-
-        finally:
-            is_alive = False
-
-            for index, t in enumerate(ping_threads):
-                if t.is_alive():
-                    t = None
-                    is_alive = True
-
-            if is_alive:
-                raise signals.TestFailure(
-                    f'Timed out while pinging {ping_urls[index]}')
-
-        for index in range(0, len(ping_results)):
-            if not ping_results[index]:
-                raise signals.TestFailure(f'Failed to ping {ping_urls[index]}')
-        return True
diff --git a/src/antlion/tests/wlan/functional/SoftApTest.py b/src/antlion/tests/wlan/functional/SoftApTest.py
deleted file mode 100644
index 7255b0c..0000000
--- a/src/antlion/tests/wlan/functional/SoftApTest.py
+++ /dev/null
@@ -1,2029 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mobly import signals
-import multiprocessing as mp
-import random
-import time
-
-from antlion import utils
-from antlion import asserts
-from antlion.controllers import iperf_server
-from antlion.controllers import iperf_client
-from antlion.controllers.access_point import setup_ap, AccessPoint
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-CONNECTIVITY_MODE_LOCAL = 'local_only'
-CONNECTIVITY_MODE_UNRESTRICTED = 'unrestricted'
-DEFAULT_AP_PROFILE = 'whirlwind'
-DEFAULT_IPERF_PORT = 5201
-DEFAULT_STRESS_TEST_ITERATIONS = 10
-DEFAULT_TIMEOUT = 30
-DEFAULT_IPERF_TIMEOUT = 60
-DEFAULT_NO_ADDR_EXPECTED_TIMEOUT = 5
-INTERFACE_ROLE_AP = 'Ap'
-INTERFACE_ROLE_CLIENT = 'Client'
-OPERATING_BAND_2G = 'only_2_4_ghz'
-OPERATING_BAND_5G = 'only_5_ghz'
-OPERATING_BAND_ANY = 'any'
-SECURITY_OPEN = 'none'
-SECURITY_WEP = 'wep'
-SECURITY_WPA = 'wpa'
-SECURITY_WPA2 = 'wpa2'
-SECURITY_WPA3 = 'wpa3'
-STATE_UP = True
-STATE_DOWN = False
-TEST_TYPE_ASSOCIATE_ONLY = 'associate_only'
-TEST_TYPE_ASSOCIATE_AND_PING = 'associate_and_ping'
-TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC = 'associate_and_pass_traffic'
-TEST_TYPES = {
-    TEST_TYPE_ASSOCIATE_ONLY, TEST_TYPE_ASSOCIATE_AND_PING,
-    TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC
-}
-
-
-def get_test_name_from_settings(settings):
-    return settings['test_name']
-
-
-def get_ap_params_from_config_or_default(config):
-    """Retrieves AP parameters from antlion config, or returns default settings.
-
-    Args:
-        config: dict, from antlion config, that may contain custom ap parameters
-
-    Returns:
-        dict, containing all AP parameters
-    """
-    profile = config.get('profile', DEFAULT_AP_PROFILE)
-    ssid = config.get(
-        'ssid', utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G))
-    channel = config.get('channel', hostapd_constants.AP_DEFAULT_CHANNEL_2G)
-    security_mode = config.get('security_mode', None)
-    password = config.get('password', None)
-    if security_mode:
-        if not password:
-            password = generate_random_password(security_mode=security_mode)
-        security = hostapd_security.Security(security_mode, password)
-    else:
-        security = None
-
-    return {
-        'profile': profile,
-        'ssid': ssid,
-        'channel': channel,
-        'security': security,
-        'password': password
-    }
-
-
-def get_soft_ap_params_from_config_or_default(config):
-    """Retrieves SoftAp parameters from antlion config or returns default settings.
-
-    Args:
-        config: dict, from antlion config, that may contain custom soft ap
-            parameters
-
-    Returns:
-        dict, containing all soft AP parameters
-    """
-    ssid = config.get(
-        'ssid', utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G))
-    connectivity_mode = config.get('connectivity_mode',
-                                   CONNECTIVITY_MODE_LOCAL)
-    operating_band = config.get('operating_band', OPERATING_BAND_2G)
-    security_type = config.get('security_type', SECURITY_OPEN)
-    password = config.get('password', '')
-
-    # The SoftAP API uses 'open' security instead of None, '' password
-    # instead of None, and security_type instead of security_mode, hence
-    # the difference between ap_params and soft_ap_params
-    if security_type != SECURITY_OPEN and password == '':
-        password = generate_random_password(security_mode=security_type)
-
-    return {
-        'ssid': ssid,
-        'connectivity_mode': connectivity_mode,
-        'operating_band': operating_band,
-        'security_type': security_type,
-        'password': password
-    }
-
-
-class StressTestIterationFailure(Exception):
-    """Used to differentiate a subtest failure from an actual exception"""
-
-
-class SoftApTest(WifiBaseTest):
-    """Tests for Fuchsia SoftAP
-
-    Testbed requirement:
-    * One Fuchsia device
-    * At least one client (Android) device
-        * For multi-client tests, at least two client (Android) devices are
-          required. Test will be skipped if less than two client devices are
-          present.
-    * For any tests that exercise client-mode (e.g. toggle tests, simultaneous
-        tests), a physical AP (whirlwind) is also required. Those tests will be
-        skipped if physical AP is not present.
-    """
-
-    def setup_class(self):
-        self.soft_ap_test_params = self.user_params.get(
-            'soft_ap_test_params', {})
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-
-        # TODO(fxb/51313): Add in device agnosticity for clients
-        # Create a wlan device and iperf client for each Android client
-        self.clients = []
-        self.iperf_clients_map = {}
-        for device in self.android_devices:
-            client_wlan_device = create_wlan_device(device)
-            self.clients.append(client_wlan_device)
-            self.iperf_clients_map[
-                client_wlan_device] = client_wlan_device.create_iperf_client()
-        self.primary_client = self.clients[0]
-
-        # Create an iperf server on the DUT, which will be used for any streaming.
-        self.iperf_server_config = {
-            'user': self.dut.device.ssh_username,
-            'host': self.dut.device.ip,
-            'ssh_config': self.dut.device.ssh_config
-        }
-        self.iperf_server = iperf_server.IPerfServerOverSsh(
-            self.iperf_server_config, DEFAULT_IPERF_PORT, use_killall=True)
-        self.iperf_server.start()
-
-        # Attempt to create an ap iperf server. AP is only required for tests
-        # that use client mode.
-        try:
-            self.access_point = self.access_points[0]
-            self.ap_iperf_client = iperf_client.IPerfClientOverSsh(
-                self.user_params['AccessPoint'][0]['ssh_config'])
-        except AttributeError:
-            self.access_point = None
-            self.ap_iperf_client = None
-
-        self.iperf_clients_map[self.access_point] = self.ap_iperf_client
-
-    def teardown_class(self):
-        # Because this is using killall, it will stop all iperf processes
-        self.iperf_server.stop()
-
-    def setup_test(self):
-        for ad in self.android_devices:
-            ad.droid.wakeLockAcquireBright()
-            ad.droid.wakeUpNow()
-        for client in self.clients:
-            client.disconnect()
-            client.reset_wifi()
-            client.wifi_toggle_state(True)
-        self.stop_all_soft_aps()
-        if self.access_point:
-            self.access_point.stop_all_aps()
-        self.dut.disconnect()
-
-    def teardown_test(self):
-        for client in self.clients:
-            client.disconnect()
-        for ad in self.android_devices:
-            ad.droid.wakeLockRelease()
-            ad.droid.goToSleepNow()
-        self.stop_all_soft_aps()
-        if self.access_point:
-            self.download_ap_logs()
-            self.access_point.stop_all_aps()
-        self.dut.disconnect()
-
-    def start_soft_ap(self, settings):
-        """Starts a softAP on Fuchsia device.
-
-        Args:
-            settings: a dict containing softAP configuration params
-                ssid: string, SSID of softAP network
-                security_type: string, security type of softAP network
-                    - 'none', 'wep', 'wpa', 'wpa2', 'wpa3'
-                password: string, password if applicable
-                connectivity_mode: string, connecitivity_mode for softAP
-                    - 'local_only', 'unrestricted'
-                operating_band: string, band for softAP network
-                    - 'any', 'only_5_ghz', 'only_2_4_ghz'
-        """
-        ssid = settings['ssid']
-        security_type = settings['security_type']
-        password = settings.get('password', '')
-        connectivity_mode = settings['connectivity_mode']
-        operating_band = settings['operating_band']
-
-        self.log.info('Starting SoftAP on DUT with settings: %s' % settings)
-
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-            ssid, security_type, password, connectivity_mode, operating_band)
-        if response.get('error'):
-            raise EnvironmentError('SL4F: Failed to setup SoftAP. Err: %s' %
-                                   response['error'])
-
-        self.log.info('SoftAp network (%s) is up.' % ssid)
-
-    def stop_soft_ap(self, settings):
-        """ Stops a specific SoftAP On Fuchsia device.
-
-        Args:
-            settings: a dict containing softAP config params (see start_soft_ap)
-                for details
-
-        Raises:
-            EnvironmentError, if StopSoftAP call fails.
-        """
-        ssid = settings['ssid']
-        security_type = settings['security_type']
-        password = settings.get('password', '')
-
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAccessPoint(
-            ssid, security_type, password)
-        if response.get('error'):
-            raise EnvironmentError('SL4F: Failed to stop SoftAP. Err: %s' %
-                                   response['error'])
-
-    def stop_all_soft_aps(self):
-        """ Stops all SoftAPs on Fuchsia Device.
-
-        Raises:
-            EnvironmentError, if StopAllAps call fails.
-        """
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint(
-        )
-        if response.get('error'):
-            raise EnvironmentError(
-                'SL4F: Failed to stop all SoftAPs. Err: %s' %
-                response['error'])
-
-    def associate_with_soft_ap(self, device, soft_ap_settings):
-        """Associates client device with softAP on Fuchsia device.
-
-        Args:
-            device: wlan_device to associate with the softAP
-            settings: a dict containing softAP config params (see start_soft_ap)
-                for details
-
-        Raises:
-            TestFailure, if association fails
-        """
-        self.log.info(
-            'Attempting to associate client %s with SoftAP on FuchsiaDevice '
-            '(%s).' % (device.identifier, self.dut.identifier))
-
-        check_connectivity = soft_ap_settings[
-            'connectivity_mode'] == CONNECTIVITY_MODE_UNRESTRICTED
-        associated = device.associate(
-            soft_ap_settings['ssid'],
-            target_pwd=soft_ap_settings.get('password'),
-            target_security=hostapd_constants.
-            SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                soft_ap_settings['security_type'], None),
-            check_connectivity=check_connectivity)
-
-        if not associated:
-            self.log.error('Failed to connect to SoftAp.')
-            return False
-
-        self.log.info('Client successfully associated with SoftAP.')
-        return True
-
-    def disconnect_from_soft_ap(self, device):
-        """Disconnects client device from SoftAP.
-
-        Args:
-            device: wlan_device to disconnect from SoftAP
-        """
-        self.log.info('Disconnecting device %s from SoftAP.' %
-                      device.identifier)
-        device.disconnect()
-
-    def get_device_test_interface(self, device, role=None, channel=None):
-        """Retrieves test interface from a provided device, which can be the
-        FuchsiaDevice DUT, the AccessPoint, or an AndroidClient.
-
-        Args:
-            device: the device do get the test interface from. Either
-                FuchsiaDevice (DUT), Android client, or AccessPoint.
-            role: str, either "client" or "ap". Required for FuchsiaDevice (DUT)
-            channel: int, channel of the ap network. Required for AccessPoint.
-
-        Returns:
-            String, name of test interface on given device.
-        """
-
-        if device is self.dut:
-            device.device.wlan_controller.update_wlan_interfaces()
-            if role == INTERFACE_ROLE_CLIENT:
-                return device.device.wlan_client_test_interface_name
-            elif role == INTERFACE_ROLE_AP:
-                return device.device.wlan_ap_test_interface_name
-            else:
-                raise ValueError('Unsupported interface role: %s' % role)
-        elif isinstance(device, AccessPoint):
-            if not channel:
-                raise ValueError(
-                    'Must provide a channel to get AccessPoint interface')
-            if channel < 36:
-                return device.wlan_2g
-            else:
-                return device.wlan_5g
-        else:
-            return device.get_default_wlan_test_interface()
-
-    def wait_for_ipv4_address(self,
-                              device,
-                              interface_name,
-                              timeout=DEFAULT_TIMEOUT):
-        """ Waits for interface on a wlan_device to get an ipv4 address.
-
-        Args:
-            device: wlan_device or AccessPoint to check interface
-            interface_name: name of the interface to check
-            timeout: seconds to wait before raising an error
-
-        Raises:
-            ValueError, if interface does not have an ipv4 address after timeout
-        """
-        if isinstance(device, AccessPoint):
-            comm_channel = device.ssh
-        else:
-            comm_channel = device.device
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            ips = utils.get_interface_ip_addresses(comm_channel,
-                                                   interface_name)
-            if len(ips['ipv4_private']) > 0:
-                self.log.info('Device %s interface %s has ipv4 address %s' %
-                              (device.identifier, interface_name,
-                               ips['ipv4_private'][0]))
-                return ips['ipv4_private'][0]
-            else:
-                time.sleep(1)
-        raise ConnectionError(
-            'After %s seconds, device %s still does not have an ipv4 address '
-            'on interface %s.' % (timeout, device.identifier, interface_name))
-
-    def device_can_ping_addr(self, device, dest_ip, timeout=DEFAULT_TIMEOUT):
-        """ Verify wlan_device can ping a destination ip.
-
-        Args:
-            device: wlan_device to initiate ping
-            dest_ip: ip to ping from wlan_device
-
-        Raises:
-            TestFailure, if ping fails
-        """
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            with utils.SuppressLogOutput():
-                ping_result = device.can_ping(dest_ip)
-
-            if ping_result:
-                self.log.info('Ping successful from device %s to dest ip %s.' %
-                              (device.identifier, dest_ip))
-                return True
-            else:
-                self.log.debug(
-                    'Device %s could not ping dest ip %s. Retrying in 1 second.'
-                    % (device.identifier, dest_ip))
-                time.sleep(1)
-        else:
-            self.log.info('Failed to ping from device %s to dest ip %s.' %
-                          (device.identifier, dest_ip))
-            return False
-
-    def run_iperf_traffic(self, ip_client, server_address, server_port=5201):
-        """Runs traffic between client and ap an verifies throughput.
-
-        Args:
-            ip_client: iperf client to use
-            server_address: ipv4 address of the iperf server to use
-            server_port: port of the iperf server
-
-        Raises:
-            TestFailure, if no traffic passes in either direction
-        """
-        ip_client_identifier = self.get_iperf_client_identifier(ip_client)
-
-        self.log.info(
-            'Running traffic from iperf client %s to iperf server %s.' %
-            (ip_client_identifier, server_address))
-        client_to_ap_path = ip_client.start(
-            server_address, '-i 1 -t 10 -J -p %s' % server_port,
-            'client_to_soft_ap')
-
-        client_to_ap_result = iperf_server.IPerfResult(client_to_ap_path)
-        if (not client_to_ap_result.avg_receive_rate):
-            raise ConnectionError(
-                'Failed to pass traffic from iperf client %s to iperf server %s.'
-                % (ip_client_identifier, server_address))
-
-        self.log.info(
-            'Passed traffic from iperf client %s to iperf server %s with avg '
-            'rate of %s MB/s.' % (ip_client_identifier, server_address,
-                                  client_to_ap_result.avg_receive_rate))
-
-        self.log.info(
-            'Running traffic from iperf server %s to iperf client %s.' %
-            (server_address, ip_client_identifier))
-        ap_to_client_path = ip_client.start(
-            server_address, '-i 1 -t 10 -R -J -p %s' % server_port,
-            'soft_ap_to_client')
-
-        ap_to_client_result = iperf_server.IPerfResult(ap_to_client_path)
-        if (not ap_to_client_result.avg_receive_rate):
-            raise ConnectionError(
-                'Failed to pass traffic from iperf server %s to iperf client %s.'
-                % (server_address, ip_client_identifier))
-
-        self.log.info(
-            'Passed traffic from iperf server %s to iperf client %s with avg '
-            'rate of %s MB/s.' % (server_address, ip_client_identifier,
-                                  ap_to_client_result.avg_receive_rate))
-
-    def run_iperf_traffic_parallel_process(self,
-                                           ip_client,
-                                           server_address,
-                                           error_queue,
-                                           server_port=5201):
-        """ Executes run_iperf_traffic using a queue to capture errors. Used
-        when running iperf in a parallel process.
-
-        Args:
-            ip_client: iperf client to use
-            server_address: ipv4 address of the iperf server to use
-            error_queue: multiprocessing queue to capture errors
-            server_port: port of the iperf server
-        """
-        try:
-            self.run_iperf_traffic(ip_client,
-                                   server_address,
-                                   server_port=server_port)
-        except ConnectionError as err:
-            error_queue.put('In iperf process from %s to %s: %s' %
-                            (self.get_iperf_client_identifier(ip_client),
-                             server_address, err))
-
-    def get_iperf_client_identifier(self, ip_client):
-        """ Retrieves an indentifer string from iperf client, for logging.
-
-        Args:
-            ip_client: iperf client to grab identifier from
-        """
-        if type(ip_client) == iperf_client.IPerfClientOverAdb:
-            return ip_client._android_device_or_serial.serial
-        return ip_client._ssh_settings.hostname
-
-    def device_is_connected_to_ap(self,
-                                  client,
-                                  ap,
-                                  channel=None,
-                                  check_traffic=False,
-                                  timeout=DEFAULT_TIMEOUT):
-        """ Returns whether client device can ping (and optionally pass traffic)
-        to the ap device.
-
-        Args:
-            client: device that should be associated. Either FuchsiaDevice (DUT)
-                or Android client
-            ap: device acting as AP. Either FuchsiaDevice (DUT) or AccessPoint.
-            channel: int, channel the AP is using. Required if ap is an
-                AccessPoint object.
-            check_traffic: bool, whether to attempt to pass traffic between
-                client and ap devices.
-            timeout: int, time in seconds to wait for devices to have ipv4
-                addresses
-        """
-        try:
-            # Get interfaces
-            client_interface = self.get_device_test_interface(
-                client, INTERFACE_ROLE_CLIENT)
-            ap_interface = self.get_device_test_interface(
-                ap, role=INTERFACE_ROLE_AP, channel=channel)
-
-            # Get addresses
-            client_ipv4 = self.wait_for_ipv4_address(client,
-                                                     client_interface,
-                                                     timeout=timeout)
-            ap_ipv4 = self.wait_for_ipv4_address(ap,
-                                                 ap_interface,
-                                                 timeout=timeout)
-        except ConnectionError as err:
-            self.log.error(
-                'Failed to retrieve interfaces and addresses. Err: %s' % err)
-            return False
-
-        if not self.device_can_ping_addr(client, ap_ipv4):
-            self.log.error('Failed to ping from client to ap.')
-            return False
-
-        if not self.device_can_ping_addr(ap, client_ipv4):
-            self.log.error('Failed to ping from ap to client.')
-            return False
-
-        if check_traffic:
-            try:
-                if client is self.dut:
-                    self.run_iperf_traffic(self.iperf_clients_map[ap],
-                                           client_ipv4)
-                else:
-                    self.run_iperf_traffic(self.iperf_clients_map[client],
-                                           ap_ipv4)
-            except ConnectionError as err:
-                self.log.error('Failed to run traffic between DUT and AP.')
-                return False
-        return True
-
-    def verify_soft_ap_connectivity_from_state(self, state, client):
-        """Verifies SoftAP state based on a client connection.
-
-        Args:
-            state: bool, whether SoftAP should be up
-            client: SoftApClient, to verify connectivity (or lack therof)
-        """
-        if state == STATE_UP:
-            return self.device_is_connected_to_ap(client, self.dut)
-        else:
-            with utils.SuppressLogOutput():
-                try:
-                    return not self.device_is_connected_to_ap(
-                        client,
-                        self.dut,
-                        timeout=DEFAULT_NO_ADDR_EXPECTED_TIMEOUT)
-                # Allow a failed to find ap interface error
-                except LookupError as err:
-                    self.log.debug('Hit expected LookupError: %s' % err)
-                    return True
-
-    def verify_client_mode_connectivity_from_state(self, state, channel):
-        """Verifies client mode state based on DUT-AP connection.
-
-        Args:
-            state: bool, whether client mode should be up
-            channel: int, channel of the APs network
-        """
-        if state == STATE_UP:
-            return self.device_is_connected_to_ap(self.dut,
-                                                  self.access_point,
-                                                  channel=channel)
-        else:
-            with utils.SuppressLogOutput():
-                try:
-                    return not self.device_is_connected_to_ap(
-                        self.dut,
-                        self.access_point,
-                        channel=channel,
-                        timeout=DEFAULT_NO_ADDR_EXPECTED_TIMEOUT)
-                # Allow a failed to find client interface error
-                except LookupError as err:
-                    self.log.debug('Hit expected LookupError: %s' % err)
-                    return True
-
-# Test Types
-
-    def verify_soft_ap_associate_only(self, client, soft_ap_settings):
-        if not self.associate_with_soft_ap(client, soft_ap_settings):
-            asserts.fail('Failed to associate client with SoftAP.')
-
-    def verify_soft_ap_associate_and_ping(self, client, soft_ap_settings):
-        self.verify_soft_ap_associate_only(client, soft_ap_settings)
-        if not self.device_is_connected_to_ap(client, self.dut):
-            asserts.fail('Client and SoftAP could not ping eachother.')
-
-    def verify_soft_ap_associate_and_pass_traffic(self, client, settings):
-        self.verify_soft_ap_associate_only(client, settings)
-        if not self.device_is_connected_to_ap(
-                client, self.dut, check_traffic=True):
-            asserts.fail(
-                'Client and SoftAP not responding to pings and passing traffic '
-                'as expected.')
-
-# Runners for Generated Test Cases
-
-    def run_soft_ap_association_stress_test(self, settings):
-        """Sets up a SoftAP, and repeatedly associates and disassociates a
-        client.
-
-        Args:
-            settings: test configuration settings, see
-                test_soft_ap_association_stress for details
-        """
-        client = settings['client']
-        soft_ap_params = settings['soft_ap_params']
-        test_type = settings['test_type']
-        if not test_type in TEST_TYPES:
-            raise ValueError('Unrecognized test type %s' % test_type)
-        iterations = settings['iterations']
-        self.log.info(
-            'Running association stress test type %s in iteration %s times' %
-            (test_type, iterations))
-
-        self.start_soft_ap(soft_ap_params)
-
-        passed_count = 0
-        for run in range(iterations):
-            try:
-                self.log.info('Starting SoftAp association run %s' %
-                              str(run + 1))
-
-                if test_type == TEST_TYPE_ASSOCIATE_ONLY:
-                    self.verify_soft_ap_associate_only(client, soft_ap_params)
-
-                elif test_type == TEST_TYPE_ASSOCIATE_AND_PING:
-                    self.verify_soft_ap_associate_and_ping(
-                        client, soft_ap_params)
-
-                elif test_type == TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC:
-                    self.verify_soft_ap_associate_and_pass_traffic(
-                        client, soft_ap_params)
-
-                else:
-                    raise AttributeError('Invalid test type: %s' % test_type)
-
-            except signals.TestFailure as err:
-                self.log.error(
-                    'SoftAp association stress run %s failed. Err: %s' %
-                    (str(run + 1), err.details))
-            else:
-                self.log.info('SoftAp association stress run %s successful.' %
-                              str(run + 1))
-                passed_count += 1
-
-        if passed_count < iterations:
-            asserts.fail(
-                'SoftAp association stress test passed on %s/%s runs.' %
-                (passed_count, iterations))
-
-        asserts.explicit_pass(
-            'SoftAp association stress test passed on %s/%s runs.' %
-            (passed_count, iterations))
-
-# Alternate SoftAP and Client mode test
-
-    def run_soft_ap_and_client_mode_alternating_test(self, settings):
-        """Runs a single soft_ap and client alternating stress test.
-
-        See test_soft_ap_and_client_mode_alternating_stress for details.
-        """
-        iterations = settings['iterations']
-        pass_count = 0
-        current_soft_ap_state = STATE_DOWN
-        current_client_mode_state = STATE_DOWN
-
-        self.client_mode_toggle_pre_test(settings)
-        for iteration in range(iterations):
-            passes = True
-
-            # Attempt to toggle SoftAP on, then off. If the first toggle fails
-            # to occur, exit early.
-            for _ in range(2):
-                (current_soft_ap_state, err) = self.run_toggle_iteration_func(
-                    self.soft_ap_toggle_test_iteration, settings,
-                    current_soft_ap_state)
-                if err:
-                    self.log.error('Iteration %s failed. Err: %s' %
-                                   (str(iteration + 1), err))
-                    passes = False
-                if current_soft_ap_state == STATE_DOWN:
-                    break
-
-            # Attempt to toggle Client mode on, then off. If the first toggle,
-            # fails to occur, exit early.
-            for _ in range(2):
-                (current_client_mode_state,
-                 err) = self.run_toggle_iteration_func(
-                     self.client_mode_toggle_test_iteration, settings,
-                     current_client_mode_state)
-                if err:
-                    self.log.error('Iteration %s failed. Err: %s' %
-                                   (str(iteration + 1), err))
-                    passes = False
-                if current_client_mode_state == STATE_DOWN:
-                    break
-
-            if passes:
-                pass_count += 1
-
-        if pass_count == iterations:
-            asserts.explicit_pass(
-                'Toggle SoftAP and client mode stress test passed %s/%s times.'
-                % (pass_count, iterations))
-        else:
-            asserts.fail(
-                'Toggle SoftAP and client mode stress test only passed %s/%s '
-                'times.' % (pass_count, iterations))
-
-# Toggle Stress Test Helper Functions
-
-    def run_toggle_stress_test(self, settings):
-        """Runner function for toggle stress tests.
-
-        Repeats some test function through stress test iterations, logging
-        failures, tracking pass rate, managing states, etc.
-
-        Args:
-            settings: dict, stress test settings
-
-        Asserts:
-            PASS: if all iterations of the test function pass
-            FAIL: if any iteration of the test function fails
-        """
-        test_runner_func = settings['test_runner_func']
-        pre_test_func = settings.get('pre_test_func', None)
-        iterations = settings['iterations']
-        if pre_test_func:
-            pre_test_func(settings)
-
-        pass_count = 0
-        current_state = STATE_DOWN
-        for iteration in range(iterations):
-            (current_state,
-             err) = self.run_toggle_iteration_func(test_runner_func, settings,
-                                                   current_state)
-            if err:
-                self.log.error('Iteration %s failed. Err: %s' %
-                               (str(iteration + 1), err))
-            else:
-                pass_count += 1
-
-        if pass_count == iterations:
-            asserts.explicit_pass('Stress test passed %s/%s times.' %
-                                  (pass_count, iterations))
-        else:
-            asserts.fail('Stress test only passed %s/%s '
-                         'times.' % (pass_count, iterations))
-
-    def run_toggle_iteration_func(self, func, settings, current_state):
-        """Runs a toggle iteration function, updating the current state
-        based on what the toggle iteration function raises.
-
-        Used for toggle stress tests.
-
-        Note on EnvironmentError vs StressTestIterationFailure:
-            StressTestIterationFailure is raised by func when the toggle occurs
-                but connectivty or some other post-toggle check fails (i.e. the
-                next iteration should toggle to the next state.)
-
-            EnvironmentError is raise by func when the toggle itself fails (i.e
-                the next iteration should retry the same toggle again.)
-
-        Args:
-            func: toggle iteration func to run (e.g soft_ap_toggle_iteration)
-            settings: dict, stress test settings
-            current_state: bool, the current state of the mode being toggled
-
-        Returns:
-            (new_state, err):
-                new_state: bool, state of the mode after toggle attempt
-                err: exception, if any are raise, else None
-        """
-        try:
-            func(settings, current_state)
-        except EnvironmentError as err:
-            return (current_state, err)
-        except StressTestIterationFailure as err:
-            return (not current_state, err)
-        else:
-            return (not current_state, None)
-
-# Stress Test Toggle Functions
-
-    def start_soft_ap_and_verify_connected(self, client, soft_ap_params):
-        """Sets up SoftAP, associates a client, then verifies connection.
-
-        Args:
-            client: SoftApClient, client to use to verify SoftAP
-            soft_ap_params: dict, containing parameters to setup softap
-
-        Raises:
-            StressTestIterationFailure, if toggle occurs, but connection
-            is not functioning as expected
-        """
-        # Change SSID every time, to avoid client connection issues.
-        soft_ap_params['ssid'] = utils.rand_ascii_str(
-            hostapd_constants.AP_SSID_LENGTH_2G)
-        self.start_soft_ap(soft_ap_params)
-        associated = self.associate_with_soft_ap(client, soft_ap_params)
-        if not associated:
-            raise StressTestIterationFailure(
-                'Failed to associated client to DUT SoftAP. '
-                'Continuing with iterations.')
-
-        if not self.verify_soft_ap_connectivity_from_state(STATE_UP, client):
-            raise StressTestIterationFailure(
-                'Failed to ping between client and DUT. Continuing '
-                'with iterations.')
-
-    def stop_soft_ap_and_verify_disconnected(self, client, soft_ap_params):
-        """Tears down SoftAP, and verifies connection is down.
-
-        Args:
-            client: SoftApClient, client to use to verify SoftAP
-            soft_ap_params: dict, containing parameters of SoftAP to teardown
-
-        Raise:
-            EnvironmentError, if client and AP can still communicate
-        """
-        self.log.info('Stopping SoftAP on DUT.')
-        self.stop_soft_ap(soft_ap_params)
-
-        if not self.verify_soft_ap_connectivity_from_state(STATE_DOWN, client):
-            raise EnvironmentError(
-                'Client can still ping DUT. Continuing with '
-                'iterations.')
-
-    def start_client_mode_and_verify_connected(self, ap_params):
-        """Connects DUT to AP in client mode and verifies connection
-
-        Args:
-            ap_params: dict, containing parameters of the AP network
-
-        Raises:
-            EnvironmentError, if DUT fails to associate altogether
-            StressTestIterationFailure, if DUT associates but connection is not
-                functioning as expected.
-        """
-        ap_ssid = ap_params['ssid']
-        ap_password = ap_params['password']
-        ap_channel = ap_params['channel']
-        ap_security = ap_params.get('security')
-
-        if ap_security:
-            ap_security_mode = ap_security.security_mode_string
-        else:
-            ap_security_mode = None
-
-        self.log.info('Associating DUT with AP network: %s' % ap_ssid)
-        associated = self.dut.associate(
-            target_ssid=ap_ssid,
-            target_pwd=ap_password,
-            target_security=hostapd_constants.
-            SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                ap_security_mode, None))
-        if not associated:
-            raise EnvironmentError('Failed to associate DUT in client mode.')
-        else:
-            self.log.info('Association successful.')
-
-        if not self.verify_client_mode_connectivity_from_state(
-                STATE_UP, ap_channel):
-            raise StressTestIterationFailure('Failed to ping AP from DUT.')
-
-    def stop_client_mode_and_verify_disconnected(self, ap_params):
-        """Disconnects DUT from AP and verifies connection is down.
-
-        Args:
-            ap_params: dict, containing parameters of the AP network
-
-        Raises:
-            EnvironmentError, if DUT and AP can still communicate
-        """
-        self.log.info('Disconnecting DUT from AP.')
-        self.dut.disconnect()
-        if not self.verify_client_mode_connectivity_from_state(
-                STATE_DOWN, ap_params['channel']):
-            raise EnvironmentError('DUT can still ping AP.')
-
-# Toggle Stress Test Iteration and Pre-Test Functions
-
-# SoftAP Toggle Stress Test Helper Functions
-
-    def soft_ap_toggle_test_iteration(self, settings, current_state):
-        """Runs a single iteration of SoftAP toggle stress test
-
-        Args:
-            settings: dict, containing test settings
-            current_state: bool, current state of SoftAP (True if up,
-                else False)
-
-        Raises:
-            StressTestIterationFailure, if toggle occurs but mode isn't
-                functioning correctly.
-            EnvironmentError, if toggle fails to occur at all
-        """
-        soft_ap_params = settings['soft_ap_params']
-        self.log.info('Toggling SoftAP %s.' %
-                      ('down' if current_state else 'up'))
-
-        if current_state == STATE_DOWN:
-            self.start_soft_ap_and_verify_connected(self.primary_client,
-                                                    soft_ap_params)
-
-        else:
-            self.stop_soft_ap_and_verify_disconnected(self.primary_client,
-                                                      soft_ap_params)
-
-# Client Mode Toggle Stress Test Helper Functions
-
-    def client_mode_toggle_pre_test(self, settings):
-        """Prepares the AP before client mode toggle tests
-
-        Args:
-            settings: dict, stress test settings
-
-        Raises:
-            ConnectionError, if AP setup fails
-        """
-        ap_params = settings['ap_params']
-        ap_channel = ap_params['channel']
-        ap_profile = ap_params.pop('profile')
-        self.log.info('Setting up AP with params: %s' % ap_params)
-        setup_ap(access_point=self.access_point,
-                 profile_name=ap_profile,
-                 **ap_params)
-        # Confirms AP assigned itself an address
-        ap_interface = self.get_device_test_interface(self.access_point,
-                                                      channel=ap_channel)
-        self.wait_for_ipv4_address(self.access_point, ap_interface)
-
-    def client_mode_toggle_test_iteration(self, settings, current_state):
-        """Runs a single iteration of client mode toggle stress test
-
-        Args:
-            settings: dict, containing test settings
-            current_state: bool, current state of client mode (True if up,
-                else False)
-
-        Raises:
-            StressTestIterationFailure, if toggle occurs but mode isn't
-                functioning correctly.
-            EnvironmentError, if toggle fails to occur at all
-        """
-        ap_params = settings['ap_params']
-        self.log.info('Toggling client mode %s' %
-                      ('off' if current_state else 'on'))
-
-        if current_state == STATE_DOWN:
-            self.start_client_mode_and_verify_connected(ap_params)
-
-        else:
-            self.stop_client_mode_and_verify_disconnected(ap_params)
-
-# Toggle SoftAP with Client Mode Up Test Helper Functions
-
-    def soft_ap_toggle_with_client_mode_pre_test(self, settings):
-        """Sets up and verifies client mode before SoftAP toggle test.
-        Args:
-            settings: dict, stress test settings
-
-        Raises:
-            ConnectionError, if client mode setup fails
-        """
-        self.client_mode_toggle_pre_test(settings)
-        try:
-            self.start_client_mode_and_verify_connected(settings['ap_params'])
-        except StressTestIterationFailure as err:
-            # This prevents it being treated as a routine error
-            raise ConnectionError(
-                'Failed to set up DUT client mode before SoftAP toggle test.'
-                'Err: %s' % err)
-
-    def soft_ap_toggle_with_client_mode_iteration(
-        self,
-        settings,
-        current_state,
-    ):
-        """Runs single iteration of SoftAP toggle stress with client mode test.
-
-        Args:
-            settings: dict, containing test settings
-            current_state: bool, current state of SoftAP (True if up,
-                else False)
-
-        Raises:
-            StressTestIterationFailure, if toggle occurs but mode isn't
-                functioning correctly.
-            EnvironmentError, if toggle fails to occur at all
-        """
-        ap_params = settings['ap_params']
-        ap_channel = ap_params['channel']
-        self.soft_ap_toggle_test_iteration(settings, current_state)
-        if not self.device_is_connected_to_ap(
-                self.dut, self.access_point, channel=ap_channel):
-            raise StressTestIterationFailure(
-                'DUT client mode is no longer functional after SoftAP toggle.')
-
-# Toggle Client Mode with SoftAP Up Test Helper Functions
-
-    def client_mode_toggle_with_soft_ap_pre_test(self, settings):
-        """Sets up and verifies softap before client mode toggle test.
-        Args:
-            settings: dict, stress test settings
-
-        Raises:
-            ConnectionError, if softap setup fails
-        """
-        self.client_mode_toggle_pre_test(settings)
-        try:
-            self.start_soft_ap_and_verify_connected(self.primary_client,
-                                                    settings['soft_ap_params'])
-        except StressTestIterationFailure as err:
-            # This prevents it being treated as a routine error
-            raise ConnectionError(
-                'Failed to set up SoftAP before client mode toggle test. Err: %s'
-                % err)
-
-    def client_mode_toggle_with_soft_ap_iteration(self, settings,
-                                                  current_state):
-        """Runs single iteration of client mode toggle stress with SoftAP test.
-
-        Args:
-            settings: dict, containing test settings
-            current_state: bool, current state of client mode (True if up,
-                else False)
-
-        Raises:
-            StressTestIterationFailure, if toggle occurs but mode isn't
-                functioning correctly.
-            EnvironmentError, if toggle fails to occur at all
-        """
-        self.client_mode_toggle_test_iteration(settings, current_state)
-        if not self.device_is_connected_to_ap(self.primary_client, self.dut):
-            raise StressTestIterationFailure(
-                'SoftAP is no longer functional after client mode toggle.')
-
-# Toggle SoftAP and Client Mode Randomly
-
-    def run_soft_ap_and_client_mode_random_toggle_stress_test(self, settings):
-        """Runner function for SoftAP and client mode random toggle tests.
-
-        Each iteration, randomly chooses if a mode will be toggled or not.
-
-        Args:
-            settings: dict, containing test settings
-        """
-        iterations = settings['iterations']
-        pass_count = 0
-        current_soft_ap_state = STATE_DOWN
-        current_client_mode_state = STATE_DOWN
-        ap_channel = settings['ap_params']['channel']
-
-        self.client_mode_toggle_pre_test(settings)
-        for iteration in range(iterations):
-            self.log.info('Starting iteration %s out of %s.' %
-                          (str(iteration + 1), iterations))
-            passes = True
-
-            # Randomly determine if softap, client mode, or both should
-            # be toggled.
-            rand_toggle_choice = random.randrange(0, 3)
-            if rand_toggle_choice <= 1:
-                (current_soft_ap_state, err) = self.run_toggle_iteration_func(
-                    self.soft_ap_toggle_test_iteration, settings,
-                    current_soft_ap_state)
-                if err:
-                    self.log.error(
-                        'Iteration %s failed toggling SoftAP. Err: %s' %
-                        (str(iteration + 1), err))
-                    passes = False
-            if rand_toggle_choice >= 1:
-                (current_client_mode_state,
-                 err) = self.run_toggle_iteration_func(
-                     self.client_mode_toggle_test_iteration, settings,
-                     current_client_mode_state)
-                if err:
-                    self.log.error(
-                        'Iteration %s failed toggling client mode. Err: %s' %
-                        (str(iteration + 1), err))
-                    passes = False
-
-            soft_ap_verified = self.verify_soft_ap_connectivity_from_state(
-                current_soft_ap_state, self.primary_client)
-            client_mode_verified = self.verify_client_mode_connectivity_from_state(
-                current_client_mode_state, ap_channel)
-
-            if not soft_ap_verified or not client_mode_verified:
-                passes = False
-            if passes:
-                pass_count += 1
-
-        if pass_count == iterations:
-            asserts.explicit_pass('Stress test passed %s/%s times.' %
-                                  (pass_count, iterations))
-        else:
-            asserts.fail('Stress test only passed %s/%s '
-                         'times.' % (pass_count, iterations))
-
-
-# Test Cases
-
-    def test_soft_ap_2g_open_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_2G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_5g_open_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_5G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_any_open_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_2g_wep_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_2G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_5g_wep_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_5G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_any_wep_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client, )
-
-    def test_soft_ap_2g_wpa_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_2G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_5g_wpa_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_5G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_any_wpa_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_2g_wpa2_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_2G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_5g_wpa2_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_5G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_any_wpa2_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_2g_wpa3_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_2G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_5g_wpa3_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_any_wpa3_local(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_LOCAL,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_2g_open_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_2G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_5g_open_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_5G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_any_open_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_OPEN,
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_2g_wep_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_2G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_5g_wep_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_5G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_any_wep_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WEP,
-            'password': generate_random_password(security_mode=SECURITY_WEP),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_2g_wpa_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_2G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_5g_wpa_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_5G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_any_wpa_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_2g_wpa2_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_2G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_5g_wpa2_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_5G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_any_wpa2_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA2,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_2g_wpa3_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_2G
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_5g_wpa3_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_soft_ap_any_wpa3_unrestricted(self):
-        soft_ap_params = {
-            'ssid': utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            'security_type': SECURITY_WPA3,
-            'password': generate_random_password(),
-            'connectivity_mode': CONNECTIVITY_MODE_UNRESTRICTED,
-            'operating_band': OPERATING_BAND_ANY
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(self.primary_client,
-                                                       soft_ap_params)
-
-    def test_multi_client(self):
-        """Tests multi-client association with a single soft AP network.
-
-        This tests associates a variable length list of clients, verfying it can
-        can ping the SoftAP and pass traffic, and then verfies all previously
-        associated clients can still ping and pass traffic.
-
-        The same occurs in reverse for disassocations.
-
-        SoftAP parameters can be changed from default via ACTS config:
-        Example Config
-        "soft_ap_test_params" : {
-            "multi_client_test_params": {
-                "ssid": "testssid",
-                "security_type": "wpa2",
-                "password": "password",
-                "connectivity_mode": "local_only",
-                "operating_band": "only_2_4_ghz"
-            }
-        }
-        """
-        asserts.skip_if(
-            len(self.clients) < 2, 'Test requires at least 2 SoftAPClients')
-
-        test_params = self.soft_ap_test_params.get('multi_client_test_params',
-                                                   {})
-        soft_ap_params = get_soft_ap_params_from_config_or_default(
-            test_params.get('soft_ap_params', {}))
-
-        self.start_soft_ap(soft_ap_params)
-
-        associated = []
-
-        for client in self.clients:
-            # Associate new client
-            self.verify_soft_ap_associate_and_ping(client, soft_ap_params)
-
-            # Verify previously associated clients still behave as expected
-            for associated_client in associated:
-                self.log.info(
-                    'Verifying previously associated client %s still functions correctly.'
-                    % associated_client['device'].identifier)
-                if not self.device_is_connected_to_ap(
-                        associated_client['device'], self.dut,
-                        check_traffic=True):
-                    asserts.fail(
-                        'Previously associated client %s failed checks after '
-                        'client %s associated.' %
-                        (associated_client['device'].identifier,
-                         client.identifier))
-
-            client_interface = self.get_device_test_interface(client)
-            client_ipv4 = self.wait_for_ipv4_address(client, client_interface)
-            associated.append({"device": client, "address": client_ipv4})
-
-        self.log.info('All devices successfully associated.')
-
-        self.log.info('Verifying all associated clients can ping eachother.')
-        for transmitter in associated:
-            for receiver in associated:
-                if transmitter != receiver:
-                    if not transmitter['device'].can_ping(receiver['address']):
-                        asserts.fail(
-                            'Could not ping from one associated client (%s) to another (%s).'
-                            % (transmitter['address'], receiver['address']))
-                    else:
-                        self.log.info(
-                            'Successfully pinged from associated client (%s) to another (%s)'
-                            % (transmitter['address'], receiver['address']))
-
-        self.log.info(
-            'All associated clients can ping eachother. Beginning disassociations.'
-        )
-
-        while len(associated) > 0:
-            # Disassociate client
-            client = associated.pop()['device']
-            self.disconnect_from_soft_ap(client)
-
-            # Verify still connected clients still behave as expected
-            for associated_client in associated:
-                self.log.info(
-                    'Verifying still associated client %s still functions '
-                    'correctly.' % associated_client['device'].identifier)
-                if not self.device_is_connected_to_ap(
-                        associated_client['device'], self.dut,
-                        check_traffic=True):
-                    asserts.fail(
-                        'Previously associated client %s failed checks after'
-                        ' client %s disassociated.' %
-                        (associated_client['device'].identifier,
-                         client.identifier))
-
-        self.log.info('All disassociations occurred smoothly.')
-
-    def test_simultaneous_soft_ap_and_client(self):
-        """ Tests FuchsiaDevice DUT can act as a client and a SoftAP
-        simultaneously.
-
-        Raises:
-            ConnectionError: if DUT fails to connect as client
-            RuntimeError: if parallel processes fail to join
-            TestFailure: if DUT fails to pass traffic as either a client or an
-                AP
-        """
-        asserts.skip_if(not self.access_point, 'No access point provided.')
-
-        self.log.info('Setting up AP using hostapd.')
-        test_params = self.soft_ap_test_params.get(
-            'soft_ap_and_client_test_params', {})
-
-        # Configure AP
-        ap_params = get_ap_params_from_config_or_default(
-            test_params.get('ap_params', {}))
-
-        # Setup AP and associate DUT
-        ap_profile = ap_params.pop('profile')
-        setup_ap(access_point=self.access_point,
-                 profile_name=ap_profile,
-                 **ap_params)
-        try:
-            self.start_client_mode_and_verify_connected(ap_params)
-        except Exception as err:
-            asserts.fail('Failed to set up client mode. Err: %s' % err)
-
-        # Setup SoftAP
-        soft_ap_params = get_soft_ap_params_from_config_or_default(
-            test_params.get('soft_ap_params', {}))
-        self.start_soft_ap_and_verify_connected(self.primary_client,
-                                                soft_ap_params)
-
-        # Get FuchsiaDevice test interfaces
-        dut_ap_interface = self.get_device_test_interface(
-            self.dut, role=INTERFACE_ROLE_AP)
-        dut_client_interface = self.get_device_test_interface(
-            self.dut, role=INTERFACE_ROLE_CLIENT)
-
-        # Get FuchsiaDevice addresses
-        dut_ap_ipv4 = self.wait_for_ipv4_address(self.dut, dut_ap_interface)
-        dut_client_ipv4 = self.wait_for_ipv4_address(self.dut,
-                                                     dut_client_interface)
-
-        # Set up secondary iperf server of FuchsiaDevice
-        self.log.info('Setting up second iperf server on FuchsiaDevice DUT.')
-        secondary_iperf_server = iperf_server.IPerfServerOverSsh(
-            self.iperf_server_config, DEFAULT_IPERF_PORT + 1, use_killall=True)
-        secondary_iperf_server.start()
-
-        # Set up iperf client on AP
-        self.log.info('Setting up iperf client on AP.')
-        ap_iperf_client = iperf_client.IPerfClientOverSsh(
-            self.user_params['AccessPoint'][0]['ssh_config'])
-
-        # Setup iperf processes:
-        #     Primary client <-> SoftAP interface on FuchsiaDevice
-        #     AP <-> Client interface on FuchsiaDevice
-        process_errors = mp.Queue()
-        iperf_soft_ap = mp.Process(
-            target=self.run_iperf_traffic_parallel_process,
-            args=[
-                self.iperf_clients_map[self.primary_client], dut_ap_ipv4,
-                process_errors
-            ])
-
-        iperf_fuchsia_client = mp.Process(
-            target=self.run_iperf_traffic_parallel_process,
-            args=[ap_iperf_client, dut_client_ipv4, process_errors],
-            kwargs={'server_port': 5202})
-
-        # Run iperf processes simultaneously
-        self.log.info('Running simultaneous iperf traffic: between AP and DUT '
-                      'client interface, and DUT AP interface and client.')
-
-        iperf_soft_ap.start()
-        iperf_fuchsia_client.start()
-
-        # Block until processes can join or timeout
-        for proc in [iperf_soft_ap, iperf_fuchsia_client]:
-            proc.join(timeout=DEFAULT_IPERF_TIMEOUT)
-            if proc.is_alive():
-                proc.terminate()
-                proc.join()
-                raise RuntimeError('Failed to join process %s' % proc)
-
-        # Stop iperf server (also stopped in teardown class as failsafe)
-        secondary_iperf_server.stop()
-
-        # Check errors from parallel processes
-        if process_errors.empty():
-            asserts.explicit_pass(
-                'FuchsiaDevice was successfully able to pass traffic as a '
-                'client and an AP simultaneously.')
-        else:
-            while not process_errors.empty():
-                self.log.error('Error in iperf process: %s' %
-                               process_errors.get())
-            asserts.fail(
-                'FuchsiaDevice failed to pass traffic as a client and an AP '
-                'simultaneously.')
-
-    def test_soft_ap_association_stress(self):
-        """ Sets up a single AP and repeatedly associate/disassociate
-        a client, verifying connection every time
-
-        Each test creates 1 SoftAP and repeatedly associates/disassociates
-        client.
-
-        Example Config
-        "soft_ap_test_params" : {
-            "soft_ap_association_stress_tests": [
-                {
-                    "ssid": "test_network",
-                    "security_type": "wpa2",
-                    "password": "password",
-                    "connectivity_mode": "local_only",
-                    "operating_band": "only_2_4_ghz",
-                    "iterations": 10
-                }
-            ]
-        }
-        """
-        tests = self.soft_ap_test_params.get(
-            'test_soft_ap_association_stress',
-            [dict(test_name='test_soft_ap_association_stress_default')])
-
-        test_settings_list = []
-        for config_settings in tests:
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
-            test_type = config_settings.get('test_type',
-                                            'associate_and_pass_traffic')
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
-            test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_soft_ap_association_stress_%s_iterations' %
-                    iterations),
-                'client':
-                self.primary_client,
-                'soft_ap_params':
-                soft_ap_params,
-                'test_type':
-                test_type,
-                'iterations':
-                iterations
-            }
-            test_settings_list.append(test_settings)
-
-        self.run_generated_testcases(self.run_soft_ap_association_stress_test,
-                                     test_settings_list,
-                                     name_func=get_test_name_from_settings)
-
-    def test_soft_ap_and_client_mode_alternating_stress(self):
-        """ Runs tests that alternate between SoftAP and Client modes.
-
-        Each tests sets up an AP. Then, for each iteration:
-            - DUT starts up SoftAP, client associates with SoftAP,
-                connection is verified, then disassociates
-            - DUT associates to the AP, connection is verified, then
-                disassociates
-
-        Example Config:
-        "soft_ap_test_params": {
-            "toggle_soft_ap_and_client_tests": [
-                {
-                    "test_name": "test_wpa2_client_ap_toggle",
-                    "ap_params": {
-                        "channel": 6,
-                        "ssid": "test-ap-network",
-                        "security_mode": "wpa2",
-                        "password": "password"
-                    },
-                    "soft_ap_params": {
-                        "ssid": "test-soft-ap-network",
-                        "security_type": "wpa2",
-                        "password": "other-password",
-                        "connectivity_mode": "local_only",
-                        "operating_band": "only_2_4_ghz"
-                    },
-                    "iterations": 5
-                }
-            ]
-        }
-        """
-        asserts.skip_if(not self.access_point, 'No access point provided.')
-        tests = self.soft_ap_test_params.get(
-            'test_soft_ap_and_client_mode_alternating_stress', [
-                dict(test_name=
-                     'test_soft_ap_and_client_mode_alternating_stress_default')
-            ])
-
-        test_settings_list = []
-        for config_settings in tests:
-            ap_params = get_ap_params_from_config_or_default(
-                config_settings.get('ap_params', {}))
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
-
-            test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_soft_ap_and_client_mode_alternating_stress_%s_iterations'
-                    % iterations),
-                'iterations':
-                iterations,
-                'soft_ap_params':
-                soft_ap_params,
-                'ap_params':
-                ap_params,
-            }
-
-            test_settings_list.append(test_settings)
-        self.run_generated_testcases(
-            test_func=self.run_soft_ap_and_client_mode_alternating_test,
-            settings=test_settings_list,
-            name_func=get_test_name_from_settings)
-
-    def test_soft_ap_toggle_stress(self):
-        """ Runs SoftAP toggling stress test.
-
-        Each iteration toggles SoftAP to the opposite state (up or down).
-
-        If toggled up, a client is associated and connection is verified
-        If toggled down, test verifies client is not connected
-
-        Will run with default params, but custom tests can be provided in the
-        ACTS config.
-
-        Example Config
-        "soft_ap_test_params" : {
-            "test_soft_ap_toggle_stress": [
-                "soft_ap_params": {
-                    "security_type": "wpa2",
-                    "password": "password",
-                    "connectivity_mode": "local_only",
-                    "operating_band": "only_2_4_ghz",
-                },
-                "iterations": 10
-            ]
-        }
-        """
-        tests = self.soft_ap_test_params.get(
-            'test_soft_ap_toggle_stress',
-            [dict(test_name='test_soft_ap_toggle_stress_default')])
-
-        test_settings_list = []
-        for config_settings in tests:
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
-            test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_soft_ap_toggle_stress_%s_iterations' % iterations),
-                'test_runner_func':
-                self.soft_ap_toggle_test_iteration,
-                'soft_ap_params':
-                soft_ap_params,
-                'iterations':
-                iterations
-            }
-            test_settings_list.append(test_settings)
-
-        self.run_generated_testcases(self.run_toggle_stress_test,
-                                     test_settings_list,
-                                     name_func=get_test_name_from_settings)
-
-    def test_client_mode_toggle_stress(self):
-        """ Runs client mode toggling stress test.
-
-        Each iteration toggles client mode to the opposite state (up or down).
-
-        If toggled up, DUT associates to AP, and connection is verified
-        If toggled down, test verifies DUT is not connected to AP
-
-        Will run with default params, but custom tests can be provided in the
-        ACTS config.
-
-        Example Config
-        "soft_ap_test_params" : {
-            "test_client_mode_toggle_stress": [
-                "soft_ap_params": {
-                    'ssid': ssid,
-                    'channel': channel,
-                    'security_mode': security,
-                    'password': password
-                },
-                "iterations": 10
-            ]
-        }
-        """
-        asserts.skip_if(not self.access_point, 'No access point provided.')
-        tests = self.soft_ap_test_params.get(
-            'test_client_mode_toggle_stress',
-            [dict(test_name='test_client_mode_toggle_stress_default')])
-
-        test_settings_list = []
-        for config_settings in tests:
-            ap_params = get_ap_params_from_config_or_default(
-                config_settings.get('ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
-            test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_client_mode_toggle_stress_%s_iterations' %
-                    iterations),
-                'test_runner_func':
-                self.client_mode_toggle_test_iteration,
-                'pre_test_func':
-                self.client_mode_toggle_pre_test,
-                'ap_params':
-                ap_params,
-                'iterations':
-                iterations
-            }
-            test_settings_list.append(test_settings)
-        self.run_generated_testcases(self.run_toggle_stress_test,
-                                     test_settings_list,
-                                     name_func=get_test_name_from_settings)
-
-    def test_soft_ap_toggle_stress_with_client_mode(self):
-        """Same as test_soft_ap_toggle_stress, but client mode is set up
-        at test start and verified after every toggle."""
-        asserts.skip_if(not self.access_point, 'No access point provided.')
-        tests = self.soft_ap_test_params.get(
-            'test_soft_ap_toggle_stress_with_client_mode', [
-                dict(test_name=
-                     'test_soft_ap_toggle_stress_with_client_mode_default')
-            ])
-
-        test_settings_list = []
-        for config_settings in tests:
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
-            ap_params = get_ap_params_from_config_or_default(
-                config_settings.get('ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
-            test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_soft_ap_toggle_stress_with_client_mode_%s_iterations'
-                    % iterations),
-                'test_runner_func':
-                self.soft_ap_toggle_with_client_mode_iteration,
-                'pre_test_func':
-                self.soft_ap_toggle_with_client_mode_pre_test,
-                'soft_ap_params':
-                soft_ap_params,
-                'ap_params':
-                ap_params,
-                'iterations':
-                iterations
-            }
-            test_settings_list.append(test_settings)
-        self.run_generated_testcases(self.run_toggle_stress_test,
-                                     test_settings_list,
-                                     name_func=get_test_name_from_settings)
-
-    def test_client_mode_toggle_stress_with_soft_ap(self):
-        """Same as test_client_mode_toggle_stress, but softap is set up at
-        test start and verified after every toggle."""
-        asserts.skip_if(not self.access_point, 'No access point provided.')
-        tests = self.soft_ap_test_params.get(
-            'test_client_mode_toggle_stress_with_soft_ap', [
-                dict(test_name=
-                     'test_client_mode_toggle_stress_with_soft_ap_default')
-            ])
-
-        test_settings_list = []
-        for config_settings in tests:
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
-            ap_params = get_ap_params_from_config_or_default(
-                config_settings.get('ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
-            test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_client_mode_toggle_stress_with_soft_ap_%s_iterations'
-                    % iterations),
-                'test_runner_func':
-                self.client_mode_toggle_with_soft_ap_iteration,
-                'pre_test_func':
-                self.client_mode_toggle_with_soft_ap_pre_test,
-                'soft_ap_params':
-                soft_ap_params,
-                'ap_params':
-                ap_params,
-                'iterations':
-                iterations
-            }
-            test_settings_list.append(test_settings)
-        self.run_generated_testcases(self.run_toggle_stress_test,
-                                     test_settings_list,
-                                     name_func=get_test_name_from_settings)
-
-    def test_soft_ap_and_client_mode_random_toggle_stress(self):
-        """Same as above toggle stres tests, but each iteration, either softap,
-        client mode, or both are toggled, then states are verified."""
-        asserts.skip_if(not self.access_point, 'No access point provided.')
-        tests = self.soft_ap_test_params.get(
-            'test_soft_ap_and_client_mode_random_toggle_stress', [
-                dict(
-                    test_name=
-                    'test_soft_ap_and_client_mode_random_toggle_stress_default'
-                )
-            ])
-
-        test_settings_list = []
-        for config_settings in tests:
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get('soft_ap_params', {}))
-            ap_params = get_ap_params_from_config_or_default(
-                config_settings.get('ap_params', {}))
-            iterations = config_settings.get('iterations',
-                                             DEFAULT_STRESS_TEST_ITERATIONS)
-            test_settings = {
-                'test_name':
-                config_settings.get(
-                    'test_name',
-                    'test_soft_ap_and_client_mode_random_toggle_stress_%s_iterations'
-                    % iterations),
-                'soft_ap_params':
-                soft_ap_params,
-                'ap_params':
-                ap_params,
-                'iterations':
-                iterations
-            }
-            test_settings_list.append(test_settings)
-        self.run_generated_testcases(
-            self.run_soft_ap_and_client_mode_random_toggle_stress_test,
-            test_settings_list,
-            name_func=get_test_name_from_settings)
diff --git a/src/antlion/tests/wlan/functional/WlanRebootTest.py b/src/antlion/tests/wlan/functional/WlanRebootTest.py
deleted file mode 100644
index ed305c7..0000000
--- a/src/antlion/tests/wlan/functional/WlanRebootTest.py
+++ /dev/null
@@ -1,763 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-import os
-import time
-
-from multiprocessing import Process
-
-from antlion import asserts
-from antlion import context
-from antlion import utils
-from antlion.controllers import iperf_client
-from antlion.controllers import iperf_server
-from antlion.controllers.access_point import AccessPoint, setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.controllers.fuchsia_lib.utils_lib import wait_for_port
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-# Constants, for readibility
-AP = 'ap'
-DUT = 'dut'
-DEVICES = [AP, DUT]
-
-SOFT = 'soft'
-HARD = 'hard'
-REBOOT_TYPES = [SOFT, HARD]
-
-BAND_2G = '2g'
-BAND_5G = '5g'
-BANDS = [BAND_2G, BAND_5G]
-
-IPV4 = 'ipv4'
-IPV6 = 'ipv6'
-DUAL_IPV4_IPV6 = {IPV4: True, IPV6: True}
-IPV4_ONLY = {IPV4: True, IPV6: False}
-IPV6_ONLY = {IPV4: False, IPV6: True}
-IP_VERSIONS = [IPV4_ONLY, IPV6_ONLY, DUAL_IPV4_IPV6]
-
-INTERRUPTS = [True, False]
-OPEN_ENCRYPTION_STRING = "open"
-SECURITY_MODES = [
-    OPEN_ENCRYPTION_STRING, hostapd_constants.WPA2_STRING,
-    hostapd_constants.WPA3_STRING
-]
-
-DEFAULT_IPERF_TIMEOUT = 30
-
-DUT_NETWORK_CONNECTION_TIMEOUT = 60
-DUT_IP_ADDRESS_TIMEOUT = 30  # max time for DAD to complete
-
-# Constants for Custom Reboot Tests
-ALL = 'all'
-BOTH = 'both'
-
-CUSTOM_TEST_REBOOT_DEVICES = {AP: [AP], DUT: [DUT], ALL: [AP, DUT]}
-CUSTOM_TEST_REBOOT_TYPES = {SOFT: [SOFT], HARD: [HARD], ALL: [SOFT, HARD]}
-CUSTOM_TEST_BANDS = {
-    BAND_2G: [BAND_2G],
-    BAND_5G: [BAND_5G],
-    ALL: [BAND_2G, BAND_5G]
-}
-CUSTOM_TEST_IP_VERSIONS = {
-    IPV4: [IPV4_ONLY],
-    IPV6: [IPV6_ONLY],
-    BOTH: [DUAL_IPV4_IPV6],
-    ALL: [IPV4_ONLY, IPV6_ONLY, DUAL_IPV4_IPV6]
-}
-CUSTOM_TEST_INTERRUPTS = {'true': [True], 'false': [False], ALL: [True, False]}
-
-
-class WlanRebootTest(WifiBaseTest):
-    """Tests wlan reconnects in different reboot scenarios.
-
-    Testbed Requirement:
-    * One ACTS compatible device (dut)
-    * One Whirlwind Access Point (will also serve as iperf server)
-    * One PduDevice
-    """
-
-    def __init__(self, controllers):
-        super().__init__(controllers)
-
-    def setup_generated_tests(self):
-        self._read_wlan_reboot_test_params()
-        self.generate_tests(test_logic=self.run_reboot_test,
-                            name_func=self.generate_test_name,
-                            arg_sets=self.generate_test_args())
-
-    def setup_class(self):
-        super().setup_class()
-        self.android_devices = getattr(self, 'android_devices', [])
-        self.fuchsia_devices = getattr(self, 'fuchsia_devices', [])
-
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
-
-        self.access_point: AccessPoint = self.access_points[0]
-
-        self.iperf_server_on_ap = None
-        self.iperf_client_on_dut = None
-        if not self.skip_iperf:
-            if hasattr(self, "iperf_clients") and self.iperf_clients:
-                self.iperf_client_on_dut = self.iperf_clients[0]
-            else:
-                self.iperf_client_on_dut = self.dut.create_iperf_client()
-        else:
-            self.log.info(
-                'Skipping iperf throughput validation as requested by ACTS '
-                'config')
-
-    def setup_test(self):
-        self.access_point.stop_all_aps()
-        self.dut.wifi_toggle_state(True)
-        for ad in self.android_devices:
-            ad.droid.wakeLockAcquireBright()
-            ad.droid.wakeUpNow()
-        self.dut.disconnect()
-        self.ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-
-    def teardown_test(self):
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-        self.dut.disconnect()
-        for ad in self.android_devices:
-            ad.droid.wakeLockRelease()
-            ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.reset_wifi()
-
-    def setup_ap(self,
-                 ssid,
-                 band,
-                 ipv4=True,
-                 ipv6=False,
-                 security_mode=None,
-                 password=None):
-        """Setup ap with basic config.
-
-        Args:
-            ssid: string, ssid to setup on ap
-            band: string ('2g' or '5g') of band to setup.
-            ipv4: True if using ipv4 (dhcp), else False.
-            ipv6: True if using ipv6 (radvd), else False.
-        """
-        # TODO(fxb/63719): Add varying AP parameters
-        security_profile = None
-        if security_mode:
-            security_profile = Security(security_mode=security_mode,
-                                        password=password)
-        if band == BAND_2G:
-            setup_ap(access_point=self.access_point,
-                     profile_name='whirlwind',
-                     channel=11,
-                     ssid=ssid,
-                     security=security_profile,
-                     is_ipv6_enabled=ipv6)
-        elif band == BAND_5G:
-            setup_ap(access_point=self.access_point,
-                     profile_name='whirlwind',
-                     channel=36,
-                     ssid=ssid,
-                     security=security_profile,
-                     is_ipv6_enabled=ipv6)
-
-        if not ipv4:
-            self.access_point.stop_dhcp()
-
-        self.log.info('Network (SSID: %s) is up.' % ssid)
-
-    def setup_iperf_server_on_ap(self,
-                                 band) -> iperf_server.IPerfServerOverSsh:
-        """Configures iperf server based on the tests band.
-
-        Args:
-            band: string ('2g' or '5g') of band to setup.
-        """
-        if band == BAND_2G:
-            return iperf_server.IPerfServerOverSsh(
-                self.user_params['AccessPoint'][0]['ssh_config'],
-                5201,
-                test_interface=self.access_point.wlan_2g)
-        elif band == BAND_5G:
-            return iperf_server.IPerfServerOverSsh(
-                self.user_params['AccessPoint'][0]['ssh_config'],
-                5201,
-                test_interface=self.access_point.wlan_5g)
-
-    def get_iperf_server_address(self, iperf_server_on_ap, ip_version):
-        """Retrieves the ip address of the iperf server.
-
-        Args:
-            iperf_server_on_ap: IPerfServer object, linked to AP
-            ip_version: string, the ip version (ipv4 or ipv6)
-
-        Returns:
-            String, the ip address of the iperf_server
-        """
-        iperf_server_addresses = iperf_server_on_ap.get_interface_ip_addresses(
-            iperf_server_on_ap.test_interface)
-        if ip_version == IPV4:
-            iperf_server_ip_address = (
-                iperf_server_addresses['ipv4_private'][0])
-        elif ip_version == IPV6:
-            if iperf_server_addresses['ipv6_private_local']:
-                iperf_server_ip_address = (
-                    iperf_server_addresses['ipv6_private_local'][0])
-            else:
-                iperf_server_ip_address = (
-                    '%s%%%s' % (iperf_server_addresses['ipv6_link_local'][0],
-                                self.iperf_client_on_dut.test_interface))
-        else:
-            raise ValueError('Invalid IP version: %s' % ip_version)
-
-        return iperf_server_ip_address
-
-    def verify_traffic_between_dut_and_ap(self,
-                                          iperf_server_on_ap,
-                                          iperf_client_on_dut,
-                                          ip_version=IPV4):
-        """Runs IPerf traffic from the iperf client (dut) and the iperf
-        server (and vice versa) and verifies traffic was able to pass
-        successfully.
-
-        Args:
-            iperf_server_on_ap: IPerfServer object, linked to AP
-            iperf_client_on_dut: IPerfClient object, linked to DUT
-            ip_version: string, the ip version (ipv4 or ipv6)
-
-        Raises:
-            ValueError, if invalid ip_version is passed.
-            ConnectionError, if traffic is not passed successfully in both
-                directions.
-        """
-        dut_ip_addresses = self.dut.device.get_interface_ip_addresses(
-            iperf_client_on_dut.test_interface)
-
-        iperf_server_ip_address = self.get_iperf_server_address(
-            iperf_server_on_ap, ip_version)
-
-        self.log.info(
-            'Attempting to pass traffic from DUT to IPerf server (%s).' %
-            iperf_server_ip_address)
-        tx_file = iperf_client_on_dut.start(iperf_server_ip_address,
-                                            '-i 1 -t 3 -J',
-                                            'reboot_tx',
-                                            timeout=DEFAULT_IPERF_TIMEOUT)
-        tx_results = iperf_server.IPerfResult(tx_file)
-        if not tx_results.avg_receive_rate or tx_results.avg_receive_rate == 0:
-            raise ConnectionError(
-                'Failed to pass IPerf traffic from DUT to server (%s). TX '
-                'Average Receive Rate: %s' %
-                (iperf_server_ip_address, tx_results.avg_receive_rate))
-        else:
-            self.log.info(
-                'Success: Traffic passed from DUT to IPerf server (%s).' %
-                iperf_server_ip_address)
-        self.log.info(
-            'Attempting to pass traffic from IPerf server (%s) to DUT.' %
-            iperf_server_ip_address)
-        rx_file = iperf_client_on_dut.start(iperf_server_ip_address,
-                                            '-i 1 -t 3 -R -J',
-                                            'reboot_rx',
-                                            timeout=DEFAULT_IPERF_TIMEOUT)
-        rx_results = iperf_server.IPerfResult(rx_file)
-        if not rx_results.avg_receive_rate or rx_results.avg_receive_rate == 0:
-            raise ConnectionError(
-                'Failed to pass IPerf traffic from server (%s) to DUT. RX '
-                'Average Receive Rate: %s' %
-                (iperf_server_ip_address, rx_results.avg_receive_rate))
-        else:
-            self.log.info(
-                'Success: Traffic passed from IPerf server (%s) to DUT.' %
-                iperf_server_ip_address)
-
-    def start_dut_ping_process(self, iperf_server_on_ap, ip_version=IPV4):
-        """Creates a  process that pings the AP from the DUT.
-
-        Runs in parallel for 15 seconds, so it can be interrupted by a reboot.
-        Sleeps for a few seconds to ensure pings have started.
-
-        Args:
-            iperf_server_on_ap: IPerfServer object, linked to AP
-            ip_version: string, the ip version (ipv4 or ipv6)
-        """
-        ap_address = self.get_iperf_server_address(iperf_server_on_ap,
-                                                   ip_version)
-        if ap_address:
-            self.log.info(
-                'Starting ping process to %s in parallel. Logs from this '
-                'process will be suppressed, since it will be intentionally '
-                'interrupted.' % ap_address)
-            ping_proc = Process(target=self.dut.ping,
-                                args=[ap_address],
-                                kwargs={'count': 15})
-            with utils.SuppressLogOutput():
-                ping_proc.start()
-            # Allow for a few seconds of pinging before allowing it to be
-            # interrupted.
-            time.sleep(3)
-        else:
-            raise ConnectionError('Failed to retrieve APs iperf address.')
-
-    def prepare_dut_for_reconnection(self):
-        """Perform any actions to ready DUT for reconnection.
-
-        These actions will vary depending on the DUT. eg. android devices may
-        need to be woken up, ambient devices should not require any interaction,
-        etc.
-        """
-        self.dut.wifi_toggle_state(True)
-        for ad in self.android_devices:
-            ad.droid.wakeUpNow()
-
-    def wait_for_dut_network_connection(self, ssid):
-        """Checks if device is connected to given network. Sleeps 1 second
-        between retries.
-
-        Args:
-            ssid: string of ssid
-        Raises:
-            ConnectionError, if DUT is not connected after all timeout.
-        """
-        self.log.info(
-            'Checking if DUT is connected to %s network. Will retry for %s '
-            'seconds.' % (ssid, self.dut_network_connection_timeout))
-        timeout = time.time() + self.dut_network_connection_timeout
-        while time.time() < timeout:
-            try:
-                is_connected = self.dut.is_connected(ssid=ssid)
-            except Exception as err:
-                self.log.debug('SL4* call failed. Retrying in 1 second.')
-                is_connected = False
-            finally:
-                if is_connected:
-                    self.log.info('Success: DUT has connected.')
-                    break
-                else:
-                    self.log.debug(
-                        'DUT not connected to network %s...retrying in 1 second.'
-                        % ssid)
-                    time.sleep(1)
-        else:
-            raise ConnectionError('DUT failed to connect to the network.')
-
-    def write_csv_time_to_reconnect(self, test_name, time_to_reconnect):
-        """Writes the time to reconnect to a csv file.
-        Args:
-            test_name: the name of the test case
-            time_to_reconnect: the time from when the rebooted device came back
-                up to when it reassociated (or 'FAIL'), if it failed to
-                reconnect.
-        """
-        log_context = context.get_current_context()
-        log_path = os.path.join(log_context.get_base_output_path(),
-                                'WlanRebootTest/')
-        csv_file_name = '%stime_to_reconnect.csv' % log_path
-        self.log.info('Writing to %s' % csv_file_name)
-        with open(csv_file_name, 'a') as csv_file:
-            csv_file.write('%s,%s\n' % (test_name, time_to_reconnect))
-
-    def log_and_continue(self, run, time_to_reconnect=None, error=None):
-        """Writes the time to reconnect to the csv file before continuing, used
-        in stress tests runs.
-
-        Args:
-            time_to_reconnect: the time from when the rebooted device came back
-                ip to when reassociation occurred.
-            run: the run number in a looped stress tested.,
-            error: string, error message to log before continuing with the test
-        """
-        if error:
-            self.log.info(
-                'Device failed to reconnect to network %s on run %s. Error: %s'
-                % (self.ssid, run, error))
-            self.write_csv_time_to_reconnect(
-                '%s_run_%s' % (self.test_name, run), 'FAIL')
-
-        else:
-            self.log.info(
-                'Device successfully reconnected to network %s after %s seconds'
-                ' on run %s.' % (self.ssid, time_to_reconnect, run))
-            self.write_csv_time_to_reconnect(
-                '%s_run_%s' % (self.test_name, run), time_to_reconnect)
-
-    def run_reboot_test(self, settings):
-        """Runs a reboot test based on a given config.
-            1. Setups up a network, associates the dut, and saves the network.
-            2. Verifies the dut receives ip address(es).
-            3. Verifies traffic between DUT and AP (IPerf client and server).
-            4. Reboots (hard or soft) the device (dut or ap).
-                - If the ap was rebooted, setup the same network again.
-            5. Wait for reassociation or timeout.
-            6. If reassocation occurs:
-                - Verifies the dut receives ip address(es).
-                - Verifies traffic between DUT and AP (IPerf client and server).
-            7. Logs time to reconnect (or failure to reconnect)
-            8. If stress testing, repeats steps 4 - 7 for N iterations.
-
-        Args:
-            settings: dictionary containing the following values:
-                reboot_device: string ('dut' or 'ap') of the device to reboot.
-                reboot_type: string ('soft' or 'hard') of how to reboot the
-                    reboot_device.
-                band: string ('2g' or '5g') of band to setup.
-                ipv4: True if using ipv4 (dhcp), else False.
-                ipv6: True if using ipv6 (radvd), else False.
-
-                Optional:
-                    interrupt: if True, the DUT will be pinging the AP in a
-                        parallel process when the reboot occurs. This is used to
-                        compare reconnect times when idle to active.
-                    test_name: name of the test, used when stress testing.
-                    iterations: number of times to perform test, used when stress
-                        testing.
-
-        Raises:
-            ValueError, if ipv4 and ipv6 are both False
-            ValueError, if band is not '2g' or '5g'
-            ValueError, if reboot_device is not 'dut' or 'ap'
-            ValueError, if reboot_type is not 'soft' or 'hard'
-
-        """
-        iterations = settings.get('iterations', 1)
-        passed_count = 0
-        ipv4 = settings.get('ipv4', None)
-        ipv6 = settings.get('ipv6', None)
-        reboot_device = settings['reboot_device']
-        reboot_type = settings['reboot_type']
-        band = settings['band']
-        security_mode = settings.get('security_mode', None)
-        password = settings.get('password', None)
-        if security_mode:
-            if security_mode.lower() == 'open':
-                security_mode = None
-            elif not password:
-                password = generate_random_password(
-                    security_mode=security_mode)
-        interrupt = settings.get('interrupt', None)
-        # Skip hard reboots if no PDU present
-        asserts.skip_if(
-            reboot_type == HARD
-            and len(self.user_params.get('PduDevice', [])) < 1,
-            'Hard reboots require a PDU device.')
-        # Skip DUT reboot w/ interrupt tests, since they are not more helpful
-        # and may cause threading issues.
-        asserts.skip_if(
-            (reboot_device == DUT) and interrupt,
-            'Stream interrupts for DUT reboots are prone to threading issues '
-            'and are not supported.')
-
-        # Validate test settings.
-        if not ipv4 and not ipv6:
-            raise ValueError('Either ipv4, ipv6, or both must be True.')
-        if reboot_device != DUT and reboot_device != AP:
-            raise ValueError('Invalid reboot device: %s' % reboot_device)
-        if reboot_type != SOFT and reboot_type != HARD:
-            raise ValueError('Invalid reboot type: %s' % reboot_type)
-        if band != BAND_2G and band != BAND_5G:
-            raise ValueError('Invalid band: %s' % band)
-
-        self.setup_ap(self.ssid, band, ipv4, ipv6, security_mode, password)
-        if not self.dut.associate(
-                self.ssid,
-                target_security=hostapd_constants.
-                SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(security_mode),
-                target_pwd=password):
-            raise EnvironmentError('Initial network connection failed.')
-
-        if not self.skip_iperf:
-            dut_test_interface = self.iperf_client_on_dut.test_interface
-            if ipv4:
-                self.dut.device.wait_for_ipv4_addr(dut_test_interface)
-            if ipv6:
-                self.dut.device.wait_for_ipv6_addr(dut_test_interface)
-
-            self.iperf_server_on_ap = self.setup_iperf_server_on_ap(band)
-            self.iperf_server_on_ap.start()
-            wait_for_port(self.iperf_server_on_ap.ssh_settings.hostname, 5201)
-
-            ip_version = IPV6 if ipv6 else IPV4
-            self.verify_traffic_between_dut_and_ap(self.iperf_server_on_ap,
-                                                   self.iperf_client_on_dut,
-                                                   ip_version=ip_version)
-
-        # Looping reboots for stress testing
-        for run in range(iterations):
-            run += 1
-            self.log.info('Starting run %s of %s.' % (run, iterations))
-
-            # Ping from DUT to AP during AP reboot
-            if interrupt:
-                if ipv4:
-                    self.start_dut_ping_process(self.iperf_server_on_ap)
-                if ipv6:
-                    self.start_dut_ping_process(self.iperf_server_on_ap,
-                                                ip_version=IPV6)
-
-            # DUT reboots
-            if reboot_device == DUT:
-                if not self.skip_iperf and type(
-                        self.iperf_client_on_dut
-                ) == iperf_client.IPerfClientOverSsh:
-                    self.iperf_client_on_dut.close_ssh()
-                if reboot_type == SOFT:
-                    self.dut.device.reboot()
-                elif reboot_type == HARD:
-                    self.dut.hard_power_cycle(self.pdu_devices)
-
-            # AP reboots
-            elif reboot_device == AP:
-                if reboot_type == SOFT:
-                    self.log.info('Cleanly stopping ap.')
-                    self.access_point.stop_all_aps()
-                elif reboot_type == HARD:
-                    if not self.skip_iperf:
-                        self.iperf_server_on_ap.close_ssh()
-                    self.access_point.hard_power_cycle(self.pdu_devices)
-                self.setup_ap(self.ssid, band, ipv4, ipv6, security_mode,
-                              password)
-
-            self.prepare_dut_for_reconnection()
-            uptime = time.time()
-            try:
-                self.wait_for_dut_network_connection(self.ssid)
-                time_to_reconnect = time.time() - uptime
-
-                if not self.skip_iperf:
-                    if ipv4:
-                        self.dut.device.wait_for_ipv4_addr(dut_test_interface)
-                    if ipv6:
-                        self.dut.device.wait_for_ipv6_addr(dut_test_interface)
-
-                    self.iperf_server_on_ap.start()
-
-                    if ipv4:
-                        self.verify_traffic_between_dut_and_ap(
-                            self.iperf_server_on_ap, self.iperf_client_on_dut)
-                    if ipv6:
-                        self.verify_traffic_between_dut_and_ap(
-                            self.iperf_server_on_ap,
-                            self.iperf_client_on_dut,
-                            ip_version=IPV6)
-
-            except ConnectionError as err:
-                self.log_and_continue(run, error=err)
-            else:
-                passed_count += 1
-                self.log_and_continue(run, time_to_reconnect=time_to_reconnect)
-
-        if passed_count == iterations:
-            asserts.explicit_pass(
-                'Test Summary: device successfully reconnected to network %s '
-                '%s/%s times.' % (self.ssid, passed_count, iterations))
-
-        else:
-            asserts.fail(
-                'Test Summary: device failed reconnection test. Reconnected to '
-                'network %s %s/%s times.' %
-                (self.ssid, passed_count, iterations))
-
-    def generate_test_name(self, settings):
-        """Generates a test case name based on the reboot settings passed.
-
-        Args:
-            settings: A dictionary of settings related to reboot test.
-
-        Returns:
-            A string that represents a test case name.
-        """
-        test_name = "test_{reboot_type}_reboot_{reboot_device}_{band}_{security_mode}".format(
-            **settings)
-
-        if settings.get(IPV4):
-            test_name += "_ipv4"
-
-        if settings.get(IPV6):
-            test_name += "_ipv6"
-
-        if settings.get('interrupt'):
-            test_name += "_interrupt"
-
-        if settings.get('iterations'):
-            test_name += f"_with_{settings['iterations']}_iterations"
-
-        return test_name
-
-    def generate_test_args(self):
-        # If custom reboot tests present in ACTS config then run just those
-        test_args = self._generate_custom_reboots_test_args()
-        if test_args:
-            return test_args
-
-        # Interrupt tests requires using iperf. So do not run interrupt tests
-        # when skip_iperf is True
-        if self.skip_iperf is True:
-            interrupts = [False]
-        else:
-            interrupts = INTERRUPTS
-
-        for (reboot_device, reboot_type, band, ip_version, interrupt,
-             security_mode) in itertools.product(DEVICES, REBOOT_TYPES, BANDS,
-                                                 IP_VERSIONS, interrupts,
-                                                 SECURITY_MODES):
-            settings = {
-                "reboot_device": reboot_device,
-                "reboot_type": reboot_type,
-                "band": band,
-                "security_mode": security_mode,
-                "ipv4": ip_version["ipv4"],
-                "ipv6": ip_version["ipv6"],
-                "interrupt": interrupt,
-            }
-            test_args.append((settings, ))
-
-        return test_args
-
-    def _generate_custom_reboots_test_args(self):
-        """Used to create custom reboot tests from antlion config. Can be
-        individual tests or permutation sets (i.e. setting "all" for a
-        test param will run a test with every permutation).
-
-        Parameters:
-            reboot_device: string - "ap", "dut", or "all"
-            reboot_type: string - "soft", "hard", or "all"
-            band: string, "2g" - "5g", "all"
-            ip_version: string - "ipv4", "ipv6", "both", or "all"
-            interrupt: bool - whether to have traffic flowing at reboot
-            security_modes: optional, string or list - "open", "wep", "wpa",
-                "wpa2", "wpa/wpa2", "wpa3", "wpa2/wpa3"
-            iterations: int - number of iterations for each test
-
-        Example:
-        "wlan_reboot_test_params": {
-            "test_custom_reboots": [
-                {
-                    "reboot_device": "dut",
-                    "reboot_type": "soft",
-                    "band": "2g",
-                    "ip_version": "both"
-                },
-                {
-                    "reboot_device": "all",
-                    "reboot_type": "hard",
-                    "band": "all",
-                    "ip_version": ipv4",
-                    "security_modes": "wpa2",
-                    "iterations": 10
-                },
-                {
-                    "reboot_device": "dut",
-                    "reboot_type": "hard",
-                    "band": "5g",
-                    "ip_version": "ipv4",
-                    "security_modes": ["open", "wpa3"]
-                }
-            ]
-        }
-
-        The first example runs a single DUT soft reboot test with a 2.4GHz
-        network and dual ipv4/ipv6.
-
-        The second example runs 4 tests, each with 10 iterations. It runs hard
-        reboots with ipv4 for the permutations of DUT/AP and 2.4GHz/5GHz.
-
-        The third example runs two tests, both hard reboots of the DUT with 5g
-        and ipv4 only, one with open security and one with WPA3.
-        """
-        if 'test_custom_reboots' not in self.wlan_reboot_test_params:
-            self.log.info('No custom reboots provided in ACTS config.')
-            return []
-
-        test_args = []
-        for test in self.wlan_reboot_test_params['test_custom_reboots']:
-            # Ensure required params are present
-            try:
-                reboot_device = test['reboot_device'].lower()
-                reboot_type = test['reboot_type'].lower()
-                band = test['band'].lower()
-                ip_version = test['ip_version'].lower()
-            except KeyError as err:
-                raise AttributeError(
-                    'Must provide reboot_type, reboot_device, ip_version, and '
-                    'band (optionally interrupt and iterations) in custom test '
-                    'config. See test_custom_reboots docstring for details. '
-                    'Err: %s' % err)
-            security_modes = test.get('security_modes', 'open')
-            interrupt = str(test.get('interrupt', False)).lower()
-            iterations = test.get('iterations', 1)
-
-            if interrupt == "true" and self.skip_iperf:
-                raise AttributeError(
-                    "Interrupt can't be set to True when iperf is disabled. "
-                    "Update 'skip_iperf' to 'false' in ACTS config and run again"
-                )
-
-            # Validate parameters and convert to lists (for permutations)
-            try:
-                reboot_devices = CUSTOM_TEST_REBOOT_DEVICES[reboot_device]
-                reboot_types = CUSTOM_TEST_REBOOT_TYPES[reboot_type]
-                bands = CUSTOM_TEST_BANDS[band]
-                ip_versions = CUSTOM_TEST_IP_VERSIONS[ip_version]
-                interrupts = CUSTOM_TEST_INTERRUPTS[interrupt]
-                if isinstance(security_modes, str):
-                    security_modes = [security_modes]
-            except KeyError as err:
-                raise AttributeError(
-                    'Invalid custom test parameter provided. Err: %s' % err)
-
-            for (reboot_device, reboot_type, band, ip_version, interrupt,
-                 security_mode) in itertools.product(reboot_devices,
-                                                     reboot_types, bands,
-                                                     ip_versions, interrupts,
-                                                     security_modes):
-                settings = {
-                    "reboot_device": reboot_device,
-                    "reboot_type": reboot_type,
-                    "band": band,
-                    "security_mode": security_mode,
-                    "ipv4": ip_version[IPV4],
-                    "ipv6": ip_version[IPV6],
-                    "interrupt": interrupt,
-                    "iterations": iterations,
-                }
-
-                test_args.append((settings, ))
-        return test_args
-
-    def _read_wlan_reboot_test_params(self):
-        self.wlan_reboot_test_params = self.user_params.get(
-            'wlan_reboot_test_params', {})
-        self.skip_iperf = self.wlan_reboot_test_params.get('skip_iperf', False)
-        # Times (in seconds) to wait for DUT network connection and assigning an
-        # ip address to the wlan interface.
-        self.dut_network_connection_timeout = self.wlan_reboot_test_params.get(
-            'dut_network_connection_timeout', DUT_NETWORK_CONNECTION_TIMEOUT)
-        self.dut_ip_address_timeout = self.wlan_reboot_test_params.get(
-            'dut_ip_address_timeout', DUT_IP_ADDRESS_TIMEOUT)
diff --git a/src/antlion/tests/wlan/functional/WlanScanTest.py b/src/antlion/tests/wlan/functional/WlanScanTest.py
deleted file mode 100644
index ed5d08a..0000000
--- a/src/antlion/tests/wlan/functional/WlanScanTest.py
+++ /dev/null
@@ -1,254 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""
-This test exercises basic scanning functionality to confirm expected behavior
-related to wlan scanning
-"""
-
-from datetime import datetime
-
-from antlion import signals
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib import hostapd_bss_settings
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-
-class WlanScanTest(WifiBaseTest):
-    """WLAN scan test class.
-
-    Test Bed Requirement:
-    * One or more Fuchsia devices
-    * Several Wi-Fi networks visible to the device, including an open Wi-Fi
-      network or a onHub/GoogleWifi
-    """
-
-    def setup_class(self):
-        super().setup_class()
-
-        self.access_point = self.access_points[0]
-        self.start_access_point = False
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='drivers')
-        if "AccessPoint" in self.user_params:
-            # This section sets up the config that could be sent to the AP if
-            # the AP is needed. The reasoning is since ACTS already connects
-            # to the AP if it is in the config, generating the config in memory
-            # has no over head is used if need by the test if one of the ssids
-            # needed for the test is not included in the config.  The logic
-            # here creates 2 ssids on each radio, 5ghz and 2.4ghz, with an
-            # open, no security network and one that is wpa2, for a total of 4
-            # networks.  However, if all of the ssids are specified in the
-            # the config will never be written to the AP and the AP will not be
-            # brought up.  For more information about how to configure the
-            # hostapd config info, see the hostapd libraries, which have more
-            # documentation.
-            bss_settings_2g = []
-            bss_settings_5g = []
-            open_network = self.get_open_network(False, [])
-            self.open_network_2g = open_network['2g']
-            self.open_network_5g = open_network['5g']
-            wpa2_settings = self.get_psk_network(False, [])
-            self.wpa2_network_2g = wpa2_settings['2g']
-            self.wpa2_network_5g = wpa2_settings['5g']
-            bss_settings_2g.append(
-                hostapd_bss_settings.BssSettings(
-                    name=self.wpa2_network_2g['SSID'],
-                    ssid=self.wpa2_network_2g['SSID'],
-                    security=hostapd_security.Security(
-                        security_mode=self.wpa2_network_2g["security"],
-                        password=self.wpa2_network_2g["password"])))
-            bss_settings_5g.append(
-                hostapd_bss_settings.BssSettings(
-                    name=self.wpa2_network_5g['SSID'],
-                    ssid=self.wpa2_network_5g['SSID'],
-                    security=hostapd_security.Security(
-                        security_mode=self.wpa2_network_5g["security"],
-                        password=self.wpa2_network_5g["password"])))
-            self.ap_2g = hostapd_ap_preset.create_ap_preset(
-                iface_wlan_2g=self.access_point.wlan_2g,
-                iface_wlan_5g=self.access_point.wlan_5g,
-                channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                ssid=self.open_network_2g['SSID'],
-                bss_settings=bss_settings_2g)
-            self.ap_5g = hostapd_ap_preset.create_ap_preset(
-                iface_wlan_2g=self.access_point.wlan_2g,
-                iface_wlan_5g=self.access_point.wlan_5g,
-                channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                ssid=self.open_network_5g['SSID'],
-                bss_settings=bss_settings_5g)
-
-        if "wlan_open_network_2g" in self.user_params:
-            self.open_network_2g = self.user_params.get("wlan_open_network_2g")
-        elif "AccessPoint" in self.user_params:
-            self.start_access_point_2g = True
-        else:
-            raise Exception('Missing parameter in config '
-                            '(wlan_open_network_2g)')
-
-        if "wlan_open_network_5g" in self.user_params:
-            self.open_network_5g = self.user_params.get("wlan_open_network_5g")
-        elif "AccessPoint" in self.user_params:
-            self.start_access_point_5g = True
-        else:
-            raise Exception('Missing parameter in config '
-                            '(wlan_open_network_5g)')
-
-        if "wlan_wpa2_network_2g" in self.user_params:
-            self.wpa2_network_2g = self.user_params.get("wlan_wpa2_network_2g")
-        elif "AccessPoint" in self.user_params:
-            self.start_access_point_2g = True
-        else:
-            raise Exception('Missing parameter in config '
-                            '(wlan_wpa2_network_2g)')
-
-        if "wlan_wpa2_network_5g" in self.user_params:
-            self.wpa2_network_5g = self.user_params.get("wlan_wpa2_network_5g")
-        elif "AccessPoint" in self.user_params:
-            self.start_access_point_5g = True
-        else:
-            raise Exception('Missing parameter in config '
-                            '(wlan_wpa2_network_5g)')
-
-        # Only bring up the APs that are needed for the test.  Each ssid is
-        # randomly generated so there is no chance of re associating to a
-        # previously saved ssid on the device.
-        if self.start_access_point_2g:
-            self.start_access_point = True
-            self.access_point.start_ap(hostapd_config=self.ap_2g)
-        if self.start_access_point_5g:
-            self.start_access_point = True
-            self.access_point.start_ap(hostapd_config=self.ap_5g)
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            # stub for setting up all the fuchsia devices in the testbed.
-            pass
-
-    def teardown_test(self):
-        for fd in self.fuchsia_devices:
-            fd.sl4f.wlan_lib.wlanDisconnect()
-
-    def teardown_class(self):
-        if self.start_access_point:
-            self.download_ap_logs()
-            self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            super().on_device_fail(fd, test_name, begin_time)
-            fd.configure_wlan(association_mechanism='drivers')
-
-    """Helper Functions"""
-
-    def check_connect_response(self, connection_response):
-        """ Checks the result of connecting to a wlan.
-            Args:
-                connection_response: The response from SL4F after attempting
-                    to connect to a wlan.
-        """
-        if connection_response.get("error") is None:
-            # the command did not get an error response - go ahead and
-            # check the result
-            connection_result = connection_response.get("result")
-            if connection_result:
-                self.log.info("connection to network successful")
-            else:
-                # ideally, we would have the actual error...  but logging
-                # here to cover that error case
-                raise signals.TestFailure("Connect call failed, aborting test")
-        else:
-            # the response indicates an error - log and raise failure
-            raise signals.TestFailure("Aborting test - Connect call failed "
-                                      "with error: %s" %
-                                      connection_response.get("error"))
-
-    def scan_while_connected(self, wlan_network_params, fd):
-        """ Connects to as specified network and initiates a scan
-                Args:
-                    wlan_network_params: A dictionary containing wlan
-                        infomation.
-                    fd: The fuchsia device to connect to the wlan.
-        """
-        target_ssid = wlan_network_params['SSID']
-        self.log.info("got the ssid! %s", target_ssid)
-        target_pwd = None
-        if 'password' in wlan_network_params:
-            target_pwd = wlan_network_params['password']
-
-        bss_scan_response = fd.sl4f.wlan_lib.wlanScanForBSSInfo().get('result')
-        connection_response = fd.sl4f.wlan_lib.wlanConnectToNetwork(
-            target_ssid,
-            bss_scan_response[target_ssid][0],
-            target_pwd=target_pwd)
-        self.check_connect_response(connection_response)
-        self.basic_scan_request(fd)
-
-    def basic_scan_request(self, fd):
-        """ Initiates a basic scan on a Fuchsia device
-            Args:
-                fd: A fuchsia device
-        """
-        start_time = datetime.now()
-
-        scan_response = fd.sl4f.wlan_lib.wlanStartScan()
-
-        # first check if we received an error
-        if scan_response.get("error") is None:
-            # the scan command did not get an error response - go ahead
-            # and check for scan results
-            scan_results = scan_response["result"]
-        else:
-            # the response indicates an error - log and raise failure
-            raise signals.TestFailure("Aborting test - scan failed with "
-                                      "error: %s" % scan_response.get("error"))
-
-        self.log.info("scan contained %d results", len(scan_results))
-
-        total_time_ms = (datetime.now() - start_time).total_seconds() * 1000
-        self.log.info("scan time: %d ms", total_time_ms)
-
-        if len(scan_results) > 0:
-            raise signals.TestPass(details="",
-                                   extras={"Scan time": "%d" % total_time_ms})
-        else:
-            raise signals.TestFailure("Scan failed or did not "
-                                      "find any networks")
-
-    """Tests"""
-
-    def test_basic_scan_request(self):
-        """Verify a general scan trigger returns at least one result"""
-        for fd in self.fuchsia_devices:
-            self.basic_scan_request(fd)
-
-    def test_scan_while_connected_open_network_2g(self):
-        for fd in self.fuchsia_devices:
-            self.scan_while_connected(self.open_network_2g, fd)
-
-    def test_scan_while_connected_wpa2_network_2g(self):
-        for fd in self.fuchsia_devices:
-            self.scan_while_connected(self.wpa2_network_2g, fd)
-
-    def test_scan_while_connected_open_network_5g(self):
-        for fd in self.fuchsia_devices:
-            self.scan_while_connected(self.open_network_5g, fd)
-
-    def test_scan_while_connected_wpa2_network_5g(self):
-        for fd in self.fuchsia_devices:
-            self.scan_while_connected(self.wpa2_network_5g, fd)
diff --git a/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py b/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py
deleted file mode 100644
index 631df3b..0000000
--- a/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py
+++ /dev/null
@@ -1,357 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-
-# TODO(fxb/68956): Add security protocol check to mixed mode tests when info is
-# available.
-class WlanTargetSecurityTest(WifiBaseTest):
-    """Tests Fuchsia's target security concept and security upgrading
-
-    Testbed Requirements:
-    * One Fuchsia device
-    * One Whirlwind Access Point
-    """
-
-    def setup_class(self):
-        if 'dut' in self.user_params and self.user_params[
-                'dut'] != 'fuchsia_devices':
-            raise AttributeError(
-                'WlanTargetSecurityTest is only relevant for Fuchsia devices.')
-
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-        if self.dut.device.association_mechanism != 'policy':
-            raise AttributeError(
-                'Must use WLAN policy layer to test target security.')
-
-        self.access_point = self.access_points[0]
-
-    def teardown_class(self):
-        self.dut.disconnect()
-        self.access_point.stop_all_aps()
-
-    def teardown_test(self):
-        self.dut.disconnect()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def on_exception(self, test_name, begin_time):
-        super().on_exception(test_name, begin_time)
-        self.dut.disconnect()
-        self.access_point.stop_all_aps()
-
-    def setup_ap(self, security_mode=None):
-        """ Sets up an AP using the provided security mode.
-
-        Args:
-            security_mode: string, security mode for AP
-        Returns:
-            Tuple, (ssid, password). Returns a password even if for open
-                security, since non-open target securities require a credential
-                to attempt a connection.
-        """
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G)
-        # Length 13, so it can be used for WEP or WPA
-        password = utils.rand_ascii_str(13)
-        security_profile = None
-
-        if security_mode:
-            security_profile = Security(security_mode=security_mode,
-                                        password=password)
-
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 security=security_profile)
-
-        return (ssid, password)
-
-    # Open Security on AP
-    def test_associate_open_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap()
-        asserts.assert_true(self.dut.associate(ssid), 'Failed to associate.')
-
-    def test_reject_open_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap()
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    def test_reject_open_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap()
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    def test_reject_open_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap()
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    def test_reject_open_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap()
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    # WEP Security on AP
-    def test_reject_wep_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WEP_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
-
-    def test_associate_wep_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password), 'Failed to associate.')
-
-    def test_reject_wep_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    def test_reject_wep_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    def test_reject_wep_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    # WPA Security on AP
-    def test_reject_wpa_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WPA_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
-
-    def test_reject_wpa_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    def test_associate_wpa_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password), 'Failed to associate.')
-
-    def test_reject_wpa_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    def test_reject_wpa_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    # WPA2 Security on AP
-    def test_reject_wpa2_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WPA2_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
-
-    def test_reject_wpa2_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    def test_associate_wpa2_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password), 'Failed to associate.')
-
-    def test_associate_wpa2_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password), 'Failed to associate.')
-
-    def test_reject_wpa2_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    # WPA/WPA2 Security on AP
-    def test_reject_wpa_wpa2_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
-
-    def test_reject_wpa_wpa2_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    def test_associate_wpa_wpa2_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password), 'Failed to associate.')
-
-    def test_associate_wpa_wpa2_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password), 'Failed to associate.')
-
-    def test_reject_wpa_wpa2_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    # WPA3 Security on AP
-    def test_reject_wpa3_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WPA3_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
-
-    def test_reject_wpa3_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    def test_associate_wpa3_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password),
-            'Expected failure to associate. WPA credentials for WPA3 was '
-            'temporarily disabled, see https://fxbug.dev/85817 for context. '
-            'If this feature was reenabled, please update this test\'s '
-            'expectation.')
-
-    def test_associate_wpa3_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password), 'Failed to associate.')
-
-    def test_associate_wpa3_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password), 'Failed to associate.')
-
-    # WPA2/WPA3 Security on AP
-    def test_reject_wpa2_wpa3_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING)
-        asserts.assert_false(self.dut.associate(ssid),
-                             'Should not have associated.')
-
-    def test_reject_wpa2_wpa3_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(
-            hostapd_constants.WPA2_WPA3_MIXED_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WEP_STRING,
-                               target_pwd=password),
-            'Should not have associated.')
-
-    def test_associate_wpa2_wpa3_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(
-            hostapd_constants.WPA2_WPA3_MIXED_STRING)
-        asserts.assert_false(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA_STRING,
-                               target_pwd=password),
-            'Expected failure to associate. WPA credentials for WPA3 was '
-            'temporarily disabled, see https://fxbug.dev/85817 for context. '
-            'If this feature was reenabled, please update this test\'s '
-            'expectation.')
-
-    def test_associate_wpa2_wpa3_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(
-            hostapd_constants.WPA2_WPA3_MIXED_STRING)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA2_STRING,
-                               target_pwd=password), 'Failed to associate.')
-
-    def test_associate_wpa2_wpa3_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(
-            hostapd_constants.WPA2_WPA3_MIXED_STRING)
-        asserts.assert_true(
-            self.dut.associate(ssid,
-                               target_security=hostapd_constants.WPA3_STRING,
-                               target_pwd=password), 'Failed to associate.')
diff --git a/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py b/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py
deleted file mode 100644
index ffd41e2..0000000
--- a/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py
+++ /dev/null
@@ -1,213 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from datetime import datetime, timedelta, timezone
-from typing import FrozenSet
-
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion import asserts
-from antlion import signals
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.radio_measurement import BssidInformation, BssidInformationCapabilities, NeighborReportElement, PhyType
-from antlion.controllers.ap_lib.wireless_network_management import BssTransitionManagementRequest
-
-
-# TODO(fxbug.dev/103440) WNM support should be visible/controllable in ACTS.
-# When ACTS can see WNM features that are enabled (through ACTS config) or
-# ACTS can enable WNM features (through new APIs), additional tests should be
-# added to this suite that check that features function properly when the DUT is
-# configured to support those features.
-class WlanWirelessNetworkManagementTest(WifiBaseTest):
-    """Tests Fuchsia's Wireless Network Management (AKA 802.11v) support.
-
-    Testbed Requirements:
-    * One Fuchsia device
-    * One Whirlwind access point
-
-    Existing Fuchsia drivers do not yet support WNM features out-of-the-box, so these
-    tests check that WNM features are not enabled.
-    """
-
-    def setup_class(self):
-        if 'dut' in self.user_params and self.user_params[
-                'dut'] != 'fuchsia_devices':
-            raise AttributeError(
-                'WlanWirelessNetworkManagementTest is only relevant for Fuchsia devices.'
-            )
-
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-        if self.dut.device.association_mechanism != 'policy':
-            raise AttributeError('Must use WLAN policy layer to test WNM.')
-        self.access_point = self.access_points[0]
-
-    def teardown_class(self):
-        self.dut.disconnect()
-        self.access_point.stop_all_aps()
-
-    def teardown_test(self):
-        self.dut.disconnect()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name: str, begin_time: str):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def on_exception(self, test_name: str, begin_time: str):
-        super().on_exception(test_name, begin_time)
-        self.dut.disconnect()
-        self.access_point.stop_all_aps()
-
-    def setup_ap(
-        self,
-        ssid: str,
-        channel: int = hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset()):
-        """Sets up an AP using the provided parameters.
-
-        Args:
-            ssid: SSID for the AP.
-            channel: which channel number to set the AP to (default is
-                AP_DEFAULT_CHANNEL_2G).
-            wnm_features: Wireless Network Management features to enable
-                (default is no WNM features).
-        """
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=channel,
-                 ssid=ssid,
-                 security=None,
-                 wnm_features=wnm_features)
-
-    def _get_client_mac(self) -> str:
-        """Get the MAC address of the DUT client interface.
-
-        Returns:
-            str, MAC address of the DUT client interface.
-        Raises:
-            ValueError if there is no DUT client interface.
-            ConnectionError if the DUT interface query fails.
-        """
-        wlan_ifaces = self.dut.device.sl4f.wlan_lib.wlanGetIfaceIdList()
-        if wlan_ifaces.get('error'):
-            raise ConnectionError('Failed to get wlan interface IDs: %s' %
-                                  wlan_ifaces['error'])
-
-        for wlan_iface in wlan_ifaces['result']:
-            iface_info = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(
-                wlan_iface)
-            if iface_info.get('error'):
-                raise ConnectionError('Failed to query wlan iface: %s' %
-                                      iface_info['error'])
-
-            if iface_info['result']['role'] == 'Client':
-                return utils.mac_address_list_to_str(
-                    iface_info['result']['sta_addr'])
-        raise ValueError(
-            'Failed to get client interface mac address. No client interface found.'
-        )
-
-    def test_bss_transition_ap_supported_dut_unsupported(self):
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        wnm_features = frozenset(
-            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT])
-        self.setup_ap(ssid, wnm_features=wnm_features)
-        asserts.assert_true(self.dut.associate(ssid), 'Failed to associate.')
-        asserts.assert_true(self.dut.is_connected(), 'Failed to connect.')
-        client_mac = self._get_client_mac()
-
-        ext_capabilities = self.access_point.get_sta_extended_capabilities(
-            self.access_point.wlan_2g, client_mac)
-        asserts.assert_false(
-            ext_capabilities.bss_transition,
-            'DUT is incorrectly advertising BSS Transition Management support')
-
-    def test_wnm_sleep_mode_ap_supported_dut_unsupported(self):
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        wnm_features = frozenset([hostapd_constants.WnmFeature.WNM_SLEEP_MODE])
-        self.setup_ap(ssid, wnm_features=wnm_features)
-        asserts.assert_true(self.dut.associate(ssid), 'Failed to associate.')
-        asserts.assert_true(self.dut.is_connected(), 'Failed to connect.')
-        client_mac = self._get_client_mac()
-
-        ext_capabilities = self.access_point.get_sta_extended_capabilities(
-            self.access_point.wlan_2g, client_mac)
-        asserts.assert_false(
-            ext_capabilities.wnm_sleep_mode,
-            'DUT is incorrectly advertising WNM Sleep Mode support')
-
-    def test_btm_req_ignored_dut_unsupported(self):
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        wnm_features = frozenset(
-            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT])
-        # Setup 2.4 GHz AP.
-        self.setup_ap(ssid,
-                      channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                      wnm_features=wnm_features)
-
-        asserts.assert_true(self.dut.associate(ssid), 'Failed to associate.')
-        # Verify that DUT is actually associated (as seen from AP).
-        client_mac = self._get_client_mac()
-        asserts.assert_true(
-            client_mac
-            in self.access_point.get_stas(self.access_point.wlan_2g),
-            'Client MAC not included in list of associated STAs on the 2.4GHz band'
-        )
-
-        # Setup 5 GHz AP with same SSID.
-        self.setup_ap(ssid,
-                      channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                      wnm_features=wnm_features)
-
-        # Construct a BTM request.
-        dest_bssid = self.access_point.get_bssid_from_ssid(
-            ssid, self.access_point.wlan_5g)
-        dest_bssid_info = BssidInformation(
-            security=True, capabilities=BssidInformationCapabilities())
-        neighbor_5g_ap = NeighborReportElement(
-            dest_bssid,
-            dest_bssid_info,
-            operating_class=126,
-            channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            phy_type=PhyType.VHT)
-        btm_req = BssTransitionManagementRequest(
-            disassociation_imminent=True, candidate_list=[neighbor_5g_ap])
-
-        # Send BTM request from 2.4 GHz AP to DUT
-        self.access_point.send_bss_transition_management_req(
-            self.access_point.wlan_2g, client_mac, btm_req)
-
-        # Check that DUT has not reassociated.
-        REASSOC_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
-        while datetime.now(timezone.utc) < REASSOC_DEADLINE:
-            # Fail if DUT has reassociated to 5 GHz AP (as seen from AP).
-            if client_mac in self.access_point.get_stas(
-                    self.access_point.wlan_5g):
-                raise signals.TestFailure(
-                    'DUT unexpectedly roamed to target BSS after BTM request')
-            else:
-                time.sleep(0.25)
-
-        # DUT should have stayed associated to original AP.
-        asserts.assert_true(
-            client_mac
-            in self.access_point.get_stas(self.access_point.wlan_2g),
-            'DUT lost association on the 2.4GHz band after BTM request')
diff --git a/src/antlion/tests/wlan/misc/WlanInterfaceTest.py b/src/antlion/tests/wlan/misc/WlanInterfaceTest.py
deleted file mode 100644
index 352f704..0000000
--- a/src/antlion/tests/wlan/misc/WlanInterfaceTest.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import signals
-
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-
-class WlanInterfaceTest(WifiBaseTest):
-
-    def setup_class(self):
-        super().setup_class()
-        dut = self.user_params.get('dut', None)
-        if dut:
-            if dut == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif dut == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an Fuchsia device
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-
-    def test_destroy_iface(self):
-        """Test that we don't error out when destroying the WLAN interface.
-
-        Steps:
-        1. Find a wlan interface
-        2. Destroy it
-
-        Expected Result:
-        Verify there are no errors in destroying the wlan interface.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: WLAN
-        Priority: 1
-        """
-        wlan_interfaces = self.dut.get_wlan_interface_id_list()
-        if len(wlan_interfaces) < 1:
-            raise signals.TestFailure("Not enough wlan interfaces for test")
-        if not self.dut.destroy_wlan_interface(wlan_interfaces[0]):
-            raise signals.TestFailure("Failed to destroy WLAN interface")
-        raise signals.TestPass("Success")
diff --git a/src/antlion/tests/wlan/misc/WlanMiscScenarioTest.py b/src/antlion/tests/wlan/misc/WlanMiscScenarioTest.py
deleted file mode 100644
index ee4efa1..0000000
--- a/src/antlion/tests/wlan/misc/WlanMiscScenarioTest.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-
-class WlanMiscScenarioTest(WifiBaseTest):
-    """Random scenario tests, usually to reproduce certain bugs, that do not
-    fit into a specific test category, but should still be run in CI to catch
-    regressions.
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        dut = self.user_params.get('dut', None)
-        if dut:
-            if dut == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif dut == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an Fuchsia device
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-        self.access_point = self.access_points[0]
-
-    def teardown_class(self):
-        self.dut.disconnect()
-        self.access_point.stop_all_aps()
-
-    def teardown_test(self):
-        self.dut.disconnect()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def on_exception(self, test_name, begin_time):
-        super().on_exception(test_name, begin_time)
-        self.dut.disconnect()
-        self.access_point.stop_all_aps()
-
-    def test_connect_to_wpa2_after_wpa3_rejection(self):
-        """Test association to non-WPA3 network after receiving a WPA3
-        rejection, which was triggering a firmware hang.
-
-        Bug: https://bugs.fuchsia.dev/p/fuchsia/issues/detail?id=71233
-        """
-        # Setup a WPA3 network
-        wpa3_ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=wpa3_ssid,
-                 security=Security(security_mode='wpa3',
-                                   password=generate_random_password('wpa3')))
-        # Attempt to associate with wrong password, expecting failure
-        self.log.info('Attempting to associate WPA3 with wrong password.')
-        asserts.assert_false(
-            self.dut.associate(wpa3_ssid,
-                               target_pwd='wrongpass',
-                               target_security='wpa3'),
-            'Associated with WPA3 network using the wrong password')
-
-        self.access_point.stop_all_aps()
-
-        # Setup a WPA2 Network
-        wpa2_ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G)
-        wpa2_password = generate_random_password('wpa2')
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=wpa2_ssid,
-                 security=Security(security_mode='wpa2',
-                                   password=wpa2_password))
-
-        # Attempt to associate, expecting success
-        self.log.info('Attempting to associate with WPA2 network.')
-        asserts.assert_true(
-            self.dut.associate(wpa2_ssid,
-                               target_pwd=wpa2_password,
-                               target_security='wpa2'),
-            'Failed to associate with WPA2 network after a WPA3 rejection.')
diff --git a/src/antlion/tests/wlan/performance/ChannelSweepTest.py b/src/antlion/tests/wlan/performance/ChannelSweepTest.py
deleted file mode 100644
index 4c84213..0000000
--- a/src/antlion/tests/wlan/performance/ChannelSweepTest.py
+++ /dev/null
@@ -1,1106 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import time
-
-from statistics import pstdev
-
-from antlion import asserts
-from antlion import context
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.iperf_server import IPerfResult
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-N_CAPABILITIES_DEFAULT = [
-    hostapd_constants.N_CAPABILITY_LDPC, hostapd_constants.N_CAPABILITY_SGI20,
-    hostapd_constants.N_CAPABILITY_SGI40,
-    hostapd_constants.N_CAPABILITY_TX_STBC,
-    hostapd_constants.N_CAPABILITY_RX_STBC1
-]
-
-AC_CAPABILITIES_DEFAULT = [
-    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-    hostapd_constants.AC_CAPABILITY_RXLDPC,
-    hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-    hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-    hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-    hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN
-]
-
-DEFAULT_MIN_THROUGHPUT = 0
-DEFAULT_MAX_STD_DEV = 1
-DEFAULT_IPERF_TIMEOUT = 30
-
-DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR = 30
-GRAPH_CIRCLE_SIZE = 10
-IPERF_NO_THROUGHPUT_VALUE = 0
-MAX_2_4_CHANNEL = 14
-TIME_TO_SLEEP_BETWEEN_RETRIES = 1
-TIME_TO_WAIT_FOR_COUNTRY_CODE = 10
-WEP_HEX_STRING_LENGTH = 10
-
-MEGABITS_PER_SECOND = 'Mbps'
-
-
-def get_test_name(settings):
-    """Retrieves the test_name value from test_settings"""
-    return settings.get('test_name')
-
-
-class ChannelSweepTest(WifiBaseTest):
-    """Tests channel performance and regulatory compliance..
-
-    Testbed Requirement:
-    * One ACTS compatible device (dut)
-    * One Access Point
-    * One Linux Machine used as IPerfServer if running performance tests
-    Note: Performance tests should be done in isolated testbed.
-    """
-
-    def __init__(self, controllers):
-        super().__init__(controllers)
-        if 'channel_sweep_test_params' in self.user_params:
-            self.time_to_wait_for_ip_addr = self.user_params[
-                'channel_sweep_test_params'].get(
-                    'time_to_wait_for_ip_addr',
-                    DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR)
-        else:
-            self.time_to_wait_for_ip_addr = DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR
-
-    def setup_class(self):
-        super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
-
-        self.android_devices = getattr(self, 'android_devices', [])
-
-        self.access_point = self.access_points[0]
-        self.access_point.stop_all_aps()
-
-        self.iperf_server = None
-        self.iperf_client = None
-
-        self.channel_sweep_test_params = self.user_params.get(
-            'channel_sweep_test_params', {})
-        # Allows users to skip the iperf throughput measurements, just verifying
-        # association.
-        if not self.channel_sweep_test_params.get('skip_performance'):
-            try:
-                self.iperf_server = self.iperf_servers[0]
-                self.iperf_server.start()
-            except AttributeError:
-                self.log.warn(
-                    'Missing iperf config. Throughput cannot be measured, so only '
-                    'association will be tested.')
-
-            if hasattr(self, "iperf_clients") and self.iperf_clients:
-                self.iperf_client = self.iperf_clients[0]
-            else:
-                self.iperf_client = self.dut.create_iperf_client()
-
-        self.regulatory_results = "====CountryCode,Channel,Frequency,ChannelBandwith,Connected/Not-Connected====\n"
-
-    def teardown_class(self):
-        super().teardown_class()
-        output_path = context.get_current_context().get_base_output_path()
-        regulatory_save_path = '%s/ChannelSweepTest/%s' % (
-            output_path, "regulatory_results.txt")
-        f = open(regulatory_save_path, "w")
-        f.write(self.regulatory_results)
-        f.close()
-
-    def setup_test(self):
-        # TODO(fxb/46417): Uncomment when wlanClearCountry is implemented up any
-        # country code changes.
-        # for fd in self.fuchsia_devices:
-        #     phy_ids_response = fd.wlan_lib.wlanPhyIdList()
-        #     if phy_ids_response.get('error'):
-        #         raise ConnectionError(
-        #             'Failed to retrieve phy ids from FuchsiaDevice (%s). '
-        #             'Error: %s' % (fd.ip, phy_ids_response['error']))
-        #     for id in phy_ids_response['result']:
-        #         clear_country_response = fd.wlan_lib.wlanClearCountry(id)
-        #         if clear_country_response.get('error'):
-        #             raise EnvironmentError(
-        #                 'Failed to reset country code on FuchsiaDevice (%s). '
-        #                 'Error: %s' % (fd.ip, clear_country_response['error'])
-        #                 )
-        self.access_point.stop_all_aps()
-        for ad in self.android_devices:
-            ad.droid.wakeLockAcquireBright()
-            ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-        self.dut.disconnect()
-
-    def teardown_test(self):
-        for ad in self.android_devices:
-            ad.droid.wakeLockRelease()
-            ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def set_dut_country_code(self, country_code):
-        """Set the country code on the DUT. Then verify that the country
-        code was set successfully
-
-        Args:
-            country_code: string, the 2 character country code to set
-        """
-        self.log.info('Setting DUT country code to %s' % country_code)
-        country_code_response = self.dut.device.sl4f.regulatory_region_lib.setRegion(
-            country_code)
-        if country_code_response.get('error'):
-            raise EnvironmentError(
-                'Failed to set country code (%s) on DUT. Error: %s' %
-                (country_code, country_code_response['error']))
-
-        self.log.info('Verifying DUT country code was correctly set to %s.' %
-                      country_code)
-        phy_ids_response = self.dut.device.sl4f.wlan_lib.wlanPhyIdList()
-        if phy_ids_response.get('error'):
-            raise ConnectionError('Failed to get phy ids from DUT. Error: %s' %
-                                  (country_code, phy_ids_response['error']))
-
-        end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE
-        while time.time() < end_time:
-            for id in phy_ids_response['result']:
-                get_country_response = self.dut.device.sl4f.wlan_lib.wlanGetCountry(
-                    id)
-                if get_country_response.get('error'):
-                    raise ConnectionError(
-                        'Failed to query PHY ID (%s) for country. Error: %s' %
-                        (id, get_country_response['error']))
-
-                set_code = ''.join([
-                    chr(ascii_char)
-                    for ascii_char in get_country_response['result']
-                ])
-                if set_code != country_code:
-                    self.log.debug(
-                        'PHY (id: %s) has incorrect country code set. '
-                        'Expected: %s, Got: %s' % (id, country_code, set_code))
-                    break
-            else:
-                self.log.info('All PHYs have expected country code (%s)' %
-                              country_code)
-                break
-            time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
-        else:
-            raise EnvironmentError('Failed to set DUT country code to %s.' %
-                                   country_code)
-
-    def setup_ap(self, channel, channel_bandwidth, security_profile=None):
-        """Start network on AP with basic configuration.
-
-        Args:
-            channel: int, channel to use for network
-            channel_bandwidth: int, channel bandwidth in mhz to use for network,
-            security_profile: Security object, or None if open
-
-        Returns:
-            string, ssid of network running
-
-        Raises:
-            ConnectionError if network is not started successfully.
-        """
-        if channel > MAX_2_4_CHANNEL:
-            vht_bandwidth = channel_bandwidth
-        else:
-            vht_bandwidth = None
-
-        if channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_20MHZ:
-            n_capabilities = N_CAPABILITIES_DEFAULT + [
-                hostapd_constants.N_CAPABILITY_HT20
-            ]
-        elif (channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_40MHZ or
-              channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_80MHZ):
-            if hostapd_config.ht40_plus_allowed(channel):
-                extended_channel = [hostapd_constants.N_CAPABILITY_HT40_PLUS]
-            elif hostapd_config.ht40_minus_allowed(channel):
-                extended_channel = [hostapd_constants.N_CAPABILITY_HT40_MINUS]
-            else:
-                raise ValueError('Invalid Channel: %s' % channel)
-            n_capabilities = N_CAPABILITIES_DEFAULT + extended_channel
-        else:
-            raise ValueError('Invalid Bandwidth: %s' % channel_bandwidth)
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        try:
-            setup_ap(access_point=self.access_point,
-                     profile_name='whirlwind',
-                     channel=channel,
-                     security=security_profile,
-                     n_capabilities=n_capabilities,
-                     ac_capabilities=None,
-                     force_wmm=True,
-                     ssid=ssid,
-                     vht_bandwidth=vht_bandwidth,
-                     setup_bridge=True)
-        except Exception as err:
-            raise ConnectionError(
-                'Failed to setup ap on channel: %s, channel bandwidth: %smhz. '
-                'Error: %s' % (channel, channel_bandwidth, err))
-        else:
-            self.log.info(
-                'Network (ssid: %s) up on channel %s w/ channel bandwidth %smhz'
-                % (ssid, channel, channel_bandwidth))
-
-        return ssid
-
-    def get_and_verify_iperf_address(self, channel, device, interface=None):
-        """Get ip address from a devices interface and verify it belongs to
-        expected subnet based on APs DHCP config.
-
-        Args:
-            channel: int, channel network is running on, to determine subnet
-            device: device to get ip address for
-            interface (default: None): interface on device to get ip address.
-                If None, uses device.test_interface.
-
-        Returns:
-            String, ip address of device on given interface (or test_interface)
-
-        Raises:
-            ConnectionError, if device does not have a valid ip address after
-                all retries.
-        """
-        if channel <= MAX_2_4_CHANNEL:
-            subnet = self.access_point._AP_2G_SUBNET_STR
-        else:
-            subnet = self.access_point._AP_5G_SUBNET_STR
-        end_time = time.time() + self.time_to_wait_for_ip_addr
-        while time.time() < end_time:
-            if interface:
-                device_addresses = device.get_interface_ip_addresses(interface)
-            else:
-                device_addresses = device.get_interface_ip_addresses(
-                    device.test_interface)
-
-            if device_addresses['ipv4_private']:
-                for ip_addr in device_addresses['ipv4_private']:
-                    if utils.ip_in_subnet(ip_addr, subnet):
-                        return ip_addr
-                    else:
-                        self.log.debug(
-                            'Device has an ip address (%s), but it is not in '
-                            'subnet %s' % (ip_addr, subnet))
-            else:
-                self.log.debug(
-                    'Device does not have a valid ip address. Retrying.')
-            time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
-        raise ConnectionError('Device failed to get an ip address.')
-
-    def get_iperf_throughput(self,
-                             iperf_server_address,
-                             iperf_client_address,
-                             reverse=False):
-        """Run iperf between client and server and get the throughput.
-
-        Args:
-            iperf_server_address: string, ip address of running iperf server
-            iperf_client_address: string, ip address of iperf client (dut)
-            reverse (default: False): If True, run traffic in reverse direction,
-                from server to client.
-
-        Returns:
-            int, iperf throughput OR IPERF_NO_THROUGHPUT_VALUE, if iperf fails
-        """
-        if reverse:
-            self.log.info(
-                'Running IPerf traffic from server (%s) to dut (%s).' %
-                (iperf_server_address, iperf_client_address))
-            iperf_results_file = self.iperf_client.start(
-                iperf_server_address,
-                '-i 1 -t 10 -R -J',
-                'channel_sweep_rx',
-                timeout=DEFAULT_IPERF_TIMEOUT)
-        else:
-            self.log.info(
-                'Running IPerf traffic from dut (%s) to server (%s).' %
-                (iperf_client_address, iperf_server_address))
-            iperf_results_file = self.iperf_client.start(
-                iperf_server_address,
-                '-i 1 -t 10 -J',
-                'channel_sweep_tx',
-                timeout=DEFAULT_IPERF_TIMEOUT)
-        if iperf_results_file:
-            iperf_results = IPerfResult(
-                iperf_results_file, reporting_speed_units=MEGABITS_PER_SECOND)
-            return iperf_results.avg_send_rate
-        else:
-            return IPERF_NO_THROUGHPUT_VALUE
-
-    def log_to_file_and_throughput_data(self, channel, channel_bandwidth,
-                                        tx_throughput, rx_throughput):
-        """Write performance info to csv file and to throughput data.
-
-        Args:
-            channel: int, channel that test was run on
-            channel_bandwidth: int, channel bandwidth the test used
-            tx_throughput: float, throughput value from dut to iperf server
-            rx_throughput: float, throughput value from iperf server to dut
-        """
-        test_name = self.throughput_data['test']
-        output_path = context.get_current_context().get_base_output_path()
-        log_path = '%s/ChannelSweepTest/%s' % (output_path, test_name)
-        if not os.path.exists(log_path):
-            os.makedirs(log_path)
-        log_file = '%s/%s_%smhz.csv' % (log_path, test_name, channel_bandwidth)
-        self.log.info('Writing IPerf results for %s to %s' %
-                      (test_name, log_file))
-        with open(log_file, 'a') as csv_file:
-            csv_file.write('%s,%s,%s\n' %
-                           (channel, tx_throughput, rx_throughput))
-        self.throughput_data['results'][str(channel)] = {
-            'tx_throughput': tx_throughput,
-            'rx_throughput': rx_throughput
-        }
-
-    def write_graph(self):
-        """Create graph html files from throughput data, plotting channel vs
-        tx_throughput and channel vs rx_throughput.
-        """
-        # If performance measurement is skipped
-        if not self.iperf_server:
-            return
-
-        try:
-            from bokeh.plotting import ColumnDataSource
-            from bokeh.plotting import figure
-            from bokeh.plotting import output_file
-            from bokeh.plotting import save
-        except ImportError as e:
-            self.log.warn("bokeh is not installed: skipping creation of graphs. "
-                          "Note CSV files are still available. If graphs are "
-                          "desired, install antlion with the \"bokeh\" feature.")
-            return
-
-        output_path = context.get_current_context().get_base_output_path()
-        test_name = self.throughput_data['test']
-        channel_bandwidth = self.throughput_data['channel_bandwidth']
-        output_file_name = '%s/ChannelSweepTest/%s/%s_%smhz.html' % (
-            output_path, test_name, test_name, channel_bandwidth)
-        output_file(output_file_name)
-        channels = []
-        tx_throughputs = []
-        rx_throughputs = []
-        for channel in self.throughput_data['results']:
-            channels.append(str(channel))
-            tx_throughputs.append(
-                self.throughput_data['results'][channel]['tx_throughput'])
-            rx_throughputs.append(
-                self.throughput_data['results'][channel]['rx_throughput'])
-        channel_vs_throughput_data = ColumnDataSource(
-            data=dict(channels=channels,
-                      tx_throughput=tx_throughputs,
-                      rx_throughput=rx_throughputs))
-        TOOLTIPS = [('Channel', '@channels'),
-                    ('TX_Throughput', '@tx_throughput'),
-                    ('RX_Throughput', '@rx_throughput')]
-        channel_vs_throughput_graph = figure(title='Channels vs. Throughput',
-                                             x_axis_label='Channels',
-                                             x_range=channels,
-                                             y_axis_label='Throughput',
-                                             tooltips=TOOLTIPS)
-        channel_vs_throughput_graph.sizing_mode = 'stretch_both'
-        channel_vs_throughput_graph.title.align = 'center'
-        channel_vs_throughput_graph.line('channels',
-                                         'tx_throughput',
-                                         source=channel_vs_throughput_data,
-                                         line_width=2,
-                                         line_color='blue',
-                                         legend_label='TX_Throughput')
-        channel_vs_throughput_graph.circle('channels',
-                                           'tx_throughput',
-                                           source=channel_vs_throughput_data,
-                                           size=GRAPH_CIRCLE_SIZE,
-                                           color='blue')
-        channel_vs_throughput_graph.line('channels',
-                                         'rx_throughput',
-                                         source=channel_vs_throughput_data,
-                                         line_width=2,
-                                         line_color='red',
-                                         legend_label='RX_Throughput')
-        channel_vs_throughput_graph.circle('channels',
-                                           'rx_throughput',
-                                           source=channel_vs_throughput_data,
-                                           size=GRAPH_CIRCLE_SIZE,
-                                           color='red')
-
-        channel_vs_throughput_graph.legend.location = "top_left"
-        graph_file = save([channel_vs_throughput_graph])
-        self.log.info('Saved graph to %s' % graph_file)
-
-    def verify_standard_deviation(self, max_std_dev):
-        """Verifies the standard deviation of the throughput across the channels
-        does not exceed the max_std_dev value.
-
-        Args:
-            max_std_dev: float, max standard deviation of throughput for a test
-                to pass (in Mb/s)
-
-        Raises:
-            TestFailure, if standard deviation of throughput exceeds max_std_dev
-        """
-        # If performance measurement is skipped
-        if not self.iperf_server:
-            return
-        self.log.info('Verifying standard deviation across channels does not '
-                      'exceed max standard deviation of %s Mb/s' % max_std_dev)
-        tx_values = []
-        rx_values = []
-        for channel in self.throughput_data['results']:
-            if self.throughput_data['results'][channel][
-                    'tx_throughput'] is not None:
-                tx_values.append(
-                    self.throughput_data['results'][channel]['tx_throughput'])
-            if self.throughput_data['results'][channel][
-                    'rx_throughput'] is not None:
-                rx_values.append(
-                    self.throughput_data['results'][channel]['rx_throughput'])
-        tx_std_dev = pstdev(tx_values)
-        rx_std_dev = pstdev(rx_values)
-        if tx_std_dev > max_std_dev or rx_std_dev > max_std_dev:
-            asserts.fail(
-                'With %smhz channel bandwidth, throughput standard '
-                'deviation (tx: %s Mb/s, rx: %s Mb/s) exceeds max standard '
-                'deviation (%s Mb/s).' %
-                (self.throughput_data['channel_bandwidth'], tx_std_dev,
-                 rx_std_dev, max_std_dev))
-        else:
-            asserts.explicit_pass(
-                'Throughput standard deviation (tx: %s Mb/s, rx: %s Mb/s) '
-                'with %smhz channel bandwidth does not exceed maximum (%s Mb/s).'
-                % (tx_std_dev, rx_std_dev,
-                   self.throughput_data['channel_bandwidth'], max_std_dev))
-
-    def run_channel_performance_tests(self, settings):
-        """Test function for running channel performance tests. Used by both
-        explicit test cases and debug test cases from config. Runs a performance
-        test for each channel in test_channels with test_channel_bandwidth, then
-        writes a graph and csv file of the channel vs throughput.
-
-        Args:
-            settings: dict, containing the following test settings
-                test_channels: list of channels to test.
-                test_channel_bandwidth: int, channel bandwidth to use for test.
-                test_security (optional): string, security type to use for test.
-                min_tx_throughput (optional, default: 0): float, minimum tx
-                    throughput threshold to pass individual channel tests
-                    (in Mb/s).
-                min_rx_throughput (optional, default: 0): float, minimum rx
-                    throughput threshold to pass individual channel tests
-                    (in Mb/s).
-                max_std_dev (optional, default: 1): float, maximum standard
-                    deviation of throughput across all test channels to pass
-                    test (in Mb/s).
-                base_test_name (optional): string, test name prefix to use with
-                    generated subtests.
-                country_name (optional): string, country name from
-                    hostapd_constants to set on device.
-                country_code (optional): string, two-char country code to set on
-                    the DUT. Takes priority over country_name.
-                test_name (debug tests only): string, the test name for this
-                    parent test case from the config file. In explicit tests,
-                    this is not necessary.
-
-        Writes:
-            CSV file: channel, tx_throughput, rx_throughput
-                for every test channel.
-            Graph: channel vs tx_throughput & channel vs rx_throughput
-
-        Raises:
-            TestFailure, if throughput standard deviation across channels
-                exceeds max_std_dev
-
-            Example Explicit Test (see EOF for debug JSON example):
-            def test_us_2g_20mhz_wpa2(self):
-                self.run_channel_performance_tests(
-                        dict(
-                        test_channels=hostapd_constants.US_CHANNELS_2G,
-                        test_channel_bandwidth=20,
-                        test_security=hostapd_constants.WPA2_STRING,
-                        min_tx_throughput=2,
-                        min_rx_throughput=4,
-                        max_std_dev=0.75,
-                        country_code='US',
-                        base_test_name='test_us'))
-        """
-        test_channels = settings['test_channels']
-        test_channel_bandwidth = settings['test_channel_bandwidth']
-        test_security = settings.get('test_security', None)
-        test_name = settings.get('test_name', self.test_name)
-        base_test_name = settings.get('base_test_name', 'test')
-        min_tx_throughput = settings.get('min_tx_throughput',
-                                         DEFAULT_MIN_THROUGHPUT)
-        min_rx_throughput = settings.get('min_rx_throughput',
-                                         DEFAULT_MIN_THROUGHPUT)
-        max_std_dev = settings.get('max_std_dev', DEFAULT_MAX_STD_DEV)
-        country_code = settings.get('country_code')
-        country_name = settings.get('country_name')
-        country_label = None
-
-        if country_code:
-            country_label = country_code
-            self.set_dut_country_code(country_code)
-        elif country_name:
-            country_label = country_name
-            code = hostapd_constants.COUNTRY_CODE[country_name]['country_code']
-            self.set_dut_country_code(code)
-
-        self.throughput_data = {
-            'test': test_name,
-            'channel_bandwidth': test_channel_bandwidth,
-            'results': {}
-        }
-        test_list = []
-        for channel in test_channels:
-            sub_test_name = 'test_%schannel_%s_%smhz_%s_performance' % (
-                '%s_' % country_label if country_label else '', channel,
-                test_channel_bandwidth,
-                test_security if test_security else 'open')
-            test_list.append({
-                'test_name': sub_test_name,
-                'channel': int(channel),
-                'channel_bandwidth': int(test_channel_bandwidth),
-                'security': test_security,
-                'min_tx_throughput': min_tx_throughput,
-                'min_rx_throughput': min_rx_throughput
-            })
-        self.run_generated_testcases(self.get_channel_performance,
-                                     settings=test_list,
-                                     name_func=get_test_name)
-        self.log.info('Channel tests completed.')
-        self.write_graph()
-        self.verify_standard_deviation(max_std_dev)
-
-    def get_channel_performance(self, settings):
-        """Run a single channel performance test and logs results to csv file
-        and throughput data. Run with generated sub test cases in
-        run_channel_performance_tests.
-
-        1. Sets up network with test settings
-        2. Associates DUT
-        3. Runs traffic between DUT and iperf server (both directions)
-        4. Logs channel, tx_throughput (Mb/s), and rx_throughput (Mb/s) to
-           log file and throughput data.
-        5. Checks throughput values against minimum throughput thresholds.
-
-        Args:
-            settings: see run_channel_performance_tests
-
-        Raises:
-            TestFailure, if throughput (either direction) is less than
-                the directions given minimum throughput threshold.
-        """
-        channel = settings['channel']
-        channel_bandwidth = settings['channel_bandwidth']
-        security = settings['security']
-        test_name = settings['test_name']
-        min_tx_throughput = settings['min_tx_throughput']
-        min_rx_throughput = settings['min_rx_throughput']
-        if security:
-            if security == hostapd_constants.WEP_STRING:
-                password = utils.rand_hex_str(WEP_HEX_STRING_LENGTH)
-            else:
-                password = utils.rand_ascii_str(
-                    hostapd_constants.MIN_WPA_PSK_LENGTH)
-            security_profile = Security(security_mode=security,
-                                        password=password)
-            target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                security)
-        else:
-            password = None
-            security_profile = None
-            target_security = None
-        ssid = self.setup_ap(channel, channel_bandwidth, security_profile)
-        associated = self.dut.associate(ssid,
-                                        target_pwd=password,
-                                        target_security=target_security)
-        if not associated:
-            if self.iperf_server:
-                self.log_to_file_and_throughput_data(channel,
-                                                     channel_bandwidth, None,
-                                                     None)
-            asserts.fail('Device failed to associate with network %s' % ssid)
-        self.log.info('DUT (%s) connected to network %s.' %
-                      (self.dut.device.ip, ssid))
-        if self.iperf_server:
-            self.iperf_server.renew_test_interface_ip_address()
-            self.log.info(
-                'Getting ip address for iperf server. Will retry for %s seconds.'
-                % self.time_to_wait_for_ip_addr)
-            iperf_server_address = self.get_and_verify_iperf_address(
-                channel, self.iperf_server)
-            self.log.info(
-                'Getting ip address for DUT. Will retry for %s seconds.' %
-                self.time_to_wait_for_ip_addr)
-            iperf_client_address = self.get_and_verify_iperf_address(
-                channel, self.dut.device, self.iperf_client.test_interface)
-            tx_throughput = self.get_iperf_throughput(iperf_server_address,
-                                                      iperf_client_address)
-            rx_throughput = self.get_iperf_throughput(iperf_server_address,
-                                                      iperf_client_address,
-                                                      reverse=True)
-            self.log_to_file_and_throughput_data(channel, channel_bandwidth,
-                                                 tx_throughput, rx_throughput)
-            self.log.info('Throughput (tx, rx): (%s Mb/s, %s Mb/s), '
-                          'Minimum threshold (tx, rx): (%s Mb/s, %s Mb/s)' %
-                          (tx_throughput, rx_throughput, min_tx_throughput,
-                           min_rx_throughput))
-            base_message = (
-                'Actual throughput (on channel: %s, channel bandwidth: '
-                '%s, security: %s)' % (channel, channel_bandwidth, security))
-            if (not tx_throughput or not rx_throughput
-                    or tx_throughput < min_tx_throughput
-                    or rx_throughput < min_rx_throughput):
-                asserts.fail('%s below the minimum threshold.' % base_message)
-            asserts.explicit_pass('%s above the minimum threshold.' %
-                                  base_message)
-        else:
-            asserts.explicit_pass(
-                'Association test pass. No throughput measurement taken.')
-
-    def verify_regulatory_compliance(self, settings):
-        """Test function for regulatory compliance tests. Verify device complies
-        with provided regulatory requirements.
-
-        Args:
-            settings: dict, containing the following test settings
-                test_channels: dict, mapping channels to a set of the channel
-                    bandwidths to test (see example for using JSON). Defaults
-                    to hostapd_constants.ALL_CHANNELS.
-                country_code: string, two-char country code to set on device
-                    (prioritized over country_name)
-                country_name: string, country name from hostapd_constants to set
-                    on device.
-                base_test_name (optional): string, test name prefix to use with
-                    generatedsubtests.
-                test_name: string, the test name for this
-                    parent test case from the config file. In explicit tests,
-                    this is not necessary.
-        """
-        country_name = settings.get('country_name')
-        country_code = settings.get('country_code')
-        if not (country_code or country_name):
-            raise ValueError('No country code or name provided.')
-
-        test_channels = settings.get('test_channels',
-                                     hostapd_constants.ALL_CHANNELS)
-        allowed_channels = settings['allowed_channels']
-
-        base_test_name = settings.get('base_test_name', 'test_compliance')
-
-        if country_code:
-            code = country_code
-        else:
-            code = hostapd_constants.COUNTRY_CODE[country_name]['country_code']
-
-        self.set_dut_country_code(code)
-
-        test_list = []
-        for channel in test_channels:
-            for channel_bandwidth in test_channels[channel]:
-                sub_test_name = '%s_channel_%s_%smhz' % (
-                    base_test_name, channel, channel_bandwidth)
-                should_associate = (channel in allowed_channels
-                                    and channel_bandwidth
-                                    in allowed_channels[channel])
-                # Note: these int conversions because when these tests are
-                # imported via JSON, they may be strings since the channels
-                # will be keys. This makes the json/list test_channels param
-                # behave exactly like the in code dict/set test_channels.
-                test_list.append({
-                    'country_code': code,
-                    'channel': int(channel),
-                    'channel_bandwidth': int(channel_bandwidth),
-                    'should_associate': should_associate,
-                    'test_name': sub_test_name
-                })
-        self.run_generated_testcases(test_func=self.verify_channel_compliance,
-                                     settings=test_list,
-                                     name_func=get_test_name)
-
-    def verify_channel_compliance(self, settings):
-        """Verify device complies with provided regulatory requirements for a
-        specific channel and channel bandwidth. Run with generated test cases
-        in the verify_regulatory_compliance parent test.
-_
-        Args:
-            settings: see verify_regulatory_compliance`
-        """
-        channel = settings['channel']
-        channel_bandwidth = settings['channel_bandwidth']
-        code = settings['country_code']
-        should_associate = settings['should_associate']
-
-        ssid = self.setup_ap(channel, channel_bandwidth)
-
-        self.log.info(
-            'Attempting to associate with network (%s) on channel %s @ %smhz. '
-            'Expected behavior: %s' %
-            (ssid, channel, channel_bandwidth, 'Device should associate'
-             if should_associate else 'Device should NOT associate.'))
-
-        associated = self.dut.associate(ssid)
-
-        regulatory_result_marker = "REGTRACKER: %s,%s,%s,%s,%s" % (
-            code, channel, '2.4' if channel < 36 else '5', channel_bandwidth,
-            'c' if associated else 'nc')
-        self.regulatory_results += regulatory_result_marker + "\n"
-        self.log.info(regulatory_result_marker)
-
-        if associated == should_associate:
-            asserts.explicit_pass(
-                'Device complied with %s regulatory requirement for channel %s '
-                ' with channel bandwidth %smhz. %s' %
-                (code, channel, channel_bandwidth,
-                 'Associated.' if associated else 'Refused to associate.'))
-        else:
-            asserts.fail(
-                'Device failed compliance with regulatory domain %s for '
-                'channel %s with channel bandwidth %smhz. Expected: %s, Got: %s'
-                % (code, channel, channel_bandwidth, 'Should associate'
-                   if should_associate else 'Should not associate',
-                   'Associated' if associated else 'Did not associate'))
-
-    # Helper functions to allow explicit tests throughput and standard deviation
-    # thresholds to be passed in via config.
-    def _get_min_tx_throughput(self, test_name):
-        return self.user_params.get('channel_sweep_test_params',
-                                    {}).get(test_name,
-                                            {}).get('min_tx_throughput',
-                                                    DEFAULT_MIN_THROUGHPUT)
-
-    def _get_min_rx_throughput(self, test_name):
-        return self.user_params.get('channel_sweep_test_params',
-                                    {}).get(test_name,
-                                            {}).get('min_rx_throughput',
-                                                    DEFAULT_MIN_THROUGHPUT)
-
-    def _get_max_std_dev(self, test_name):
-        return self.user_params.get('channel_sweep_test_params',
-                                    {}).get(test_name,
-                                            {}).get('min_std_dev',
-                                                    DEFAULT_MAX_STD_DEV)
-
-    # Channel Performance of US Channels: 570 Test Cases
-    # 36 Test Cases
-    def test_us_20mhz_open_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 35 Test Cases
-    def test_us_40mhz_open_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 24 Test Cases
-    def test_us_80mhz_open_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 36 Test Cases
-    def test_us_20mhz_wep_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 test_security=hostapd_constants.WEP_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 35 Test Cases
-    def test_us_40mhz_wep_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 test_security=hostapd_constants.WEP_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 24 Test Cases
-    def test_us_80mhz_wep_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 test_security=hostapd_constants.WEP_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 36 Test Cases
-    def test_us_20mhz_wpa_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 test_security=hostapd_constants.WPA_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 35 Test Cases
-    def test_us_40mhz_wpa_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 test_security=hostapd_constants.WPA_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 24 Test Cases
-    def test_us_80mhz_wpa_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 test_security=hostapd_constants.WPA_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 36 Test Cases
-    def test_us_20mhz_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 test_security=hostapd_constants.WPA2_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 35 Test Cases
-    def test_us_40mhz_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 test_security=hostapd_constants.WPA2_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 24 Test Cases
-    def test_us_80mhz_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 test_security=hostapd_constants.WPA2_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 36 Test Cases
-    def test_us_20mhz_wpa_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 test_security=hostapd_constants.WPA_MIXED_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 35 Test Cases
-    def test_us_40mhz_wpa_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 test_security=hostapd_constants.WPA_MIXED_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 24 Test Cases
-    def test_us_80mhz_wpa_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 test_security=hostapd_constants.WPA_MIXED_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 36 Test Cases
-    def test_us_20mhz_wpa3_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G,
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_20MHZ,
-                 test_security=hostapd_constants.WPA3_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 35 Test Cases
-    def test_us_40mhz_wpa3_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_2G +
-                 hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_40MHZ,
-                 test_security=hostapd_constants.WPA3_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    # 24 Test Cases
-    def test_us_80mhz_wpa3_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                 test_channel_bandwidth=hostapd_constants.
-                 CHANNEL_BANDWIDTH_80MHZ,
-                 test_security=hostapd_constants.WPA3_STRING,
-                 base_test_name=self.test_name,
-                 min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                 min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                 max_std_dev=self._get_max_std_dev(self.test_name)))
-
-    def test_channel_performance_debug(self):
-        """Run channel performance test cases from the ACTS config file.
-
-        Example:
-        "channel_sweep_test_params": {
-            "debug_channel_performance_tests": [
-                {
-                    "test_name": "test_123_20mhz_wpa2_performance"
-                    "test_channels": [1, 2, 3],
-                    "test_channel_bandwidth": 20,
-                    "test_security": "wpa2",
-                    "base_test_name": "test_123_perf",
-                    "min_tx_throughput": 1.1,
-                    "min_rx_throughput": 3,
-                    "max_std_dev": 0.5
-                },
-                ...
-            ]
-        }
-
-        """
-        asserts.skip_if(
-            'debug_channel_performance_tests'
-            not in self.user_params.get('channel_sweep_test_params', {}),
-            'No custom channel performance tests provided in config.')
-        base_tests = self.user_params['channel_sweep_test_params'][
-            'debug_channel_performance_tests']
-        self.run_generated_testcases(self.run_channel_performance_tests,
-                                     settings=base_tests,
-                                     name_func=get_test_name)
-
-    def test_regulatory_compliance(self):
-        """Run regulatory compliance test case from the ACTS config file.
-        Note: only one country_name OR country_code is required.
-
-        Example:
-        "channel_sweep_test_params": {
-            "regulatory_compliance_tests": [
-                {
-                    "test_name": "test_japan_compliance_1_13_36"
-                    "country_name": "JAPAN",
-                    "country_code": "JP",
-                    "test_channels": {
-                        "1": [20, 40], "13": [40], "36": [20, 40, 80]
-                    },
-                    "allowed_channels": {
-                        "1": [20, 40], "36": [20, 40, 80]
-                    },
-                    "base_test_name": "test_japan"
-                },
-                ...
-            ]
-        }
-        """
-        asserts.skip_if(
-            'regulatory_compliance_tests'
-            not in self.user_params.get('channel_sweep_test_params', {}),
-            'No custom regulatory compliance tests provided in config.')
-        base_tests = self.user_params['channel_sweep_test_params'][
-            'regulatory_compliance_tests']
-        self.run_generated_testcases(self.verify_regulatory_compliance,
-                                     settings=base_tests,
-                                     name_func=get_test_name)
diff --git a/src/antlion/tests/wlan/performance/WlanRvrTest.py b/src/antlion/tests/wlan/performance/WlanRvrTest.py
deleted file mode 100644
index 0c11c4b..0000000
--- a/src/antlion/tests/wlan/performance/WlanRvrTest.py
+++ /dev/null
@@ -1,992 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import os
-import time
-import logging
-
-from antlion import asserts, context
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.radvd import Radvd
-from antlion.controllers.ap_lib.radvd_config import RadvdConfig
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.attenuator import get_attenuators_for_device
-from antlion.controllers.iperf_server import IPerfResult
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.utils import rand_ascii_str
-
-AP_11ABG_PROFILE_NAME = 'whirlwind_11ag_legacy'
-REPORTING_SPEED_UNITS = 'Mbps'
-
-RVR_GRAPH_SUMMARY_FILE = 'rvr_summary.html'
-
-DAD_TIMEOUT_SEC = 30
-
-
-def create_rvr_graph(test_name, graph_path, graph_data):
-    """Creates the RvR graphs
-    Args:
-        test_name: The name of test that was run.  This is the title of the
-            graph
-        graph_path: Where to put the graph html file.
-        graph_data: A dictionary of the data to be graphed.
-    Returns:
-        A list of bokeh graph objects.
-        """
-    try:
-        from bokeh.plotting import ColumnDataSource
-        from bokeh.plotting import figure
-        from bokeh.plotting import output_file
-        from bokeh.plotting import save
-    except ImportError as e:
-        logging.warn("bokeh is not installed: skipping creation of graphs. "
-                     "Note CSV files are still available. If graphs are "
-                     "desired, install antlion with the \"bokeh\" feature.")
-        return []
-
-    output_file('%srvr_throughput_vs_attn_%s.html' % (graph_path, test_name),
-                title=test_name)
-    throughput_vs_attn_data = ColumnDataSource(data=dict(
-        relative_attn=graph_data['throughput_vs_attn']['relative_attn'],
-        throughput=graph_data['throughput_vs_attn']['throughput']))
-    TOOLTIPS = [("Attenuation", "@relative_attn"),
-                ("Throughput", "@throughput")]
-    throughput_vs_attn_graph = figure(
-        title="Throughput vs Relative Attenuation (Test Case: %s)" % test_name,
-        x_axis_label=graph_data['throughput_vs_attn']['x_label'],
-        y_axis_label=graph_data['throughput_vs_attn']['y_label'],
-        x_range=graph_data['throughput_vs_attn']['relative_attn'],
-        tooltips=TOOLTIPS)
-    throughput_vs_attn_graph.sizing_mode = 'stretch_width'
-    throughput_vs_attn_graph.title.align = 'center'
-    throughput_vs_attn_graph.line('relative_attn',
-                                  'throughput',
-                                  source=throughput_vs_attn_data,
-                                  line_width=2)
-    throughput_vs_attn_graph.circle('relative_attn',
-                                    'throughput',
-                                    source=throughput_vs_attn_data,
-                                    size=10)
-    save([throughput_vs_attn_graph])
-    return [throughput_vs_attn_graph]
-
-
-def write_csv_rvr_data(test_name, csv_path, csv_data):
-    """Writes the CSV data for the RvR test
-    Args:
-        test_name: The name of test that was run.
-        csv_path: Where to put the csv file.
-        csv_data: A dictionary of the data to be put in the csv file.
-    """
-    csv_file_name = '%srvr_throughput_vs_attn_%s.csv' % (csv_path, test_name)
-    throughput = csv_data['throughput_vs_attn']['throughput']
-    relative_attn = csv_data['throughput_vs_attn']['relative_attn']
-    with open(csv_file_name, 'w+') as csv_fileId:
-        csv_fileId.write('%s,%s\n' %
-                         (csv_data['throughput_vs_attn']['x_label'],
-                          csv_data['throughput_vs_attn']['y_label']))
-        for csv_loop_counter in range(0, len(relative_attn)):
-            csv_fileId.write('%s,%s\n' % (int(relative_attn[csv_loop_counter]),
-                                          throughput[csv_loop_counter]))
-
-
-class WlanRvrTest(WifiBaseTest):
-    """Tests running WLAN RvR.
-
-    Test Bed Requirement:
-    * One Android device or Fuchsia device
-    * One Access Point
-    * One attenuator
-    * One Linux iPerf Server
-    """
-
-    def __init__(self, controllers):
-        super().__init__(controllers)
-        self.rvr_graph_summary = []
-
-    def setup_class(self):
-        super().setup_class()
-        if 'dut' in self.user_params:
-            if self.user_params['dut'] == 'fuchsia_devices':
-                self.dut = create_wlan_device(self.fuchsia_devices[0])
-            elif self.user_params['dut'] == 'android_devices':
-                self.dut = create_wlan_device(self.android_devices[0])
-            else:
-                raise ValueError('Invalid DUT specified in config. (%s)' %
-                                 self.user_params['dut'])
-        else:
-            # Default is an android device, just like the other tests
-            self.dut = create_wlan_device(self.android_devices[0])
-
-        self.starting_attn = (self.user_params['rvr_settings'].get(
-            'starting_attn', 0))
-
-        self.ending_attn = (self.user_params['rvr_settings'].get(
-            'ending_attn', 95))
-
-        self.step_size_in_db = (self.user_params['rvr_settings'].get(
-            'step_size_in_db', 1))
-
-        self.dwell_time_in_secs = (self.user_params['rvr_settings'].get(
-            'dwell_time_in_secs', 10))
-
-        self.reverse_rvr_after_forward = bool(
-            (self.user_params['rvr_settings'].get('reverse_rvr_after_forward',
-                                                  None)))
-
-        self.iperf_flags = (self.user_params['rvr_settings'].get(
-            'iperf_flags', '-i 1'))
-
-        self.iperf_flags = '%s -t %s -J' % (self.iperf_flags,
-                                            self.dwell_time_in_secs)
-
-        self.debug_loop_count = (self.user_params['rvr_settings'].get(
-            'debug_loop_count', 1))
-
-        self.debug_pre_traffic_cmd = (self.user_params['rvr_settings'].get(
-            'debug_pre_traffic_cmd', None))
-
-        self.debug_post_traffic_cmd = (self.user_params['rvr_settings'].get(
-            'debug_post_traffic_cmd', None))
-
-        self.router_adv_daemon = None
-
-        if self.ending_attn == 'auto':
-            self.use_auto_end = True
-            self.ending_attn = 100
-            if self.step_size_in_db > 2:
-                asserts.fail('When using an ending attenuation of \'auto\' '
-                             'please use a value < 2db.  Larger jumps will '
-                             'break the test reporting.')
-
-        self.access_point = self.access_points[0]
-        self.attenuators_2g = get_attenuators_for_device(
-            self.controller_configs['AccessPoint'][0]['Attenuator'],
-            self.attenuators, 'attenuator_ports_wifi_2g')
-        self.attenuators_5g = get_attenuators_for_device(
-            self.controller_configs['AccessPoint'][0]['Attenuator'],
-            self.attenuators, 'attenuator_ports_wifi_5g')
-
-        self.iperf_server = self.iperf_servers[0]
-
-        if hasattr(self, "iperf_clients") and self.iperf_clients:
-            self.dut_iperf_client = self.iperf_clients[0]
-        else:
-            self.dut_iperf_client = self.dut.create_iperf_client()
-
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        if self.iperf_server:
-            self.iperf_server.start()
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockAcquireBright()
-                ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        self.cleanup_tests()
-
-    def teardown_class(self):
-        if self.router_adv_daemon:
-            self.router_adv_daemon.stop()
-        try:
-            from bokeh.plotting import output_file
-            from bokeh.plotting import save
-
-            output_path = context.get_current_context().get_base_output_path()
-            test_class_name = context.get_current_context().test_class_name
-
-            output_file(f'{output_path}/{test_class_name}/rvr_summary.html',
-                        title='RvR Sumamry')
-            save(list(self.rvr_graph_summary))
-        except ImportError as e:
-            logging.warn("bokeh is not installed: skipping creation of graphs. "
-                         "Note CSV files are still available. If graphs are "
-                         "desired, install antlion with the \"bokeh\" feature.")
-        except Exception as e:
-            self.log.error(f'Unable to generate RvR summary file: {e}')
-
-        super().teardown_class()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.cleanup_tests()
-
-    def cleanup_tests(self):
-        """Cleans up all the dangling pieces of the tests, for example, the
-        iperf server, radvd, all the currently running APs, and the various
-        clients running during the tests.
-        """
-
-        if self.router_adv_daemon:
-            output_path = context.get_current_context().get_base_output_path()
-            full_output_path = os.path.join(output_path, "radvd_log.txt")
-            radvd_log_file = open(full_output_path, 'w')
-            radvd_log_file.write(self.router_adv_daemon.pull_logs())
-            radvd_log_file.close()
-            self.router_adv_daemon.stop()
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockRelease()
-                ad.droid.goToSleepNow()
-        if self.iperf_server:
-            self.iperf_server.stop()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def _wait_for_ipv4_addrs(self):
-        """Wait for an IPv4 addresses to become available on the DUT and iperf
-        server.
-
-        Returns:
-           A string containing the private IPv4 address of the iperf server.
-
-        Raises:
-            TestFailure: If unable to acquire a IPv4 address.
-        """
-        ip_address_checker_counter = 0
-        ip_address_checker_max_attempts = 3
-        while ip_address_checker_counter < ip_address_checker_max_attempts:
-            self.iperf_server.renew_test_interface_ip_address()
-            iperf_server_ip_addresses = (
-                self.iperf_server.get_interface_ip_addresses(
-                    self.iperf_server.test_interface))
-            dut_ip_addresses = self.dut.device.get_interface_ip_addresses(
-                self.dut_iperf_client.test_interface)
-
-            self.log.info(
-                'IPerf server IP info: {}'.format(iperf_server_ip_addresses))
-            self.log.info('DUT IP info: {}'.format(dut_ip_addresses))
-
-            if not iperf_server_ip_addresses['ipv4_private']:
-                self.log.warn('Unable to get the iperf server IPv4 '
-                              'address. Retrying...')
-                ip_address_checker_counter += 1
-                time.sleep(1)
-                continue
-
-            if dut_ip_addresses['ipv4_private']:
-                return iperf_server_ip_addresses['ipv4_private'][0]
-
-            self.log.warn('Unable to get the DUT IPv4 address starting at '
-                          'attenuation "{}". Retrying...'.format(
-                              self.starting_attn))
-            ip_address_checker_counter += 1
-            time.sleep(1)
-
-        asserts.fail(
-            'IPv4 addresses are not available on both the DUT and iperf server.'
-        )
-
-    # TODO (b/258264565): Merge with fuchsia_device wait_for_ipv6_addr.
-    def _wait_for_dad(self, device, test_interface):
-        """Wait for Duplicate Address Detection to resolve so that an
-        private-local IPv6 address is available for test.
-
-        Args:
-            device: implementor of get_interface_ip_addresses
-            test_interface: name of interface that DAD is operating on
-
-        Returns:
-            A string containing the private-local IPv6 address of the device.
-
-        Raises:
-            TestFailure: If unable to acquire an IPv6 address.
-        """
-        now = time.time()
-        start = now
-        elapsed = now - start
-
-        while elapsed < DAD_TIMEOUT_SEC:
-            addrs = device.get_interface_ip_addresses(test_interface)
-            now = time.time()
-            elapsed = now - start
-            if addrs['ipv6_private_local']:
-                # DAD has completed
-                addr = addrs['ipv6_private_local'][0]
-                self.log.info('DAD resolved with "{}" after {}s'.format(
-                    addr, elapsed))
-                return addr
-            time.sleep(1)
-        else:
-            asserts.fail(
-                'Unable to acquire a private-local IPv6 address for testing '
-                'after {}s'.format(elapsed))
-
-    def run_rvr(self,
-                ssid,
-                security_mode=None,
-                password=None,
-                band='2g',
-                traffic_dir='tx',
-                ip_version=4):
-        """Setups and runs the RvR test
-
-        Args:
-            ssid: The SSID for the client to associate to.
-            password: Password for the network, if necessary.
-            band: 2g or 5g
-            traffic_dir: rx or tx, bi is not supported by iperf3
-            ip_version: 4 or 6
-
-        Returns:
-            The bokeh graph data.
-        """
-        throughput = []
-        relative_attn = []
-        if band == '2g':
-            rvr_attenuators = self.attenuators_2g
-        elif band == '5g':
-            rvr_attenuators = self.attenuators_5g
-        else:
-            raise ValueError('Invalid WLAN band specified: %s' % band)
-        if ip_version == 6:
-            self.router_adv_daemon = Radvd(
-                self.access_point.ssh,
-                self.access_point.interfaces.get_bridge_interface()[0])
-            radvd_config = RadvdConfig()
-            self.router_adv_daemon.start(radvd_config)
-
-        for _ in range(0, self.debug_loop_count):
-            for rvr_attenuator in rvr_attenuators:
-                rvr_attenuator.set_atten(self.starting_attn)
-
-            associate_counter = 0
-            associate_max_attempts = 3
-            while associate_counter < associate_max_attempts:
-                if self.dut.associate(
-                        ssid,
-                        target_pwd=password,
-                        target_security=hostapd_constants.
-                        SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                            security_mode),
-                        check_connectivity=False):
-                    break
-                else:
-                    associate_counter += 1
-            else:
-                asserts.fail('Unable to associate at starting '
-                             'attenuation: %s' % self.starting_attn)
-
-            if ip_version == 4:
-                iperf_server_ip_address = self._wait_for_ipv4_addrs()
-            elif ip_version == 6:
-                self.iperf_server.renew_test_interface_ip_address()
-                self.log.info('Waiting for iperf server to complete Duplicate '
-                              'Address Detection...')
-                iperf_server_ip_address = self._wait_for_dad(
-                    self.iperf_server, self.iperf_server.test_interface)
-
-                self.log.info('Waiting for DUT to complete Duplicate Address '
-                              'Detection for "{}"...'.format(
-                                  self.dut_iperf_client.test_interface))
-                _ = self._wait_for_dad(self.dut.device,
-                                       self.dut_iperf_client.test_interface)
-            else:
-                raise ValueError('Invalid IP version: {}'.format(ip_version))
-
-            throughput, relative_attn = (self.rvr_loop(
-                traffic_dir,
-                rvr_attenuators,
-                iperf_server_ip_address,
-                ip_version,
-                throughput=throughput,
-                relative_attn=relative_attn))
-            if self.reverse_rvr_after_forward:
-                throughput, relative_attn = self.rvr_loop(
-                    traffic_dir,
-                    rvr_attenuators,
-                    iperf_server_ip_address,
-                    ip_version,
-                    ssid=ssid,
-                    security_mode=security_mode,
-                    password=password,
-                    reverse=True,
-                    throughput=throughput,
-                    relative_attn=relative_attn)
-            self.dut.disconnect()
-
-        throughput_vs_attn = {
-            'throughput': throughput,
-            'relative_attn': relative_attn,
-            'x_label': 'Attenuation(db)',
-            'y_label': 'Throughput(%s)' % REPORTING_SPEED_UNITS
-        }
-        graph_data = {'throughput_vs_attn': throughput_vs_attn}
-        return graph_data
-
-    def rvr_loop(self,
-                 traffic_dir,
-                 rvr_attenuators,
-                 iperf_server_ip_address,
-                 ip_version,
-                 ssid=None,
-                 security_mode=None,
-                 password=None,
-                 reverse=False,
-                 throughput=None,
-                 relative_attn=None):
-        """The loop that goes through each attenuation level and runs the iperf
-        throughput pair.
-        Args:
-            traffic_dir: The traffic direction from the perspective of the DUT.
-            rvr_attenuators: A list of attenuators to set.
-            iperf_server_ip_address: The IP address of the iperf server.
-            ssid: The ssid of the wireless network that the should associated
-                to.
-            password: Password of the wireless network.
-            reverse: Whether to run RvR test starting from the highest
-                attenuation and going to the lowest.  This is run after the
-                normal low attenuation to high attenuation RvR test.
-            throughput: The list of throughput data for the test.
-            relative_attn: The list of attenuation data for the test.
-
-        Returns:
-            throughput: The list of throughput data for the test.
-            relative_attn: The list of attenuation data for the test.
-            """
-        iperf_flags = self.iperf_flags
-        if traffic_dir == 'rx':
-            iperf_flags = '%s -R' % self.iperf_flags
-        starting_attn = self.starting_attn
-        ending_attn = self.ending_attn
-        step_size_in_db = self.step_size_in_db
-        if reverse:
-            starting_attn = self.ending_attn
-            ending_attn = self.starting_attn
-            step_size_in_db = step_size_in_db * -1
-            self.dut.disconnect()
-        for step in range(starting_attn, ending_attn, step_size_in_db):
-            try:
-                for attenuator in rvr_attenuators:
-                    attenuator.set_atten(step)
-            except ValueError as e:
-                self.log.error(
-                    f'{step} is beyond the max or min of the testbed '
-                    f'attenuator\'s capability. Stopping. {e}')
-                break
-            self.log.info('Set relative attenuation to %s db' % step)
-
-            associated = self.dut.is_connected()
-            if associated:
-                self.log.info('DUT is currently associated.')
-            else:
-                self.log.info('DUT is not currently associated.')
-
-            if reverse:
-                if not associated:
-                    self.log.info('Trying to associate at relative '
-                                  'attenuation of %s db' % step)
-                    if self.dut.associate(
-                            ssid,
-                            target_pwd=password,
-                            target_security=hostapd_constants.
-                            SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                                security_mode),
-                            check_connectivity=False):
-                        associated = True
-                        self.log.info('Successfully associated.')
-                    else:
-                        associated = False
-                        self.log.info(
-                            'Association failed. Marking a 0 %s for'
-                            ' throughput. Skipping running traffic.' %
-                            REPORTING_SPEED_UNITS)
-            attn_value_inserted = False
-            value_to_insert = str(step)
-            while not attn_value_inserted:
-                if value_to_insert in relative_attn:
-                    value_to_insert = '%s ' % value_to_insert
-                else:
-                    relative_attn.append(value_to_insert)
-                    attn_value_inserted = True
-
-            dut_ip_addresses = self.dut.device.get_interface_ip_addresses(
-                self.dut_iperf_client.test_interface)
-            if ip_version == 4:
-                if not dut_ip_addresses['ipv4_private']:
-                    self.log.info('DUT does not have an IPv4 address. '
-                                  'Traffic attempt to be run if the server '
-                                  'is pingable.')
-                else:
-                    self.log.info('DUT has the following IPv4 address: "%s"' %
-                                  dut_ip_addresses['ipv4_private'][0])
-            elif ip_version == 6:
-                if not dut_ip_addresses['ipv6_private_local']:
-                    self.log.info('DUT does not have an IPv6 address. '
-                                  'Traffic attempt to be run if the server '
-                                  'is pingable.')
-                else:
-                    self.log.info('DUT has the following IPv6 address: "%s"' %
-                                  dut_ip_addresses['ipv6_private_local'][0])
-            server_pingable = self.dut.can_ping(iperf_server_ip_address)
-            if not server_pingable:
-                self.log.info('Iperf server "%s" is not pingable. Marking '
-                              'a 0 %s for throughput. Skipping running '
-                              'traffic.' %
-                              (iperf_server_ip_address, REPORTING_SPEED_UNITS))
-            else:
-                self.log.info('Iperf server "%s" is pingable.' %
-                              iperf_server_ip_address)
-            if self.debug_pre_traffic_cmd:
-                self.log.info('\nDEBUG: Sending command \'%s\' to DUT' %
-                              self.debug_pre_traffic_cmd)
-                self.log.info(
-                    '\n%s' % self.dut.send_command(self.debug_pre_traffic_cmd))
-            if server_pingable:
-                if traffic_dir == 'tx':
-                    self.log.info('Running traffic DUT to %s at relative '
-                                  'attenuation of %s' %
-                                  (iperf_server_ip_address, step))
-                elif traffic_dir == 'rx':
-                    self.log.info('Running traffic %s to DUT at relative '
-                                  'attenuation of %s' %
-                                  (iperf_server_ip_address, step))
-                else:
-                    raise ValueError('Invalid traffic direction')
-                try:
-                    iperf_tag = 'decreasing'
-                    if reverse:
-                        iperf_tag = 'increasing'
-                    iperf_results_file = self.dut_iperf_client.start(
-                        iperf_server_ip_address,
-                        iperf_flags,
-                        '%s_%s_%s' %
-                        (iperf_tag, traffic_dir, self.starting_attn),
-                        timeout=(self.dwell_time_in_secs * 2))
-                except TimeoutError as e:
-                    iperf_results_file = None
-                    self.log.error(
-                        f'Iperf traffic timed out. Marking 0 {REPORTING_SPEED_UNITS} for '
-                        f'throughput. {e}')
-
-                if not iperf_results_file:
-                    throughput.append(0)
-                else:
-                    try:
-                        iperf_results = IPerfResult(
-                            iperf_results_file,
-                            reporting_speed_units=REPORTING_SPEED_UNITS)
-                        if iperf_results.error:
-                            self.iperf_server.stop()
-                            self.iperf_server.start()
-                            self.log.error(
-                                f'Errors in iperf logs:\n{iperf_results.error}'
-                            )
-                        if not iperf_results.avg_send_rate:
-                            throughput.append(0)
-                        else:
-                            throughput.append(iperf_results.avg_send_rate)
-                    except ValueError as e:
-                        self.iperf_server.stop()
-                        self.iperf_server.start()
-                        self.log.error(
-                            f'No data in iPerf3 file. Marking 0 {REPORTING_SPEED_UNITS} '
-                            f'for throughput: {e}')
-                        throughput.append(0)
-                    except Exception as e:
-                        self.iperf_server.stop()
-                        self.iperf_server.start()
-                        self.log.error(
-                            f'Unknown exception. Marking 0 {REPORTING_SPEED_UNITS} for '
-                            f'throughput: {e}')
-                        self.log.error(e)
-                        throughput.append(0)
-
-                self.log.info(
-                    'Iperf traffic complete. %s traffic received at '
-                    '%s %s at relative attenuation of %s db' %
-                    (traffic_dir, throughput[-1], REPORTING_SPEED_UNITS,
-                     str(relative_attn[-1]).strip()))
-
-            else:
-                self.log.debug('DUT Associated: %s' % associated)
-                self.log.debug('%s pingable: %s' %
-                               (iperf_server_ip_address, server_pingable))
-                throughput.append(0)
-            if self.debug_post_traffic_cmd:
-                self.log.info('\nDEBUG: Sending command \'%s\' to DUT' %
-                              self.debug_post_traffic_cmd)
-                self.log.info(
-                    '\n%s' %
-                    self.dut.send_command(self.debug_post_traffic_cmd))
-        return throughput, relative_attn
-
-    def test_rvr_11ac_5g_80mhz_open_tx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='5g',
-                                  traffic_dir='tx',
-                                  ip_version=4)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11ac_5g_80mhz_open_rx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='5g',
-                                  traffic_dir='rx',
-                                  ip_version=4)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11ac_5g_80mhz_open_tx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='5g',
-                                  traffic_dir='tx',
-                                  ip_version=6)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11ac_5g_80mhz_open_rx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='5g',
-                                  traffic_dir='rx',
-                                  ip_version=6)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11ac_5g_80mhz_wpa2_tx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='5g',
-                                  traffic_dir='tx',
-                                  ip_version=4)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11ac_5g_80mhz_wpa2_rx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='5g',
-                                  traffic_dir='rx',
-                                  ip_version=4)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11ac_5g_80mhz_wpa2_tx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='5g',
-                                  traffic_dir='tx',
-                                  ip_version=6)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11ac_5g_80mhz_wpa2_rx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='5g',
-                                  traffic_dir='rx',
-                                  ip_version=6)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11n_2g_20mhz_open_tx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='2g',
-                                  traffic_dir='tx',
-                                  ip_version=4)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11n_2g_20mhz_open_rx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='2g',
-                                  traffic_dir='rx',
-                                  ip_version=4)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11n_2g_20mhz_open_tx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='2g',
-                                  traffic_dir='tx',
-                                  ip_version=6)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11n_2g_20mhz_open_rx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  band='2g',
-                                  traffic_dir='rx',
-                                  ip_version=6)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11n_2g_20mhz_wpa2_tx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='2g',
-                                  traffic_dir='tx',
-                                  ip_version=4)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11n_2g_20mhz_wpa2_rx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='2g',
-                                  traffic_dir='rx',
-                                  ip_version=4)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11n_2g_20mhz_wpa2_tx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='2g',
-                                  traffic_dir='tx',
-                                  ip_version=6)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
-
-    def test_rvr_11n_2g_20mhz_wpa2_rx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode='wpa2', password=password)
-        setup_ap(access_point=self.access_point,
-                 profile_name='whirlwind',
-                 channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                 ssid=ssid,
-                 security=security_profile,
-                 setup_bridge=True)
-        graph_data = self.run_rvr(ssid,
-                                  security_mode='wpa2',
-                                  password=password,
-                                  band='2g',
-                                  traffic_dir='rx',
-                                  ip_version=6)
-        for rvr_graph in create_rvr_graph(
-                self.test_name,
-                context.get_current_context().get_full_output_path(),
-                graph_data):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(), graph_data)
diff --git a/src/antlion/tests/wlan/performance/WlanWmmTest.py b/src/antlion/tests/wlan/performance/WlanWmmTest.py
deleted file mode 100644
index 2094232..0000000
--- a/src/antlion/tests/wlan/performance/WlanWmmTest.py
+++ /dev/null
@@ -1,848 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import operator
-import time
-
-from antlion import asserts
-from antlion import context
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.abstract_devices import wmm_transceiver
-from antlion.test_utils.fuchsia import wmm_test_cases
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-DEFAULT_N_CAPABILITIES_20_MHZ = [
-    hostapd_constants.N_CAPABILITY_LDPC, hostapd_constants.N_CAPABILITY_SGI20,
-    hostapd_constants.N_CAPABILITY_TX_STBC,
-    hostapd_constants.N_CAPABILITY_RX_STBC1,
-    hostapd_constants.N_CAPABILITY_HT20
-]
-
-DEFAULT_AP_PARAMS = {
-    'profile_name': 'whirlwind',
-    'channel': hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-    'n_capabilities': DEFAULT_N_CAPABILITIES_20_MHZ,
-    'ac_capabilities': None
-}
-
-DEFAULT_BW_PERCENTAGE = 1
-DEFAULT_STREAM_TIMEOUT = 60
-DEFAULT_STREAM_TIME = 10
-
-OPERATORS = {
-    '>': operator.gt,
-    '>=': operator.ge,
-    '<': operator.lt,
-    '<=': operator.le,
-    '==': operator.eq
-}
-
-GRAPH_COLOR_LEN = 10
-GRAPH_DEFAULT_LINE_WIDTH = 2
-GRAPH_DEFAULT_CIRCLE_SIZE = 10
-
-
-def eval_operator(operator_string,
-                  actual_value,
-                  expected_value,
-                  max_bw,
-                  rel_tolerance=0,
-                  abs_tolerance=0,
-                  max_bw_rel_tolerance=0):
-    """
-    Determines if an inequality evaluates to True, given relative and absolute
-    tolerance.
-
-    Args:
-        operator_string: string, the operator to use for the comparison
-        actual_value: the value to compare to some expected value
-        expected_value: the value the actual value is compared to
-        rel_tolerance: decimal representing the percent tolerance, relative to
-            the expected value. E.g. (101 <= 100) w/ rel_tol=0.01 is True
-        abs_tolerance: the lowest actual (not percent) tolerance for error.
-            E.g. (101 == 100) w/ rel_tol=0.005 is False, but
-            (101 == 100) w/ rel_tol=0.005 and abs_tol=1 is True
-        max_bw_rel_tolerance: decimal representing the percent tolerance,
-            relative to the maximimum allowed bandwidth.
-            E.g. (101 <= max bw of 100) w/ max_bw_rel_tol=0.01 is True
-
-
-    Returns:
-        True, if inequality evaluates to True within tolerances
-        False, otherwise
-    """
-    op = OPERATORS[operator_string]
-    if op(actual_value, expected_value):
-        return True
-
-    error = abs(actual_value - expected_value)
-    accepted_error = max(expected_value * rel_tolerance, abs_tolerance,
-                         max_bw * max_bw_rel_tolerance)
-    return error <= accepted_error
-
-
-class WlanWmmTest(WifiBaseTest):
-    """Tests WMM QoS Functionality (Station only)
-
-    Testbed Requirements:
-    * One ACTS compatible wlan_device (staut)
-    * One Whirlwind Access Point
-    * For some tests, One additional ACTS compatible device (secondary_sta)
-
-    For accurate results, must be performed in an RF isolated environment.
-    """
-
-    def setup_class(self):
-        super().setup_class()
-
-        try:
-            self.wmm_test_params = self.user_params['wmm_test_params']
-            self._wmm_transceiver_configs = self.wmm_test_params[
-                'wmm_transceivers']
-        except KeyError:
-            raise AttributeError('Must provide at least 2 WmmTransceivers in '
-                                 '"wmm_test_params" field of ACTS config.')
-
-        if len(self._wmm_transceiver_configs) < 2:
-            raise AttributeError(
-                'At least 2 WmmTransceivers must be provided.')
-
-        self.android_devices = getattr(self, 'android_devices', [])
-        self.fuchsia_devices = getattr(self, 'fuchsia_devices', [])
-
-        self.wlan_devices = [
-            create_wlan_device(device)
-            for device in self.android_devices + self.fuchsia_devices
-        ]
-
-        # Create STAUT transceiver
-        if 'staut' not in self._wmm_transceiver_configs:
-            raise AttributeError(
-                'Must provide a WmmTransceiver labeled "staut" with a '
-                'wlan_device.')
-        self.staut = wmm_transceiver.create(
-            self._wmm_transceiver_configs['staut'],
-            identifier='staut',
-            wlan_devices=self.wlan_devices)
-
-        # Required to for automated power cycling
-        self.dut = self.staut.wlan_device
-
-        # Create AP transceiver
-        if 'access_point' not in self._wmm_transceiver_configs:
-            raise AttributeError(
-                'Must provide a WmmTransceiver labeled "access_point" with a '
-                'access_point.')
-        self.access_point_transceiver = wmm_transceiver.create(
-            self._wmm_transceiver_configs['access_point'],
-            identifier='access_point',
-            access_points=self.access_points)
-
-        self.wmm_transceivers = [self.staut, self.access_point_transceiver]
-
-        # Create secondary station transceiver, if present
-        if 'secondary_sta' in self._wmm_transceiver_configs:
-            self.secondary_sta = wmm_transceiver.create(
-                self._wmm_transceiver_configs['secondary_sta'],
-                identifier='secondary_sta',
-                wlan_devices=self.wlan_devices)
-            self.wmm_transceivers.append(self.secondary_sta)
-        else:
-            self.secondary_sta = None
-
-        self.wmm_transceiver_map = {
-            tc.identifier: tc
-            for tc in self.wmm_transceivers
-        }
-
-    def setup_test(self):
-        for tc in self.wmm_transceivers:
-            if tc.wlan_device:
-                tc.wlan_device.wifi_toggle_state(True)
-                tc.wlan_device.disconnect()
-            if tc.access_point:
-                tc.access_point.stop_all_aps()
-
-    def teardown_test(self):
-        for tc in self.wmm_transceivers:
-            tc.cleanup_asynchronous_streams()
-            if tc.wlan_device:
-                tc.wlan_device.disconnect()
-                tc.wlan_device.reset_wifi()
-            if tc.access_point:
-                self.download_ap_logs()
-                tc.access_point.stop_all_aps()
-
-    def teardown_class(self):
-        for tc in self.wmm_transceivers:
-            tc.destroy_resources()
-        super().teardown_class()
-
-    def on_fail(self, test_name, begin_time):
-        for wlan_device in self.wlan_devices:
-            super().on_device_fail(wlan_device.device, test_name, begin_time)
-
-    def start_ap_with_wmm_params(self, ap_parameters, wmm_parameters):
-        """Sets up WMM network on AP.
-
-        Args:
-            ap_parameters: a dictionary of kwargs to set up on ap
-            wmm_parameters: a dictionary of wmm_params to set up on ap
-
-        Returns:
-            String, subnet of the network setup (e.g. '192.168.1.0/24')
-        """
-        # Defaults for required parameters
-        ap_parameters['force_wmm'] = True
-        if 'ssid' not in ap_parameters:
-            ap_parameters['ssid'] = utils.rand_ascii_str(
-                hostapd_constants.AP_SSID_LENGTH_2G)
-
-        if 'profile_name' not in ap_parameters:
-            ap_parameters['profile_name'] = 'whirlwind'
-
-        if 'channel' not in ap_parameters:
-            ap_parameters['channel'] = 6
-
-        if 'n_capabilities' not in ap_parameters:
-            ap_parameters['n_capabilities'] = DEFAULT_N_CAPABILITIES_20_MHZ
-
-        if 'additional_ap_parameters' in ap_parameters:
-            ap_parameters['additional_ap_parameters'].update(wmm_parameters)
-        else:
-            ap_parameters['additional_ap_parameters'] = wmm_parameters
-
-        # Optional security
-        security_config = ap_parameters.get('security_config', None)
-        if security_config:
-            ap_parameters['security'] = hostapd_security.Security(
-                **security_config)
-            ap_parameters.pop('security_config')
-
-        # Start AP with kwargs
-        self.log.info('Setting up WMM network: %s' % ap_parameters['ssid'])
-        setup_ap(self.access_point_transceiver.access_point, **ap_parameters)
-        self.log.info('Network (%s) is up.' % ap_parameters['ssid'])
-
-        # Return subnet
-        if ap_parameters['channel'] < hostapd_constants.LOWEST_5G_CHANNEL:
-            return self.access_point_transceiver.access_point._AP_2G_SUBNET_STR
-        else:
-            return self.access_point_transceiver.access_point._AP_5G_SUBNET_STR
-
-    def associate_transceiver(self, wmm_transceiver, ap_params):
-        """Associates a WmmTransceiver that has a wlan_device.
-
-        Args:
-            wmm_transceiver: transceiver to associate
-            ap_params: dict, contains ssid and password, if any, for network
-        """
-        if not wmm_transceiver.wlan_device:
-            raise AttributeError(
-                'Cannot associate a WmmTransceiver that does not have a '
-                'WlanDevice.')
-        ssid = ap_params['ssid']
-        password = None
-        target_security = None
-        security = ap_params.get('security')
-        if security:
-            password = security.password
-            target_security = hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                security.security_mode_string)
-        associated = wmm_transceiver.wlan_device.associate(
-            target_ssid=ssid,
-            target_pwd=password,
-            target_security=target_security)
-        if not associated:
-            raise ConnectionError('Failed to associate WmmTransceiver %s.' %
-                                  wmm_transceiver.identifier)
-        self.log.info('WmmTransceiver %s associated.' %
-                      wmm_transceiver.identifier)
-
-    def validate_streams_in_phase(self, phase_id, phases, max_bw):
-        """Validates any stream in a phase that has validation criteria.
-
-        Args:
-            phase_id: identifier of the phase to check
-            phases: dictionary containing phases for retrieving stream
-                transmitters, expected bandwidths, etc.
-            max_bw: the max link bandwidth, measured in the test
-
-        Returns:
-            True, if ALL validation criteria for ALL streams in phase pass
-            False, otherwise
-        """
-        pass_val = True
-        for stream_id, stream in phases[phase_id].items():
-            if 'validation' in stream:
-                transmitter = stream['transmitter']
-                uuid = stream['uuid']
-                actual_bw = transmitter.get_results(uuid).avg_rate
-                if not actual_bw:
-                    raise ConnectionError(
-                        '(Phase: %s, Stream: %s) - Stream results show '
-                        'bandwidth: None' % (phase_id, stream_id))
-                for check in stream['validation']:
-                    operator_str = check['operator']
-                    rel_tolerance = check.get('rel_tolerance', 0)
-                    abs_tolerance = check.get('abs_tolerance', 0)
-                    max_bw_rel_tolerance = check.get('max_bw_rel_tolerance', 0)
-                    expected_bw_percentage = check.get('bandwidth_percentage',
-                                                       DEFAULT_BW_PERCENTAGE)
-                    # Explicit Bandwidth Validation
-                    if 'bandwidth' in check:
-                        comp_bw = check['bandwidth']
-                        log_msg = (
-                            'Expected Bandwidth: %s (explicit validation '
-                            'bandwidth [%s] x expected bandwidth '
-                            'percentage [%s])' %
-                            (expected_bw_percentage * comp_bw, comp_bw,
-                             expected_bw_percentage))
-
-                    # Stream Comparison Validation
-                    elif 'phase' in check and 'stream' in check:
-                        comp_phase_id = check['phase']
-                        comp_stream_id = check['stream']
-                        comp_stream = phases[comp_phase_id][comp_stream_id]
-                        comp_transmitter = comp_stream['transmitter']
-                        comp_uuid = comp_stream['uuid']
-                        comp_bw = comp_transmitter.get_results(
-                            comp_uuid).avg_rate
-                        log_msg = (
-                            'Expected Bandwidth: %s (bandwidth for phase: %s, '
-                            'stream: %s [%s] x expected bandwidth percentage '
-                            '[%s])' %
-                            (expected_bw_percentage * comp_bw, comp_phase_id,
-                             comp_stream_id, comp_bw, expected_bw_percentage))
-
-                    # Expected Bandwidth Validation
-                    else:
-                        if 'bandwidth' in stream:
-                            comp_bw = stream['bandwidth']
-                            log_msg = (
-                                'Expected Bandwidth: %s (expected stream '
-                                'bandwidth [%s] x expected bandwidth '
-                                'percentage [%s])' %
-                                (expected_bw_percentage * comp_bw, comp_bw,
-                                 expected_bw_percentage))
-                        else:
-                            max_bw_percentage = stream.get(
-                                'max_bandwidth_percentage',
-                                DEFAULT_BW_PERCENTAGE)
-                            comp_bw = max_bw * max_bw_percentage
-                            log_msg = (
-                                'Expected Bandwidth: %s (max bandwidth [%s] x '
-                                'stream bandwidth percentage [%s] x expected '
-                                'bandwidth percentage [%s])' %
-                                (expected_bw_percentage * comp_bw, max_bw,
-                                 max_bw_percentage, expected_bw_percentage))
-
-                    self.log.info(
-                        'Validation criteria - Stream: %s, '
-                        'Actual Bandwidth: %s, Operator: %s, %s, '
-                        'Relative Tolerance: %s, Absolute Tolerance: %s, Max '
-                        'Bandwidth Relative Tolerance: %s' %
-                        (stream_id, actual_bw, operator_str, log_msg,
-                         rel_tolerance, abs_tolerance, max_bw_rel_tolerance))
-
-                    if eval_operator(
-                            operator_str,
-                            actual_bw,
-                            comp_bw * expected_bw_percentage,
-                            max_bw,
-                            rel_tolerance=rel_tolerance,
-                            abs_tolerance=abs_tolerance,
-                            max_bw_rel_tolerance=max_bw_rel_tolerance):
-                        self.log.info(
-                            '(Phase: %s, Stream: %s) - PASSES validation check!'
-                            % (phase_id, stream_id))
-                    else:
-                        self.log.info(
-                            '(Phase: %s, Stream: %s) - Stream FAILS validation '
-                            'check.' % (phase_id, stream_id))
-                        pass_val = False
-        if pass_val:
-            self.log.info(
-                '(Phase %s) - All streams\' validation criteria were met.' %
-                phase_id)
-            return True
-        else:
-            self.log.error(
-                '(Phase %s) - At least one stream validation criterion was not '
-                'met.' % phase_id)
-            return False
-
-    def graph_test(self, phases, max_bw):
-        """ Outputs a bokeh html graph of the streams. Saves to ACTS log
-        directory.
-
-        Args:
-            phases: dictionary containing phases for retrieving stream
-                transmitters, expected bandwidths, etc.
-            max_bw: the max link bandwidth, measured in the test
-
-        """
-
-        try:
-            from bokeh.palettes import Category10
-            from bokeh.plotting import ColumnDataSource, figure, output_file, save
-            from bokeh.models import Span, Label
-        except ImportError as e:
-            self.log.warn("bokeh is not installed: skipping creation of graphs. "
-                          "Note CSV files are still available. If graphs are "
-                          "desired, install antlion with the \"bokeh\" feature.")
-            return
-
-        output_path = context.get_current_context().get_base_output_path()
-        output_file_name = '%s/WlanWmmTest/%s.html' % (output_path,
-                                                       self.test_name)
-        output_file(output_file_name)
-
-        start_time = 0
-        graph_lines = []
-
-        # Used for scaling
-        highest_stream_bw = 0
-        lowest_stream_bw = 100000
-
-        for phase_id, phase in phases.items():
-            longest_stream_time = 0
-            for stream_id, stream in phase.items():
-                transmitter = stream['transmitter']
-                uuid = stream['uuid']
-
-                if 'bandwidth' in stream:
-                    stream_bw = "{:.3f}".format(stream['bandwidth'])
-                    stream_bw_formula_str = '%sMb/s' % stream_bw
-                elif 'max_bandwidth_percentage' in stream:
-                    max_bw_percentage = stream['max_bandwidth_percentage']
-                    stream_bw = "{:.3f}".format(max_bw * max_bw_percentage)
-                    stream_bw_formula_str = '%sMb/s (%s%% of max bandwidth)' % (
-                        stream_bw, str(max_bw_percentage * 100))
-                else:
-                    raise AttributeError(
-                        'Stream %s must have either a bandwidth or '
-                        'max_bandwidth_percentage parameter.' % stream_id)
-
-                stream_time = stream.get('time', DEFAULT_STREAM_TIME)
-                longest_stream_time = max(longest_stream_time, stream_time)
-
-                avg_rate = transmitter.get_results(uuid).avg_rate
-
-                instantaneous_rates = transmitter.get_results(
-                    uuid).instantaneous_rates
-                highest_stream_bw = max(highest_stream_bw,
-                                        max(instantaneous_rates))
-                lowest_stream_bw = min(lowest_stream_bw,
-                                       min(instantaneous_rates))
-
-                stream_data = ColumnDataSource(
-                    dict(time=[
-                        x for x in range(start_time, start_time + stream_time)
-                    ],
-                        instantaneous_bws=instantaneous_rates,
-                        avg_bw=[avg_rate for _ in range(stream_time)],
-                        stream_id=[stream_id for _ in range(stream_time)],
-                        attempted_bw=[
-                        stream_bw_formula_str for _ in range(stream_time)
-                    ]))
-                line = {
-                    'x_axis': 'time',
-                    'y_axis': 'instantaneous_bws',
-                    'source': stream_data,
-                    'line_width': GRAPH_DEFAULT_LINE_WIDTH,
-                    'legend_label': '%s:%s' % (phase_id, stream_id)
-                }
-                graph_lines.append(line)
-
-            start_time = start_time + longest_stream_time
-        TOOLTIPS = [('Time', '@time'),
-                    ('Attempted Bandwidth', '@attempted_bw'),
-                    ('Instantaneous Bandwidth', '@instantaneous_bws'),
-                    ('Stream Average Bandwidth', '@avg_bw'),
-                    ('Stream', '@stream_id')]
-
-        # Create and scale graph appropriately
-        time_vs_bandwidth_graph = figure(
-            title=('Bandwidth for %s' % self.test_name),
-            x_axis_label='Time',
-            y_axis_label='Bandwidth',
-            tooltips=TOOLTIPS,
-            y_range=(lowest_stream_bw -
-                     (0.5 * (highest_stream_bw - lowest_stream_bw)),
-                     1.05 * max_bw))
-        time_vs_bandwidth_graph.sizing_mode = 'stretch_both'
-        time_vs_bandwidth_graph.title.align = 'center'
-        colors = Category10[GRAPH_COLOR_LEN]
-        color_ind = 0
-
-        # Draw max bandwidth line
-        max_bw_span = Span(location=max_bw,
-                           dimension='width',
-                           line_color='black',
-                           line_dash='dashed',
-                           line_width=GRAPH_DEFAULT_LINE_WIDTH)
-        max_bw_label = Label(x=(0.5 * start_time),
-                             y=max_bw,
-                             text=('Max Bandwidth: %sMb/s' % max_bw),
-                             text_align='center')
-        time_vs_bandwidth_graph.add_layout(max_bw_span)
-        time_vs_bandwidth_graph.add_layout(max_bw_label)
-
-        # Draw stream lines
-        for line in graph_lines:
-            time_vs_bandwidth_graph.line(line['x_axis'],
-                                         line['y_axis'],
-                                         source=line['source'],
-                                         line_width=line['line_width'],
-                                         legend_label=line['legend_label'],
-                                         color=colors[color_ind])
-            time_vs_bandwidth_graph.circle(line['x_axis'],
-                                           line['y_axis'],
-                                           source=line['source'],
-                                           size=GRAPH_DEFAULT_CIRCLE_SIZE,
-                                           legend_label=line['legend_label'],
-                                           color=colors[color_ind])
-            color_ind = (color_ind + 1) % GRAPH_COLOR_LEN
-        time_vs_bandwidth_graph.legend.location = "top_left"
-        time_vs_bandwidth_graph.legend.click_policy = "hide"
-        graph_file = save([time_vs_bandwidth_graph])
-        self.log.info('Saved graph to %s' % graph_file)
-
-    def run_wmm_test(self,
-                     phases,
-                     ap_parameters=DEFAULT_AP_PARAMS,
-                     wmm_parameters=hostapd_constants.
-                     WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-                     stream_timeout=DEFAULT_STREAM_TIMEOUT):
-        """Runs a WMM test case.
-
-        Args:
-            phases: dictionary of phases of streams to run in parallel,
-                including any validation critera (see example below).
-            ap_parameters: dictionary of custom kwargs to setup on AP (see
-                start_ap_with_wmm_parameters)
-            wmm_parameters: dictionary of WMM AC parameters
-            stream_timeout: int, time in seconds to wait before force joining
-                parallel streams
-
-        Asserts:
-            PASS, if all validation criteria for all phases are met
-            FAIL, otherwise
-        """
-        # Setup AP
-        subnet_str = self.start_ap_with_wmm_params(ap_parameters,
-                                                   wmm_parameters)
-        # Determine transmitters and receivers used in test case
-        transmitters = set()
-        receivers = set()
-        for phase in phases.values():
-            for stream in phase.values():
-                transmitter = self.wmm_transceiver_map[
-                    stream['transmitter_str']]
-                transmitters.add(transmitter)
-                stream['transmitter'] = transmitter
-                receiver = self.wmm_transceiver_map[stream['receiver_str']]
-                receivers.add(receiver)
-                stream['receiver'] = receiver
-        transceivers = transmitters.union(receivers)
-
-        # Associate all transceivers with wlan_devices
-        for tc in transceivers:
-            if tc.wlan_device:
-                self.associate_transceiver(tc, ap_parameters)
-
-        # Determine link max bandwidth
-        self.log.info('Determining link maximum bandwidth.')
-        uuid = self.staut.run_synchronous_traffic_stream(
-            {'receiver': self.access_point_transceiver}, subnet_str)
-        max_bw = self.staut.get_results(uuid).avg_send_rate
-        self.log.info('Link maximum bandwidth: %s Mb/s' % max_bw)
-
-        # Run parallel phases
-        pass_test = True
-        for phase_id, phase in phases.items():
-            self.log.info('Setting up phase: %s' % phase_id)
-
-            for stream_id, stream in phase.items():
-
-                transmitter = stream['transmitter']
-                receiver = stream['receiver']
-                access_category = stream.get('access_category', None)
-                stream_time = stream.get('time', DEFAULT_STREAM_TIME)
-
-                # Determine stream type
-                if 'bandwidth' in stream:
-                    bw = stream['bandwidth']
-                elif 'max_bandwidth_percentage' in stream:
-                    max_bw_percentage = stream['max_bandwidth_percentage']
-                    bw = max_bw * max_bw_percentage
-                else:
-                    raise AttributeError(
-                        'Stream %s must have either a bandwidth or '
-                        'max_bandwidth_percentage parameter.' % stream_id)
-
-                stream_params = {
-                    'receiver': receiver,
-                    'access_category': access_category,
-                    'bandwidth': bw,
-                    'time': stream_time
-                }
-
-                uuid = transmitter.prepare_asynchronous_stream(
-                    stream_params, subnet_str)
-                stream['uuid'] = uuid
-
-            # Start all streams in phase
-            start_time = time.time() + 5
-            for transmitter in transmitters:
-                transmitter.start_asynchronous_streams(start_time=start_time)
-
-            # Wait for streams to join
-            for transmitter in transmitters:
-                end_time = time.time() + stream_timeout
-                while transmitter.has_active_streams:
-                    if time.time() > end_time:
-                        raise ConnectionError(
-                            'Transmitter\'s (%s) active streams are not finishing.'
-                            % transmitter.identifier)
-                    time.sleep(1)
-
-            # Cleanup all streams
-            for transmitter in transmitters:
-                transmitter.cleanup_asynchronous_streams()
-
-            # Validate streams
-            pass_test = pass_test and self.validate_streams_in_phase(
-                phase_id, phases, max_bw)
-
-        self.graph_test(phases, max_bw)
-        if pass_test:
-            asserts.explicit_pass(
-                'Validation criteria met for all streams in all phases.')
-        else:
-            asserts.fail(
-                'At least one stream failed to meet validation criteria.')
-
-# Test Cases
-
-# Internal Traffic Differentiation
-
-    def test_internal_traffic_diff_VO_VI(self):
-        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_VI)
-
-    def test_internal_traffic_diff_VO_BE(self):
-        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_BE)
-
-    def test_internal_traffic_diff_VO_BK(self):
-        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_BK)
-
-    def test_internal_traffic_diff_VI_BE(self):
-        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VI_BE)
-
-    def test_internal_traffic_diff_VI_BK(self):
-        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VI_BK)
-
-    def test_internal_traffic_diff_BE_BK(self):
-        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_BE_BK)
-
-# External Traffic Differentiation
-
-    """Single station, STAUT transmits high priority"""
-
-    def test_external_traffic_diff_staut_VO_ap_VI(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VO_ap_VI)
-
-    def test_external_traffic_diff_staut_VO_ap_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BE)
-
-    def test_external_traffic_diff_staut_VO_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BK)
-
-    def test_external_traffic_diff_staut_VI_ap_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BE)
-
-    def test_external_traffic_diff_staut_VI_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BK)
-
-    def test_external_traffic_diff_staut_BE_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BE_ap_BK)
-
-    """Single station, STAUT transmits low priority"""
-
-    def test_external_traffic_diff_staut_VI_ap_VO(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_VI_ap_VO)
-
-    def test_external_traffic_diff_staut_BE_ap_VO(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VO)
-
-    def test_external_traffic_diff_staut_BK_ap_VO(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VO)
-
-    def test_external_traffic_diff_staut_BE_ap_VI(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VI)
-
-    def test_external_traffic_diff_staut_BK_ap_VI(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VI)
-
-    def test_external_traffic_diff_staut_BK_ap_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_external_traffic_diff_staut_BK_ap_BE)
-
-# # Dual Internal/External Traffic Differentiation (Single station)
-
-    def test_dual_traffic_diff_staut_VO_VI_ap_VI(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_VO_VI_ap_VI)
-
-    def test_dual_traffic_diff_staut_VO_BE_ap_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_VO_BE_ap_BE)
-
-    def test_dual_traffic_diff_staut_VO_BK_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_VO_BK_ap_BK)
-
-    def test_dual_traffic_diff_staut_VI_BE_ap_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_VI_BE_ap_BE)
-
-    def test_dual_traffic_diff_staut_VI_BK_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_VI_BK_ap_BK)
-
-    def test_dual_traffic_diff_staut_BE_BK_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_dual_traffic_diff_staut_BE_BK_ap_BK)
-
-# ACM Bit Conformance Tests (Single station, as WFA test below uses two)
-
-    def test_acm_bit_on_VI(self):
-        wmm_params_VI_ACM = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI)
-        self.run_wmm_test(wmm_test_cases.test_acm_bit_on_VI,
-                          wmm_parameters=wmm_params_VI_ACM)
-
-# AC Parameter Modificiation Tests (Single station, as WFA test below uses two)
-
-    def test_ac_param_degrade_VO(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_ac_param_degrade_VO,
-            wmm_parameters=hostapd_constants.WMM_DEGRADED_VO_PARAMS)
-
-    def test_ac_param_degrade_VI(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_ac_param_degrade_VI,
-            wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS)
-
-    def test_ac_param_improve_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_ac_param_improve_BE,
-            wmm_parameters=hostapd_constants.WMM_IMPROVE_BE_PARAMS)
-
-    def test_ac_param_improve_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_ac_param_improve_BK,
-            wmm_parameters=hostapd_constants.WMM_IMPROVE_BK_PARAMS)
-
-
-# WFA Test Plan Tests
-
-    """Traffic Differentiation in Single BSS (Single Station)"""
-
-    def test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE)
-
-    def test_wfa_traffic_diff_single_station_staut_VI_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VI_BE)
-
-    def test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE(self):
-        self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE)
-
-    def test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK(self):
-        self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK)
-
-    def test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI(self):
-        self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI)
-
-    """Traffic Differentiation in Single BSS (Two Stations)"""
-
-    def test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
-        self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE)
-
-    def test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
-        self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE)
-
-    def test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
-        self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK)
-
-    def test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
-        self.run_wmm_test(
-            wmm_test_cases.
-            test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI)
-
-    """Test ACM Bit Conformance (Two Stations)"""
-
-    def test_wfa_acm_bit_on_VI(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
-        wmm_params_VI_ACM = utils.merge_dicts(
-            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
-            hostapd_constants.WMM_ACM_VI)
-        self.run_wmm_test(wmm_test_cases.test_wfa_acm_bit_on_VI,
-                          wmm_parameters=wmm_params_VI_ACM)
-
-    """Test the AC Parameter Modification"""
-
-    def test_wfa_ac_param_degrade_VI(self):
-        asserts.skip_if(not self.secondary_sta, 'No secondary station.')
-        self.run_wmm_test(
-            wmm_test_cases.test_wfa_ac_param_degrade_VI,
-            wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS)
diff --git a/src/antlion/tests/wlan_policy/HiddenNetworksTest.py b/src/antlion/tests/wlan_policy/HiddenNetworksTest.py
deleted file mode 100644
index aa2c7ba..0000000
--- a/src/antlion/tests/wlan_policy/HiddenNetworksTest.py
+++ /dev/null
@@ -1,167 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion import signals
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.utils import rand_ascii_str
-
-# These tests should have a longer timeout for connecting than normal connect
-# tests because the device should probabilistically perform active scans for
-# hidden networks. Multiple scans are necessary to verify a very low chance of
-# random failure.
-TIME_WAIT_FOR_CONNECT = 90
-TIME_ATTEMPT_SCANS = 90
-
-CONNECTIONS_ENABLED = "ConnectionsEnabled"
-CONNECTIONS_DISABLED = "ConnectionsDisabled"
-SECURITY_NONE = "none"
-WPA2 = "wpa2"
-
-
-class HiddenNetworksTest(WifiBaseTest):
-    """ Tests that WLAN Policy will detect hidden networks
-
-    Test Bed Requirement:
-    * One or more Fuchsia devices
-    * One Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        # Start an AP with a hidden network
-        self.hidden_ssid = rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        self.access_point = self.access_points[0]
-        self.hidden_password = rand_ascii_str(
-            hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
-        self.hidden_security = WPA2
-        security = hostapd_security.Security(
-            security_mode=self.hidden_security, password=self.hidden_password)
-
-        self.access_point.stop_all_aps()
-        setup_ap(self.access_point,
-                 'whirlwind',
-                 hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 self.hidden_ssid,
-                 hidden=True,
-                 security=security)
-
-        if len(self.fuchsia_devices) < 1:
-            raise EnvironmentError("No Fuchsia devices found.")
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError(
-                    "Failed to remove all networks in setup")
-
-    def teardown_class(self):
-        self.access_point.stop_all_aps()
-
-    def test_scan_hidden_networks(self):
-        # Scan a few times and check that we see the hidden networks in the
-        # results at least once. Even if hidden networks are scanned
-        # probabilistically, we should see it after a few tries.
-        for fd in self.fuchsia_devices:
-            # A hidden network must be saved to be found in scan results.
-            # Stop client connections to not trigger a connect when saving,
-            # which would interfere with requested scans.
-            fd.wlan_policy_controller.stop_client_connections()
-            if not fd.wlan_policy_controller.save_network(
-                    self.hidden_ssid,
-                    self.hidden_security,
-                    password=self.hidden_password):
-                raise EnvironmentError("Failed to save network")
-            fd.wlan_policy_controller.start_client_connections()
-            start_time = time.time()
-            num_performed_scans = 0
-
-            while time.time() < start_time + TIME_ATTEMPT_SCANS:
-                num_performed_scans = num_performed_scans + 1
-                scan_result = fd.sl4f.wlan_policy_lib.wlanScanForNetworks()
-                if scan_result["error"] != None:
-                    self.log.warn("Failed to scan for networks with error %s" %
-                                  scan_result["error"])
-                    continue
-                else:
-                    scans = scan_result["result"]
-                if self.hidden_ssid in scans:
-                    self.log.info(
-                        "SSID of hidden network seen after %d scans" %
-                        num_performed_scans)
-                    return
-                # Don't overload SL4F with scan requests
-                time.sleep(1)
-
-            self.log.error("Failed to see SSID after %d scans" %
-                           num_performed_scans)
-            raise signals.TestFailure("Failed to see hidden network in scans")
-
-    def test_auto_connect_hidden_on_startup(self):
-        """ Test that if we are not connected to anything but have a hidden
-            network saved, we will eventually actively scan for it and connect."""
-        # Start up AP with an open network with a random SSID
-
-        for fd in self.fuchsia_devices:
-            # Test that we will auto connect without anything being triggered by
-            # saving a new network.
-            fd.wlan_policy_controller.stop_client_connections()
-
-            # Save the network.
-            if not fd.wlan_policy_controller.save_network(
-                    self.hidden_ssid,
-                    self.hidden_security,
-                    password=self.hidden_password):
-                raise EnvironmentError("Failed to save network")
-
-            # Reboot the device and check that it auto connects.
-            fd.reboot()
-            if not fd.wlan_policy_controller.wait_for_connect(
-                    self.hidden_ssid,
-                    self.hidden_security,
-                    timeout=TIME_WAIT_FOR_CONNECT):
-                raise signals.TestFailure("Failed to connect to network")
-
-    def test_auto_connect_hidden_on_save(self):
-        """ Test that if we save a hidden network and are not connected to
-            anything, the device will connect to the hidden network that was
-            just saved. """
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.wait_for_no_connections():
-                self.log.info(
-                    "Failed to get into a disconnected state to start the test"
-                )
-                raise EnvironmentError("Failed to disconnect all")
-
-            # Save the network and make sure that we see the device auto connect to it.
-            if not fd.wlan_policy_controller.save_network(
-                    self.hidden_ssid,
-                    self.hidden_security,
-                    password=self.hidden_password):
-                raise EnvironmentError("Failed to save network")
-
-            if not fd.wlan_policy_controller.wait_for_connect(
-                    self.hidden_ssid,
-                    self.hidden_security,
-                    timeout=TIME_WAIT_FOR_CONNECT):
-                raise signals.TestFailure("Failed to connect to network")
diff --git a/src/antlion/tests/wlan_policy/PolicyScanTest.py b/src/antlion/tests/wlan_policy/PolicyScanTest.py
deleted file mode 100644
index ca05e6a..0000000
--- a/src/antlion/tests/wlan_policy/PolicyScanTest.py
+++ /dev/null
@@ -1,235 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""This test exercises the Scan functionality for the WLAN Policy API."""
-
-from datetime import datetime
-
-from antlion import signals
-from antlion.controllers.ap_lib import (hostapd_ap_preset, hostapd_bss_settings,
-                                     hostapd_constants, hostapd_security)
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-
-class PolicyScanTest(WifiBaseTest):
-    """WLAN policy scan test class.
-
-    Test Bed Requirement:
-    * One or more Fuchsia devices
-    * One Whirlwind Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        if len(self.fuchsia_devices) < 1:
-            raise signals.TestFailure("No fuchsia devices found.")
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
-        if len(self.access_points) < 1:
-            raise signals.TestFailure("No access points found.")
-        # Prepare the AP
-        self.access_point = self.access_points[0]
-        self.access_point.stop_all_aps()
-        # Generate network params.
-        bss_settings_2g = []
-        bss_settings_5g = []
-        open_network = self.get_open_network(False, [])
-        self.open_network_2g = open_network["2g"]
-        self.open_network_5g = open_network["5g"]
-        wpa2_settings = self.get_psk_network(False, [])
-        self.wpa2_network_2g = wpa2_settings["2g"]
-        self.wpa2_network_5g = wpa2_settings["5g"]
-        bss_settings_2g.append(
-            hostapd_bss_settings.BssSettings(
-                name=self.wpa2_network_2g["SSID"],
-                ssid=self.wpa2_network_2g["SSID"],
-                security=hostapd_security.Security(
-                    security_mode=self.wpa2_network_2g["security"],
-                    password=self.wpa2_network_2g["password"])))
-        bss_settings_5g.append(
-            hostapd_bss_settings.BssSettings(
-                name=self.wpa2_network_5g["SSID"],
-                ssid=self.wpa2_network_5g["SSID"],
-                security=hostapd_security.Security(
-                    security_mode=self.wpa2_network_5g["security"],
-                    password=self.wpa2_network_5g["password"])))
-        self.ap_2g = hostapd_ap_preset.create_ap_preset(
-            iface_wlan_2g=self.access_points[0].wlan_2g,
-            iface_wlan_5g=self.access_points[0].wlan_5g,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.open_network_2g["SSID"],
-            bss_settings=bss_settings_2g)
-        self.ap_5g = hostapd_ap_preset.create_ap_preset(
-            iface_wlan_2g=self.access_points[0].wlan_2g,
-            iface_wlan_5g=self.access_points[0].wlan_5g,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=self.open_network_5g["SSID"],
-            bss_settings=bss_settings_5g)
-        # Start the networks
-        self.access_point.start_ap(hostapd_config=self.ap_2g)
-        self.access_point.start_ap(hostapd_config=self.ap_5g)
-        # Save the SSIDs
-        self.all_ssids = [
-            self.open_network_2g["SSID"],
-            self.wpa2_network_2g["SSID"],
-            self.open_network_5g["SSID"],
-            self.wpa2_network_5g["SSID"],
-        ]
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            # stub for setting up all the fuchsia devices in the testbed.
-            return fd.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections(
-            )
-
-    def teardown_test(self):
-        for fd in self.fuchsia_devices:
-            # stub until policy layer has something useful to use here.
-            pass
-
-    def teardown_class(self):
-        pass
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            try:
-                fd.take_bug_report(test_name, begin_time)
-                fd.get_log(test_name, begin_time)
-            except Exception:
-                pass
-
-            try:
-                if fd.device.hard_reboot_on_fail:
-                    fd.hard_power_cycle(self.pdu_devices)
-            except AttributeError:
-                pass
-
-    """Helper Functions"""
-
-    def perform_scan(self, fd):
-        """ Initiates scan on a Fuchsia device and returns results
-
-        Args:
-            fd: A fuchsia device
-
-        Raises:
-            signals.TestFailure: if an error is reported by the device during
-            the scan
-
-        Returns:
-            A list of scan results
-        """
-        start_time = datetime.now()
-
-        scan_response = fd.sl4f.wlan_policy_lib.wlanScanForNetworks()
-
-        # first check if we received an error
-        if scan_response.get("error") is not None:
-            # the response indicates an error - log and raise failure
-            raise signals.TestFailure("Aborting test - scan failed with "
-                                      "error: %s" % scan_response.get("error"))
-
-        # the scan command did not get an error response - go ahead
-        # and check for scan results
-        scan_results = scan_response["result"]
-        total_time_ms = (datetime.now() - start_time).total_seconds() * 1000
-
-        self.log.info("scan contained %d results", len(scan_results))
-        self.log.info("scan time: %d ms", total_time_ms)
-
-        return scan_results
-
-    def connect_to_network(self, wlan_network_params, fd):
-        """ Connects the Fuchsia device to the specified network
-
-        Args:
-            wlan_network_params: A dictionary containing wlan information.
-            fd: A fuchsia device
-
-        Raises:
-            signals.TestFailure: if the device fails to connect
-        """
-        target_ssid = wlan_network_params["SSID"]
-        target_pwd = wlan_network_params.get("password")
-        target_security = wlan_network_params.get("security")
-
-        # TODO(mnck): use the Policy version of this call, when it is available.
-        connection_response = fd.wlan_policy_controller.save_and_connect(
-            target_ssid, target_security, password=target_pwd)
-        if not connection_response:
-            raise signals.TestFailure("Aborting test - Connect call failed")
-        self.log.info("Network connection successful.")
-
-    def assert_network_is_in_results(self, scan_results, *, ssid):
-        """ Verified scan results contain a specified network
-
-        Args:
-            scan_results: Scan results from a fuchsia Policy API scan
-            ssid: SSID for network that should be in the results
-
-        Raises:
-            signals.TestFailure: if the network is not present in the scan
-            results
-        """
-        if ssid not in scan_results:
-            raise signals.TestFailure(
-                'Network "%s" was not found in scan results: %s', ssid,
-                scan_results)
-
-    """Tests"""
-
-    def test_basic_scan_request(self):
-        """Verify a scan returns all expected networks"""
-        for fd in self.fuchsia_devices:
-            scan_results = self.perform_scan(fd)
-            if len(scan_results) == 0:
-                raise signals.TestFailure("Scan failed or did not "
-                                          "find any networks")
-            for ssid in self.all_ssids:
-                self.assert_network_is_in_results(scan_results, ssid=ssid)
-
-    def test_scan_while_connected_open_network_2g(self):
-        """Connect to an open 2g network and perform a scan"""
-        for fd in self.fuchsia_devices:
-            self.connect_to_network(self.open_network_2g, fd)
-            scan_results = self.perform_scan(fd)
-            for ssid in self.all_ssids:
-                self.assert_network_is_in_results(scan_results, ssid=ssid)
-
-    def test_scan_while_connected_wpa2_network_2g(self):
-        """Connect to a WPA2 2g network and perform a scan"""
-        for fd in self.fuchsia_devices:
-            self.connect_to_network(self.wpa2_network_2g, fd)
-            scan_results = self.perform_scan(fd)
-            for ssid in self.all_ssids:
-                self.assert_network_is_in_results(scan_results, ssid=ssid)
-
-    def test_scan_while_connected_open_network_5g(self):
-        """Connect to an open 5g network and perform a scan"""
-        for fd in self.fuchsia_devices:
-            self.connect_to_network(self.open_network_5g, fd)
-            scan_results = self.perform_scan(fd)
-            for ssid in self.all_ssids:
-                self.assert_network_is_in_results(scan_results, ssid=ssid)
-
-    def test_scan_while_connected_wpa2_network_5g(self):
-        """Connect to a WPA2 5g network and perform a scan"""
-        for fd in self.fuchsia_devices:
-            self.connect_to_network(self.wpa2_network_5g, fd)
-            scan_results = self.perform_scan(fd)
-            for ssid in self.all_ssids:
-                self.assert_network_is_in_results(scan_results, ssid=ssid)
diff --git a/src/antlion/tests/wlan_policy/RegulatoryRecoveryTest.py b/src/antlion/tests/wlan_policy/RegulatoryRecoveryTest.py
deleted file mode 100644
index 3e308bb..0000000
--- a/src/antlion/tests/wlan_policy/RegulatoryRecoveryTest.py
+++ /dev/null
@@ -1,174 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import signals
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-
-
-class RegulatoryRecoveryTest(WifiBaseTest):
-    """Tests the policy layer's response to setting country code.
-
-    Test Bed Requirements:
-    * One Fuchsia device that is capable of operating as a WLAN client and AP.
-
-    Example Config:
-    "regulatory_recovery_test_params": {
-        "country_code": "US"
-    }
-
-    If no configuration information is provided, the test will default to
-    toggling between WW and US.
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        if len(self.fuchsia_devices) < 1:
-            raise EnvironmentError("No Fuchsia devices found.")
-
-        self.config_test_params = self.user_params.get(
-            "regulatory_recovery_test_params", {})
-        self.country_code = self.config_test_params.get("country_code", "US")
-        self.negative_test = self.config_test_params.get(
-            "negative_test", False)
-
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy')
-
-    def teardown_class(self):
-        if not self.negative_test:
-            for fd in self.fuchsia_devices:
-                fd.wlan_controller.set_country_code(self.country_code)
-
-        super().teardown_class()
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            # Remove all network configs.  These tests do not require
-            # connecting to a network, they merely verify whether or not client
-            # mode is functional.
-            if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError(
-                    "Failed to remove all networks in setup")
-
-            # To ensure that DUTs are initially in a known state, set all of
-            # their PHYs to world-wide mode.  Also disable client and AP
-            # functionality so that there is no automated WLAN behavior.
-            fd.wlan_controller.set_country_code("WW")
-            fd.wlan_policy_controller.stop_client_connections()
-            fd.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint()
-
-    def set_country_code(self, fd):
-        try:
-            fd.wlan_controller.set_country_code(self.country_code)
-        except EnvironmentError as e:
-            if self.negative_test:
-                # In the negative case, setting the country code for an
-                # invalid country should fail.
-                pass
-            else:
-                # If this is not a negative test case, re-raise the
-                # exception.
-                raise e
-        else:
-            # The negative test case should have failed to set the country
-            # code and the positive test case should succeed.
-            if self.negative_test:
-                raise EnvironmentError(
-                    "Setting invalid country code succeeded.")
-            else:
-                pass
-
-    def test_interfaces_not_recreated_when_initially_disabled(self):
-        """This test ensures that after a new regulatory region is applied
-        while client connections and access points are disabled, no new
-        interfaces are automatically recreated.
-        """
-        for fd in self.fuchsia_devices:
-            # Set the region code.
-            self.set_country_code(fd)
-
-            # Reset the listeners and verify the current state.
-            fd.sl4f.wlan_policy_lib.wlanSetNewListener()
-            fd.sl4f.wlan_ap_policy_lib.wlanSetNewListener()
-
-            # Verify that the client and AP are still stopped.
-            client_state = fd.sl4f.wlan_policy_lib.wlanGetUpdate()
-            if client_state["error"]:
-                raise signals.TestFailure(
-                    "error querying client state: {}".format(
-                        client_state["error"]))
-            elif client_state["result"]["state"] != "ConnectionsDisabled":
-                raise signals.TestFailure(
-                    "client connections in unexpected state: {}".format(
-                        client_state["result"]["state"]))
-
-            ap_state = fd.sl4f.wlan_ap_policy_lib.wlanGetUpdate()
-            if ap_state["error"]:
-                raise signals.TestFailure("error querying AP state: {}".format(
-                    ap_state["error"]))
-
-            ap_updates = ap_state["result"]
-            if ap_updates:
-                raise signals.TestFailure(
-                    "AP in unexpected state: {}".format(ap_updates))
-
-    def test_interfaces_recreated_when_initially_enabled(self):
-        """This test ensures that after a new regulatory region is applied
-        while client connections and access points are enabled, all
-        interfaces are recreated.
-        """
-        test_ssid = "test_ssid"
-        test_security_type = "none"
-        for fd in self.fuchsia_devices:
-            # Start client connections and start an AP before setting the
-            # country code.
-            fd.wlan_policy_controller.start_client_connections()
-            fd.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-                test_ssid, test_security_type, "", "local_only", "any")
-
-            # Set the country code.
-            self.set_country_code(fd)
-
-            # Reset the listeners and verify the current state.
-            fd.sl4f.wlan_policy_lib.wlanSetNewListener()
-            fd.sl4f.wlan_ap_policy_lib.wlanSetNewListener()
-
-            # Verify that client connections are enabled and the AP is brought
-            # up again.
-            client_state = fd.sl4f.wlan_policy_lib.wlanGetUpdate()
-            if client_state["error"]:
-                raise signals.TestFailure(
-                    "error querying client state: {}".format(
-                        client_state["error"]))
-            elif client_state["result"]["state"] != "ConnectionsEnabled":
-                raise signals.TestFailure(
-                    "client connections in unexpected state: {}".format(
-                        client_state["result"]["state"]))
-
-            ap_state = fd.sl4f.wlan_ap_policy_lib.wlanGetUpdate()
-            if ap_state["error"]:
-                raise signals.TestFailure("error querying AP state: {}".format(
-                    ap_state["error"]))
-
-            ap_updates = ap_state["result"]
-            if len(ap_updates) != 1:
-                raise signals.TestFailure(
-                    "No APs are running: {}".format(ap_updates))
-            else:
-                if ap_updates[0]["id"]["ssid"] != test_ssid or ap_updates[0][
-                        "id"]["type_"].lower() != test_security_type:
-                    raise signals.TestFailure(
-                        "AP in unexpected state: {}".format(ap_updates[0]))
diff --git a/src/antlion/tests/wlan_policy/SavedNetworksTest.py b/src/antlion/tests/wlan_policy/SavedNetworksTest.py
deleted file mode 100644
index ca9ade2..0000000
--- a/src/antlion/tests/wlan_policy/SavedNetworksTest.py
+++ /dev/null
@@ -1,357 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-A test that saves various networks and verifies the behavior of save, get, and
-remove through the ClientController API of WLAN policy.
-"""
-
-from antlion import signals
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.utils import rand_ascii_str, rand_hex_str
-
-PSK_LEN = 64
-TIME_WAIT_FOR_DISCONNECT = 30
-TIME_WAIT_FOR_CONNECT = 30
-
-STATE_CONNECTED = "Connected"
-STATE_CONNECTING = "Connecting"
-CONNECTIONS_ENABLED = "ConnectionsEnabled"
-CONNECTIONS_DISABLED = "ConnectionsDisabled"
-SECURITY_NONE = "none"
-WEP = "wep"
-WPA = "wpa"
-WPA2 = "wpa2"
-WPA3 = "wpa3"
-CREDENTIAL_TYPE_NONE = "none"
-PASSWORD = "password"
-PSK = "psk"
-CREDENTIAL_VALUE_NONE = ""
-
-
-class SavedNetworksTest(WifiBaseTest):
-    """WLAN policy commands test class.
-
-    Test Bed Requirement:
-    * One or more Fuchsia devices
-    * One Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        # Keep track of whether we have started an access point in a test
-        if len(self.fuchsia_devices) < 1:
-            raise EnvironmentError("No Fuchsia devices found.")
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError(
-                    "Failed to remove all networks in setup")
-        self.access_points[0].stop_all_aps()
-
-    def teardown_class(self):
-        for fd in self.fuchsia_devices:
-            fd.wlan_policy_controller.remove_all_networks()
-        self.access_points[0].stop_all_aps()
-
-    def save_bad_network(self, fd, ssid, security_type, password=""):
-        """ Saves a network as specified on the given device and verify that we
-        Args:
-            fd: The Fuchsia device to save the network on
-            ssid: The SSID or name of the network to save.
-            security_type: The security type to save the network as, ie "none",
-                        "wep", "wpa", "wpa2", or "wpa3"
-            password: The password to save for the network. Empty string represents
-                    no password, and PSK should be provided as 64 character hex string.
-        """
-        if fd.wlan_policy_controller.save_network(ssid,
-                                                  security_type,
-                                                  password=password):
-            self.log.info(
-                "Attempting to save bad network config %s did not give an error"
-                % ssid)
-            raise signals.TestFailure("Failed to get error saving bad network")
-
-    def check_get_saved_network(self, fd, ssid, security_type, credential_type,
-                                credential_value):
-        """ Verify that get saved networks sees the single specified network. Used
-            for the tests that save and get a single network. Maps security types of
-            expected and actual to be case insensitive.
-        Args:
-            fd: Fuchsia device to run on.
-            ssid: The name of the network to check for.
-            security_type: The security of the network, ie "none", "wep", "wpa",
-                        "wpa2", or "wpa3".
-            credential_type: The type of credential saved for the network, ie
-                            "none", "password", or "psk".
-            credential_value: The actual credential, or "" if there is no credential.
-        """
-        expected_networks = [{
-            "ssid": ssid,
-            "security_type": security_type,
-            "credential_type": credential_type,
-            "credential_value": credential_value
-        }]
-        self.check_saved_networks(fd, expected_networks)
-
-    def check_saved_networks(self, fd, expected_networks):
-        """ Verify that the saved networks we get from the device match the provided
-            list of networks.
-        Args:
-            fd: The Fuchsia device to run on.
-            expected_networks: The list of networks we expect to get from the device,
-                            unordered and in the same format as we would get:
-                            [{"credential_type": _, "credential_value": _,
-                            "security_type": _, "ssid": _}, ...] There should be
-                            no duplicates in expected networks.
-        """
-        actual_networks = list(
-            map(self.lower_case_network,
-                fd.wlan_policy_controller.get_saved_networks()))
-        expected_networks = list(
-            map(self.lower_case_network,
-                fd.wlan_policy_controller.get_saved_networks()))
-
-        if len(actual_networks) != len(expected_networks):
-            self.log.info(
-                "Number of expected saved networks does not match the actual number."
-                "Expected: %d, actual: %d" %
-                (len(actual_networks), len(expected_networks)))
-            raise signals.TestFailure(
-                "Failed to get the expected number of saved networks")
-        for network in actual_networks:
-            if network not in expected_networks:
-                self.log.info(
-                    "Actual and expected networks do not match. Actual: %s,\n"
-                    "Expected: %s" % (actual_networks, expected_networks))
-                raise signals.TestFailure("Got an unexpected saved network")
-
-    def lower_case_network(self, network):
-        if "security_type" not in network:
-            self.log.error("Missing security type in network %s" % network)
-            raise signals.TestFailure("Network is missing security type")
-        if "credential_type" not in network:
-            self.log.error("Missing credential type in network %s" % network)
-            raise signals.TestFailure("Network is missing credential type")
-        {"ssid": network["ssid"], "security_type": network["security_type"]}
-
-    def save_and_check_network(self, ssid, security_type, password=""):
-        """ Perform a test for saving, getting, and removing a single network on each
-            device.
-        Args:
-            ssid: The network name to use.
-            security_type: The security of the network as a string, ie "none",
-                        "wep", "wpa", "wpa2", or "wpa3" (case insensitive)
-            password: The password of the network. PSK should be given as 64
-                    hexadecimal characters and none should be an empty string.
-        """
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.save_network(
-                    ssid, security_type, password=password):
-                raise signals.TestFailure("Failed to save network")
-            self.check_get_saved_network(fd, ssid, security_type,
-                                         self.credentialType(password),
-                                         password)
-
-    def start_ap(self, ssid, security_type, password=None, hidden=False):
-        """ Starts an access point.
-        Args:
-            ssid: the SSID of the network to broadcast
-            security_type: the security type of the network to be broadcasted. This can be
-                None, "wep" "wpa", "wpa2", or "wpa3" (or from hostapd_constants.py)
-            password: the password to connect to the broadcasted network. The password is ignored
-                if security type is none.
-        """
-        # Put together the security configuration of the network to be
-        # broadcasted. Open networks are represented by no security.
-        if security_type == None or security_type.upper() == SECURITY_NONE:
-            security = None
-        else:
-            security = hostapd_security.Security(security_mode=security_type,
-                                                 password=password)
-
-        if len(self.access_points) > 0:
-            # Create an AP with default values other than the specified values.
-            setup_ap(self.access_points[0],
-                     'whirlwind',
-                     hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                     ssid,
-                     security=security)
-
-        else:
-            self.log.error(
-                "No access point available for test, please check config")
-            raise EnvironmentError("Failed to set up AP for test")
-
-    def credentialType(self, credentialValue):
-        """ Returns the type of the credential to compare against values reported """
-        if len(credentialValue) == PSK_LEN:
-            return PSK
-        elif len(credentialValue) == 0:
-            return "none"
-        else:
-            return PASSWORD
-
-    def same_network_identifier(self, net_id, ssid, security_type):
-        """ Returns true if the network id is made of the given ssid and security
-            type, and false otherwise. Security type check is case insensitive.
-        """
-        return net_id["ssid"] == ssid and net_id["type_"].upper(
-        ) == security_type.upper()
-
-    """Tests"""
-
-    def test_open_network_with_password(self):
-        for fd in self.fuchsia_devices:
-            # Save an open network with a password and verify that it fails to
-            # save.
-            self.save_bad_network(fd, rand_ascii_str(10), SECURITY_NONE,
-                                  rand_ascii_str(8))
-            self.check_saved_networks(fd, {})
-
-    def test_open_network(self):
-        ssid = rand_ascii_str(10)
-        self.save_and_check_network(ssid, SECURITY_NONE)
-
-    def test_network_with_psk(self):
-        ssid = rand_ascii_str(11)
-        # PSK are translated from hex to bytes when saved, and when returned
-        # by get_saved_networks it will be lower case.
-        psk = rand_hex_str(PSK_LEN).lower()
-        self.save_and_check_network(ssid, WPA2, psk)
-
-    def test_wep_network(self):
-        ssid = rand_ascii_str(12)
-        password = rand_ascii_str(13)
-        self.save_and_check_network(ssid, WEP, password)
-
-    def test_wpa2_network(self):
-        ssid = rand_ascii_str(9)
-        password = rand_ascii_str(15)
-        self.save_and_check_network(ssid, WPA2, password)
-
-    def test_wpa_network(self):
-        ssid = rand_ascii_str(16)
-        password = rand_ascii_str(9)
-        self.save_and_check_network(ssid, WPA, password)
-
-    def test_wpa3_network(self):
-        ssid = rand_ascii_str(9)
-        password = rand_ascii_str(15)
-        self.save_and_check_network(ssid, WPA3, password)
-
-    def test_save_network_persists(self):
-        ssid = rand_ascii_str(10)
-        security = WPA2
-        password = rand_ascii_str(10)
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.save_network(
-                    ssid, security, password=password):
-                raise signals.TestFailure("Failed to save network")
-            # Reboot the device. The network should be persistently saved
-            # before the command is completed.
-            fd.reboot()
-            self.check_get_saved_network(fd, ssid, security, PASSWORD,
-                                         password)
-
-    def test_same_ssid_diff_security(self):
-        for fd in self.fuchsia_devices:
-            saved_networks = fd.wlan_policy_controller.get_saved_networks()
-            ssid = rand_ascii_str(19)
-            password = rand_ascii_str(12)
-            if not fd.wlan_policy_controller.save_network(
-                    ssid, WPA2, password=password):
-                raise signals.TestFailure("Failed to save network")
-            saved_networks.append({
-                "ssid": ssid,
-                "security_type": WPA2,
-                "credential_type": PASSWORD,
-                "credential_value": password
-            })
-            if not fd.wlan_policy_controller.save_network(ssid, SECURITY_NONE):
-                raise signals.TestFailure("Failed to save network")
-            saved_networks.append({
-                "ssid": ssid,
-                "security_type": SECURITY_NONE,
-                "credential_type": CREDENTIAL_TYPE_NONE,
-                "credential_value": CREDENTIAL_VALUE_NONE
-            })
-            actual_networks = fd.wlan_policy_controller.get_saved_networks()
-            # Both should be saved and present in network store since the have
-            # different security types and therefore different network identifiers.
-            self.check_saved_networks(fd, actual_networks)
-
-    def test_remove_disconnects(self):
-        # If we save, connect to, then remove the network while still connected
-        # to it, we expect the network will disconnect. This test requires a
-        # wpa2 network in the test config. Remove all other networks first so
-        # that we can't auto connect to them
-        ssid = rand_ascii_str(10)
-        security = WPA2
-        password = rand_ascii_str(10)
-        self.start_ap(ssid, security, password)
-
-        for fd in self.fuchsia_devices:
-            fd.wlan_policy_controller.wait_for_no_connections()
-
-            if not fd.wlan_policy_controller.save_and_connect:
-                raise signals.TestFailure(
-                    "Failed to saved and connect to network")
-
-            if not fd.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections(
-            ):
-                raise signals.TestFailure(
-                    "Failed to disconnect from removed network")
-
-    def test_auto_connect_open(self):
-        # Start up AP with an open network with a random SSID
-        ssid = rand_ascii_str(10)
-        self.start_ap(ssid, None)
-        for fd in self.fuchsia_devices:
-            fd.wlan_policy_controller.wait_for_no_connections()
-
-            # Save the network and make sure that we see the device auto connect to it.
-            security = SECURITY_NONE
-            password = CREDENTIAL_VALUE_NONE
-            if not fd.wlan_policy_controller.save_network(
-                    ssid, security, password=password):
-                raise signals.TestFailure("Failed to save network")
-            if not fd.wlan_policy_controller.wait_for_connect(
-                    ssid, security, timeout=TIME_WAIT_FOR_CONNECT):
-                raise signals.TestFailure("Failed to connect to network")
-
-    def test_auto_connect_wpa3(self):
-        # Start up AP with an open network with a random SSID
-        ssid = rand_ascii_str(10)
-        security = WPA3
-        password = rand_ascii_str(10)
-        self.start_ap(ssid, security, password)
-        for fd in self.fuchsia_devices:
-            fd.wlan_policy_controller.wait_for_no_connections()
-
-            # Save the network and make sure that we see the device auto connect to it.
-            if not fd.wlan_policy_controller.save_network(
-                    ssid, security, password=password):
-                raise signals.TestFailure("Failed to save network")
-            if not fd.wlan_policy_controller.wait_for_connect(
-                    ssid, security, timeout=TIME_WAIT_FOR_CONNECT):
-                raise signals.TestFailure("Failed to connect to network")
diff --git a/src/antlion/tests/wlan_policy/StartStopClientConnectionsTest.py b/src/antlion/tests/wlan_policy/StartStopClientConnectionsTest.py
deleted file mode 100644
index 01a3094..0000000
--- a/src/antlion/tests/wlan_policy/StartStopClientConnectionsTest.py
+++ /dev/null
@@ -1,195 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion import signals
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi.WifiBaseTest import WifiBaseTest
-from antlion.utils import rand_ascii_str
-
-DISCONNECTED = "Disconnected"
-CONNECTION_STOPPED = "ConnectionStopped"
-CONNECTIONS_ENABLED = "ConnectionsEnabled"
-CONNECTIONS_DISABLED = "ConnectionsDisabled"
-WPA2 = "wpa2"
-UPDATE_TIMEOUT_SEC = 5
-
-
-class StartStopClientConnectionsTest(WifiBaseTest):
-    """ Tests that we see the expected behavior with enabling and disabling
-        client connections
-
-    Test Bed Requirement:
-    * One or more Fuchsia devices
-    * One Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        # Start an AP with a hidden network
-        self.ssid = rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        self.access_point = self.access_points[0]
-        self.password = rand_ascii_str(
-            hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
-        self.security_type = WPA2
-        security = hostapd_security.Security(security_mode=self.security_type,
-                                             password=self.password)
-
-        self.access_point.stop_all_aps()
-        # TODO(63719) use varying values for AP that shouldn't affect the test.
-        setup_ap(self.access_point,
-                 'whirlwind',
-                 hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                 self.ssid,
-                 security=security)
-
-        if len(self.fuchsia_devices) < 1:
-            raise EnvironmentError("No Fuchsia devices found.")
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism='policy',
-                              preserve_saved_networks=True)
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError(
-                    "Failed to remove all networks in setup")
-
-    def teardown_class(self):
-        self.access_point.stop_all_aps()
-
-    def connect_and_validate(self, fd, ssid, security_type, expected_response):
-        """ Sends a connect request to the device and verifies we get a response
-            without error. This does not validate that a connection will be
-            attempted. This will fail the test if there is an error sending the
-            connect request, or if we don't get the expected connect response."""
-        result_connect = fd.sl4f.wlan_policy_lib.wlanConnect(
-            ssid, security_type)
-        if result_connect.get("error") != None:
-            self.log.error("Error occurred requesting a connection: %s" %
-                           result_connect.get("error"))
-            raise EnvironmentError("Failed to send connect request")
-        response = result_connect.get("result")
-        if response != expected_response:
-            self.log.error(
-                "Incorrect connect request response. Expected: \"%s\", Actual: %s"
-                % (expected_response, response))
-            raise signals.TestFailure(
-                "Failed to get expected connect response")
-
-    def await_state_update(self, fd, desired_state, timeout):
-        """ This function polls the policy client state until it converges to
-            the caller's desired state.
-
-        Args:
-            fd: A FuchsiaDevice
-            desired_state: The expected client policy state.
-            timeout: Number of seconds to wait for the policy state to become
-                     the desired_state.
-        Returns:
-            None assuming the desired state has been reached.
-        Raises:
-            TestFailure if the desired state is not reached by the timeout.
-        """
-        start_time = time.time()
-        curr_state = None
-        while time.time() < start_time + timeout:
-            fd.sl4f.wlan_policy_lib.wlanSetNewListener()
-            curr_state = fd.sl4f.wlan_policy_lib.wlanGetUpdate()
-            if curr_state.get("error"):
-                self.log.error("Error occurred getting status update: %s" %
-                               curr_state.get("error"))
-                raise EnvironmentError("Failed to get update")
-
-            if curr_state.get("result") and curr_state.get(
-                    "result") == desired_state:
-                return
-
-            time.sleep(1)
-
-        self.log.error(
-            "Client state did not converge to the expected state in %s "
-            "seconds. Expected update: %s Actual update: %s" %
-            (timeout, desired_state, curr_state))
-        raise signals.TestFailure("Client policy layer is in unexpected state")
-
-    def test_stop_client_connections_update(self):
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.stop_client_connections():
-                raise EnvironmentError("Failed to stop client connecions")
-
-            # Check that the most recent update says that the device is not
-            # connected to anything and client connections are disabled
-            expected_update = {"networks": [], "state": CONNECTIONS_DISABLED}
-            self.await_state_update(fd, expected_update, UPDATE_TIMEOUT_SEC)
-
-    def test_start_client_connections_update(self):
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.start_client_connections():
-                raise EnvironmentError("Failed to start client connecions")
-
-            # Check that the most recent update says that the device is not
-            # connected to anything and client connections are disabled
-            expected_update = {"networks": [], "state": CONNECTIONS_ENABLED}
-            self.await_state_update(fd, expected_update, UPDATE_TIMEOUT_SEC)
-
-    def test_stop_client_connections_rejects_connections(self):
-        # Test that if we turn client connections off, our requests to connect
-        # are rejected.
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.stop_client_connections():
-                raise EnvironmentError("Failed to stop client connecions")
-
-            # Save the network, otherwise connecting may fail because the
-            # network is not saved instead of client connections being off
-            if not fd.wlan_policy_controller.save_network(
-                    self.ssid, self.security_type, password=self.password):
-                raise EnvironmentError("Failed to save network")
-            expected_response = "RejectedIncompatibleMode"
-            self.connect_and_validate(fd, self.ssid, self.security_type,
-                                      expected_response)
-
-    def test_start_stop_client_connections(self):
-        # Test that if we turn client connections on the device will connect,
-        # and if we turn of client connections the device will disconnect.
-        for fd in self.fuchsia_devices:
-            # Start client connections and check that we can
-            if not fd.wlan_policy_controller.save_network(
-                    self.ssid, self.security_type, password=self.password):
-                raise EnvironmentError("Failed to save network")
-            if not fd.wlan_policy_controller.start_client_connections():
-                raise EnvironmentError("Failed to start client connections")
-
-            expected_response = "Acknowledged"
-            self.connect_and_validate(fd, self.ssid, self.security_type,
-                                      expected_response)
-
-            if not fd.wlan_policy_controller.wait_for_connect(
-                    self.ssid, self.security_type):
-                raise signals.TestFailure(
-                    "Failed to connect after starting client connections")
-
-            # Stop client connections again and check that we disconnect
-            if not fd.wlan_policy_controller.stop_client_connections():
-                raise EnvironmentError("Failed to stop client connecions")
-            if not fd.wlan_policy_controller.wait_for_disconnect(
-                    self.ssid, self.security_type, DISCONNECTED,
-                    CONNECTION_STOPPED):
-                raise signals.TestFailure(
-                    "Failed to disconnect after client connections stopped")
diff --git a/src/antlion/tracelogger.py b/src/antlion/tracelogger.py
deleted file mode 100644
index 1d2650e..0000000
--- a/src/antlion/tracelogger.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import inspect
-import os
-
-
-class TraceLogger(object):
-    def __init__(self, logger):
-        self._logger = logger
-
-    @staticmethod
-    def _get_trace_info(level=1, offset=2):
-        # We want the stack frame above this and above the error/warning/info
-        inspect_stack = inspect.stack()
-        trace_info = ''
-        for i in range(level):
-            try:
-                stack_frames = inspect_stack[offset + i]
-                info = inspect.getframeinfo(stack_frames[0])
-                trace_info = '%s[%s:%s:%s]' % (trace_info,
-                                               os.path.basename(info.filename),
-                                               info.function, info.lineno)
-            except IndexError:
-                break
-        return trace_info
-
-    def _log_with(self, logging_lambda, trace_level, msg, *args, **kwargs):
-        trace_info = TraceLogger._get_trace_info(level=trace_level, offset=3)
-        logging_lambda('%s %s' % (msg, trace_info), *args, **kwargs)
-
-    def exception(self, msg, *args, **kwargs):
-        self._log_with(self._logger.exception, 5, msg, *args, **kwargs)
-
-    def debug(self, msg, *args, **kwargs):
-        self._log_with(self._logger.debug, 3, msg, *args, **kwargs)
-
-    def error(self, msg, *args, **kwargs):
-        self._log_with(self._logger.error, 3, msg, *args, **kwargs)
-
-    def warn(self, msg, *args, **kwargs):
-        self._log_with(self._logger.warn, 3, msg, *args, **kwargs)
-
-    def warning(self, msg, *args, **kwargs):
-        self._log_with(self._logger.warning, 3, msg, *args, **kwargs)
-
-    def info(self, msg, *args, **kwargs):
-        self._log_with(self._logger.info, 1, msg, *args, **kwargs)
-
-    def __getattr__(self, name):
-        return getattr(self._logger, name)
diff --git a/src/antlion/unit_tests/AttenuatorSanityTest.py b/src/antlion/unit_tests/AttenuatorSanityTest.py
deleted file mode 100644
index 0864cbb..0000000
--- a/src/antlion/unit_tests/AttenuatorSanityTest.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import random
-from antlion.base_test import BaseTestClass
-
-CONSERVATIVE_MAX_ATTEN_VALUE = 10
-MIN_ATTEN_VALUE = 0
-
-
-class AttenuatorSanityTest(BaseTestClass):
-    def __init__(self, controllers):
-        BaseTestClass.__init__(self, controllers)
-        self.tests = ("test_attenuator_validation",
-                      "test_attenuator_get_max_value", )
-        self.number_of_iteration = 2
-
-    def test_attenuator_validation(self):
-        """Validate attenuator set and get APIs works fine.
-        """
-        for atten in self.attenuators:
-            self.log.info("Attenuator: {}".format(atten))
-            try:
-                atten_max_value = atten.get_max_atten()
-            except ValueError as e:
-                self.log.error(e)
-                self.log.info("Using conservative max value.")
-                atten_max_value = CONSERVATIVE_MAX_ATTEN_VALUE
-
-            atten_value_list = [MIN_ATTEN_VALUE, atten_max_value]
-            for i in range(0, self.number_of_iteration):
-                atten_value_list.append(
-                    int(random.uniform(0, atten_max_value)))
-
-            for atten_val in atten_value_list:
-                self.log.info("Set atten to {}".format(atten_val))
-                atten.set_atten(atten_val)
-                current_atten = int(atten.get_atten())
-                self.log.info("Current atten = {}".format(current_atten))
-                assert atten_val == current_atten, "Setting attenuator failed."
-
-        return True
-
-    def test_attenuator_get_max_value(self):
-        """Validate attenuator get_max_atten APIs works fine.
-        """
-        for atten in self.attenuators:
-            try:
-                atten_max_value = atten.get_max_atten()
-            except ValueError as e:
-                self.log.error(e)
-                return False
-        return True
diff --git a/src/antlion/unit_tests/IntegrationTest.py b/src/antlion/unit_tests/IntegrationTest.py
deleted file mode 100755
index 0dff5f2..0000000
--- a/src/antlion/unit_tests/IntegrationTest.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import asserts
-from antlion import base_test
-from antlion import test_runner
-
-import mock_controller
-
-
-class IntegrationTest(base_test.BaseTestClass):
-    def setup_class(self):
-        self.register_controller(mock_controller)
-
-    def test_hello_world(self):
-        asserts.assert_equal(self.user_params["icecream"], 42)
-        asserts.assert_equal(self.user_params["extra_param"], "haha")
-        self.log.info("This is a bare minimal test to make sure the basic ACTS"
-                      "test flow works.")
-        asserts.explicit_pass("Hello World")
diff --git a/src/antlion/unit_tests/Sl4aSanityTest.py b/src/antlion/unit_tests/Sl4aSanityTest.py
deleted file mode 100644
index a651b78..0000000
--- a/src/antlion/unit_tests/Sl4aSanityTest.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.base_test import BaseTestClass
-
-
-class Sl4aSanityTest(BaseTestClass):
-    """Tests for sl4a basic sanity.
-
-    Run these tests individually with option -r 100.
-    """
-
-    def __init__(self, controllers):
-        BaseTestClass.__init__(self, controllers)
-        self.tests = ("test_bring_up_and_shutdown",
-                      "test_message_then_shutdown_stress")
-
-    def test_bring_up_and_shutdown(self):
-        """Constantly start and terminate sl4a sessions.
-
-        Verify in log that the "manager map key" is always empty before a
-        session starts.
-        Verify in log by looking at timestamps that after the test finishes, no
-        more message regarding sl4a happens.
-        """
-        ad = self.android_devices[0]
-        for i in range(100):
-            self.log.info("Iteration %d, terminating." % i)
-            ad.terminate_all_sessions()
-            self.log.info("Iteration %d, starting." % i)
-            droid, ed = ad.get_droid()
-        return True
diff --git a/src/antlion/unit_tests/SnifferSanityTest.py b/src/antlion/unit_tests/SnifferSanityTest.py
deleted file mode 100644
index 56ae89b..0000000
--- a/src/antlion/unit_tests/SnifferSanityTest.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import base_test
-from antlion.controllers.sniffer import Sniffer
-
-
-class SnifferSanityTest(base_test.BaseTestClass):
-    def setup_class(self):
-        self._channels = [6, 44]
-
-        # capture (sniff) for 30 seconds or 10 packets - whichever comes first
-        self._capture_sec = 30
-        self._packet_count = 10
-
-        self._filter = {"tcpdump": "type mgt subtype beacon",
-                        "tshark": "type mgt subtype beacon"}
-
-    def test_sniffer_validation_using_with(self):
-        """Validate sniffer configuration & capture API using the 'with' clause.
-
-        This is the standard example - this syntax should typically be used.
-        """
-        index = 0
-        for sniffer in self.sniffers:
-            for channel in self._channels:
-                with sniffer.start_capture(
-                        override_configs={Sniffer.CONFIG_KEY_CHANNEL: channel},
-                        duration=self._capture_sec,
-                        packet_count=self._packet_count):
-                    self.log.info("Capture: %s", sniffer.get_capture_file())
-
-    def test_sniffer_validation_manual(self):
-        """Validate sniffer configuration & capture API using a manual/raw
-        API mechanism.
-
-        The standard process should use a with clause. This demonstrates the
-        manual process which uses an explicit wait_for_capture() call.
-        Alternatively, could also use a sleep() + stop_capture() process
-        (though that mechanism won't terminate early if the capture is done).
-        """
-        index = 0
-        for sniffer in self.sniffers:
-            for channel in self._channels:
-                sniffer.start_capture(
-                    override_configs={Sniffer.CONFIG_KEY_CHANNEL: channel},
-                    packet_count=self._packet_count)
-                self.log.info("Capture: %s", sniffer.get_capture_file())
-                sniffer.wait_for_capture(timeout=self._capture_sec)
-
-    def test_sniffer_validation_capture_3_beacons(self):
-        """Demonstrate the use of additional configuration.
-        """
-        index = 0
-        for sniffer in self.sniffers:
-            for channel in self._channels:
-                with sniffer.start_capture(
-                        override_configs={Sniffer.CONFIG_KEY_CHANNEL: channel},
-                        duration=self._capture_sec,
-                        packet_count=3,
-                        additional_args=self._filter[sniffer.get_subtype()]):
-                    self.log.info("Capture: %s", sniffer.get_capture_file())
diff --git a/src/antlion/unit_tests/acts_adb_test.py b/src/antlion/unit_tests/acts_adb_test.py
deleted file mode 100755
index dcaa91b..0000000
--- a/src/antlion/unit_tests/acts_adb_test.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-import mock
-from antlion.controllers import adb
-from antlion.controllers.adb_lib.error import AdbCommandError
-from antlion.controllers.adb_lib.error import AdbError
-
-
-class MockJob(object):
-    def __init__(self, exit_status=0, stderr='', stdout=''):
-        self.exit_status = exit_status
-        self.stderr = stderr
-        self.stdout = stdout
-
-
-class MockAdbProxy(adb.AdbProxy):
-    def __init__(self):
-        pass
-
-
-class ADBTest(unittest.TestCase):
-    """A class for testing antlion/controllers/adb.py"""
-
-    def test__exec_cmd_failure_old_adb(self):
-        mock_job = MockJob(exit_status=1, stderr='error: device not found')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
-            with self.assertRaises(AdbError):
-                MockAdbProxy()._exec_cmd(cmd)
-
-    def test__exec_cmd_failure_new_adb(self):
-        mock_job = MockJob(
-            exit_status=1, stderr='error: device \'DEADBEEF\' not found')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
-            with self.assertRaises(AdbError):
-                MockAdbProxy()._exec_cmd(cmd)
-
-    def test__exec_cmd_pass_basic(self):
-        mock_job = MockJob(exit_status=0, stderr='DEADBEEF', stdout='FEEDACAB')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
-            result = MockAdbProxy()._exec_cmd(cmd)
-        self.assertEqual(result, 'FEEDACAB')
-
-    def test__exec_cmd_ignore_status(self):
-        mock_job = MockJob(exit_status=0, stderr='DEADBEEF', stdout='')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
-            result = MockAdbProxy()._exec_cmd(cmd, ignore_status=True)
-        self.assertEqual(result, 'DEADBEEF')
-
-    def test__exec_cmd_pass_grep(self):
-        mock_job = MockJob(exit_status=1, stderr='', stdout='foo')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"grep foo"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
-            result = MockAdbProxy()._exec_cmd(cmd)
-        self.assertEqual(result, 'foo')
-
-    def test__exec_cmd_failure_ret_nonzero(self):
-        mock_job = MockJob(exit_status=1, stderr='error not related to adb')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
-            with self.assertRaises(AdbCommandError):
-                MockAdbProxy()._exec_cmd(cmd)
-
-    def test__exec_cmd_raises_on_bind_error(self):
-        """Tests _exec_cmd raises an AdbError on port forwarding failure."""
-        mock_job = MockJob(exit_status=1,
-                           stderr='error: cannot bind listener: '
-                                  'Address already in use',
-                           stdout='')
-        cmd = ['adb', '-s', '"SOME_SERIAL"', 'shell', '"SOME_SHELL_CMD"']
-        with mock.patch('antlion.libs.proc.job.run', return_value=mock_job):
-            with self.assertRaises(AdbError):
-                MockAdbProxy()._exec_cmd(cmd)
-
-    def test__get_version_number_gets_version_number(self):
-        """Tests the positive case for AdbProxy.get_version_number()."""
-        proxy = MockAdbProxy()
-        expected_version_number = 39
-        proxy.version = lambda: ('Android Debug Bridge version 1.0.%s\nblah' %
-                                 expected_version_number)
-        self.assertEqual(expected_version_number, proxy.get_version_number())
-
-    def test__get_version_number_raises_upon_parse_failure(self):
-        """Tests the failure case for AdbProxy.get_version_number()."""
-        proxy = MockAdbProxy()
-        proxy.version = lambda: 'Bad format'
-        with self.assertRaises(AdbError):
-            proxy.get_version_number()
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_android_device_test.py b/src/antlion/unit_tests/acts_android_device_test.py
deleted file mode 100755
index b8591fd..0000000
--- a/src/antlion/unit_tests/acts_android_device_test.py
+++ /dev/null
@@ -1,717 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import mock
-import os
-import shutil
-import tempfile
-import unittest
-
-from antlion import logger
-from antlion.controllers import android_device
-from antlion.controllers.android_lib import errors
-
-# Mock log path for a test run.
-MOCK_LOG_PATH = "/tmp/logs/MockTest/xx-xx-xx_xx-xx-xx/"
-
-# Mock start and end time of the adb cat.
-MOCK_ADB_EPOCH_BEGIN_TIME = 191000123
-MOCK_ADB_LOGCAT_BEGIN_TIME = logger.normalize_log_line_timestamp(
-    logger.epoch_to_log_line_timestamp(MOCK_ADB_EPOCH_BEGIN_TIME))
-MOCK_ADB_LOGCAT_END_TIME = "1970-01-02 21:22:02.000"
-
-MOCK_SERIAL = 1
-MOCK_RELEASE_BUILD_ID = "ABC1.123456.007"
-MOCK_DEV_BUILD_ID = "ABC-MR1"
-MOCK_NYC_BUILD_ID = "N4F27P"
-
-
-def get_mock_ads(num):
-    """Generates a list of mock AndroidDevice objects.
-
-    The serial number of each device will be integer 0 through num - 1.
-
-    Args:
-        num: An integer that is the number of mock AndroidDevice objects to
-            create.
-    """
-    ads = []
-    for i in range(num):
-        ad = mock.MagicMock(name="AndroidDevice", serial=i, h_port=None)
-        ad.ensure_screen_on = mock.MagicMock(return_value=True)
-        ads.append(ad)
-    return ads
-
-
-def mock_get_all_instances():
-    return get_mock_ads(5)
-
-
-def mock_list_adb_devices():
-    return [ad.serial for ad in get_mock_ads(5)]
-
-
-class MockAdbProxy(object):
-    """Mock class that swaps out calls to adb with mock calls."""
-
-    def __init__(self,
-                 serial,
-                 fail_br=False,
-                 fail_br_before_N=False,
-                 build_id=MOCK_RELEASE_BUILD_ID,
-                 return_value=None):
-        self.serial = serial
-        self.fail_br = fail_br
-        self.fail_br_before_N = fail_br_before_N
-        self.return_value = return_value
-        self.return_multiple = False
-        self.build_id = build_id
-
-    def shell(self, params, ignore_status=False, timeout=60):
-        if params == "id -u":
-            return "root"
-        elif params == "bugreportz":
-            if self.fail_br:
-                return "OMG I died!\n"
-            return "OK:/path/bugreport.zip\n"
-        elif params == "bugreportz -v":
-            if self.fail_br_before_N:
-                return "/system/bin/sh: bugreportz: not found"
-            return "1.1"
-        else:
-            if self.return_multiple:
-                return self.return_value.pop(0)
-            else:
-                return self.return_value
-
-    def getprop(self, params):
-        if params == "ro.build.id":
-            return self.build_id
-        elif params == "ro.build.version.incremental":
-            return "123456789"
-        elif params == "ro.build.type":
-            return "userdebug"
-        elif params == "ro.build.product" or params == "ro.product.name":
-            return "FakeModel"
-        elif params == "sys.boot_completed":
-            return "1"
-
-    def devices(self):
-        return "\t".join([str(self.serial), "device"])
-
-    def bugreport(self, params, timeout=android_device.BUG_REPORT_TIMEOUT):
-        expected = os.path.join(
-            logging.log_path, "AndroidDevice%s" % self.serial,
-            "AndroidDevice%s_%s.txt" %
-            (self.serial,
-             logger.normalize_log_line_timestamp(MOCK_ADB_LOGCAT_BEGIN_TIME)))
-        assert expected in params, "Expected '%s', got '%s'." % (expected,
-                                                                 params)
-
-    def __getattr__(self, name):
-        """All calls to the none-existent functions in adb proxy would
-        simply return the adb command string.
-        """
-
-        def adb_call(*args, **kwargs):
-            arg_str = ' '.join(str(elem) for elem in args)
-            return arg_str
-
-        return adb_call
-
-
-class MockFastbootProxy():
-    """Mock class that swaps out calls to adb with mock calls."""
-
-    def __init__(self, serial):
-        self.serial = serial
-
-    def devices(self):
-        return "xxxx\tdevice\nyyyy\tdevice"
-
-    def __getattr__(self, name):
-        def fastboot_call(*args):
-            arg_str = ' '.join(str(elem) for elem in args)
-            return arg_str
-
-        return fastboot_call
-
-
-class ActsAndroidDeviceTest(unittest.TestCase):
-    """This test class has unit tests for the implementation of everything
-    under antlion.controllers.android_device.
-    """
-
-    def setUp(self):
-        # Set log_path to logging since acts logger setup is not called.
-        if not hasattr(logging, "log_path"):
-            setattr(logging, "log_path", "/tmp/logs")
-        # Creates a temp dir to be used by tests in this test class.
-        self.tmp_dir = tempfile.mkdtemp()
-
-    def tearDown(self):
-        """Removes the temp dir.
-        """
-        shutil.rmtree(self.tmp_dir)
-
-    # Tests for android_device module functions.
-    # These tests use mock AndroidDevice instances.
-
-    @mock.patch.object(
-        android_device, "get_all_instances", new=mock_get_all_instances)
-    @mock.patch.object(
-        android_device, "list_adb_devices", new=mock_list_adb_devices)
-    def test_create_with_pickup_all(self):
-        pick_all_token = android_device.ANDROID_DEVICE_PICK_ALL_TOKEN
-        actual_ads = android_device.create(pick_all_token)
-        for actual, expected in zip(actual_ads, get_mock_ads(5)):
-            self.assertEqual(actual.serial, expected.serial)
-
-    def test_create_with_empty_config(self):
-        expected_msg = android_device.ANDROID_DEVICE_EMPTY_CONFIG_MSG
-        with self.assertRaisesRegex(errors.AndroidDeviceConfigError,
-                                    expected_msg):
-            android_device.create([])
-
-    def test_create_with_not_list_config(self):
-        expected_msg = android_device.ANDROID_DEVICE_NOT_LIST_CONFIG_MSG
-        with self.assertRaisesRegex(errors.AndroidDeviceConfigError,
-                                    expected_msg):
-            android_device.create("HAHA")
-
-    def test_get_device_success_with_serial(self):
-        ads = get_mock_ads(5)
-        expected_serial = 0
-        ad = android_device.get_device(ads, serial=expected_serial)
-        self.assertEqual(ad.serial, expected_serial)
-
-    def test_get_device_success_with_serial_and_extra_field(self):
-        ads = get_mock_ads(5)
-        expected_serial = 1
-        expected_h_port = 5555
-        ads[1].h_port = expected_h_port
-        ad = android_device.get_device(
-            ads, serial=expected_serial, h_port=expected_h_port)
-        self.assertEqual(ad.serial, expected_serial)
-        self.assertEqual(ad.h_port, expected_h_port)
-
-    def test_get_device_no_match(self):
-        ads = get_mock_ads(5)
-        expected_msg = ("Could not find a target device that matches condition"
-                        ": {'serial': 5}.")
-        with self.assertRaisesRegex(ValueError, expected_msg):
-            ad = android_device.get_device(ads, serial=len(ads))
-
-    def test_get_device_too_many_matches(self):
-        ads = get_mock_ads(5)
-        target_serial = ads[1].serial = ads[0].serial
-        expected_msg = "More than one device matched: \[0, 0\]"
-        with self.assertRaisesRegex(ValueError, expected_msg):
-            ad = android_device.get_device(ads, serial=target_serial)
-
-    def test_start_services_on_ads(self):
-        """Makes sure when an AndroidDevice fails to start some services, all
-        AndroidDevice objects get cleaned up.
-        """
-        msg = "Some error happened."
-        ads = get_mock_ads(3)
-        ads[0].start_services = mock.MagicMock()
-        ads[0].clean_up = mock.MagicMock()
-        ads[1].start_services = mock.MagicMock()
-        ads[1].clean_up = mock.MagicMock()
-        ads[2].start_services = mock.MagicMock(
-            side_effect=errors.AndroidDeviceError(msg))
-        ads[2].clean_up = mock.MagicMock()
-        with self.assertRaisesRegex(errors.AndroidDeviceError, msg):
-            android_device._start_services_on_ads(ads)
-        ads[0].clean_up.assert_called_once_with()
-        ads[1].clean_up.assert_called_once_with()
-        ads[2].clean_up.assert_called_once_with()
-
-    # Tests for android_device.AndroidDevice class.
-    # These tests mock out any interaction with the OS and real android device
-    # in AndroidDeivce.
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_AndroidDevice_instantiation(self, MockFastboot, MockAdbProxy):
-        """Verifies the AndroidDevice object's basic attributes are correctly
-        set after instantiation.
-        """
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        self.assertEqual(ad.serial, 1)
-        self.assertEqual(ad.model, "fakemodel")
-        self.assertIsNone(ad.adb_logcat_process)
-        expected_lp = os.path.join(logging.log_path,
-                                   "AndroidDevice%s" % MOCK_SERIAL)
-        self.assertEqual(ad.log_path, expected_lp)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_AndroidDevice_build_info_release(self, MockFastboot,
-                                              MockAdbProxy):
-        """Verifies the AndroidDevice object's basic attributes are correctly
-        set after instantiation.
-        """
-        ad = android_device.AndroidDevice(serial=1)
-        build_info = ad.build_info
-        self.assertEqual(build_info["build_id"], "ABC1.123456.007")
-        self.assertEqual(build_info["build_type"], "userdebug")
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL, build_id=MOCK_DEV_BUILD_ID))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_AndroidDevice_build_info_dev(self, MockFastboot, MockAdbProxy):
-        """Verifies the AndroidDevice object's basic attributes are correctly
-        set after instantiation.
-        """
-        ad = android_device.AndroidDevice(serial=1)
-        build_info = ad.build_info
-        self.assertEqual(build_info["build_id"], "123456789")
-        self.assertEqual(build_info["build_type"], "userdebug")
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL, build_id=MOCK_NYC_BUILD_ID))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_AndroidDevice_build_info_nyc(self, MockFastboot, MockAdbProxy):
-        """Verifies the AndroidDevice object's build id is set correctly for
-        NYC releases.
-        """
-        ad = android_device.AndroidDevice(serial=1)
-        build_info = ad.build_info
-        self.assertEqual(build_info["build_id"], MOCK_NYC_BUILD_ID)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('os.makedirs')
-    @mock.patch('antlion.utils.exe_cmd')
-    @mock.patch(
-        'antlion.controllers.android_device.AndroidDevice.device_log_path',
-        new_callable=mock.PropertyMock)
-    def test_AndroidDevice_take_bug_report(self, mock_log_path, exe_mock,
-                                           mock_makedirs, FastbootProxy,
-                                           MockAdbProxy):
-        """Verifies AndroidDevice.take_bug_report calls the correct adb command
-        and writes the bugreport file to the correct path.
-        """
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        mock_log_path.return_value = os.path.join(
-            logging.log_path, "AndroidDevice%s" % ad.serial)
-        ad.take_bug_report("test_something", 234325.32)
-        mock_makedirs.assert_called_with(mock_log_path(), exist_ok=True)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL, fail_br=True))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('os.makedirs')
-    @mock.patch('antlion.utils.exe_cmd')
-    @mock.patch(
-        'antlion.controllers.android_device.AndroidDevice.device_log_path',
-        new_callable=mock.PropertyMock)
-    def test_AndroidDevice_take_bug_report_fail(self, mock_log_path, *_):
-        """Verifies AndroidDevice.take_bug_report writes out the correct message
-        when taking bugreport fails.
-        """
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        mock_log_path.return_value = os.path.join(
-            logging.log_path, "AndroidDevice%s" % ad.serial)
-        expected_msg = "Failed to take bugreport on 1: OMG I died!"
-        with self.assertRaisesRegex(errors.AndroidDeviceError, expected_msg):
-            ad.take_bug_report("test_something", 4346343.23)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL, fail_br_before_N=True))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('os.makedirs')
-    @mock.patch('antlion.utils.exe_cmd')
-    @mock.patch(
-        'antlion.controllers.android_device.AndroidDevice.device_log_path',
-        new_callable=mock.PropertyMock)
-    def test_AndroidDevice_take_bug_report_fallback(
-            self, mock_log_path, exe_mock, mock_makedirs, FastbootProxy,
-            MockAdbProxy):
-        """Verifies AndroidDevice.take_bug_report falls back to traditional
-        bugreport on builds that do not have bugreportz.
-        """
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        mock_log_path.return_value = os.path.join(
-            logging.log_path, "AndroidDevice%s" % ad.serial)
-        ad.take_bug_report("test_something", MOCK_ADB_EPOCH_BEGIN_TIME)
-        mock_makedirs.assert_called_with(mock_log_path(), exist_ok=True)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('antlion.libs.proc.process.Process')
-    def test_AndroidDevice_start_adb_logcat(self, proc_mock, FastbootProxy,
-                                            MockAdbProxy):
-        """Verifies the AndroidDevice method start_adb_logcat. Checks that the
-        underlying logcat process is started properly and correct warning msgs
-        are generated.
-        """
-        with mock.patch(('antlion.controllers.android_lib.logcat.'
-                         'create_logcat_keepalive_process'),
-                        return_value=proc_mock) as create_proc_mock:
-            ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-            ad.start_adb_logcat()
-            # Verify start did the correct operations.
-            self.assertTrue(ad.adb_logcat_process)
-            log_dir = "AndroidDevice%s" % ad.serial
-            create_proc_mock.assert_called_with(ad.serial, log_dir, '-b all')
-            proc_mock.start.assert_called_with()
-            # Expect warning msg if start is called back to back.
-            expected_msg = "Android device .* already has a running adb logcat"
-            proc_mock.is_running.return_value = True
-            with self.assertLogs(level='WARNING') as log:
-                ad.start_adb_logcat()
-                self.assertRegex(log.output[0], expected_msg)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('antlion.controllers.android_lib.logcat.'
-                'create_logcat_keepalive_process')
-    def test_AndroidDevice_start_adb_logcat_with_user_param(
-            self, create_proc_mock, FastbootProxy, MockAdbProxy):
-        """Verifies that start_adb_logcat generates the correct adb logcat
-        command if adb_logcat_param is specified.
-        """
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.adb_logcat_param = "-b radio"
-        ad.start_adb_logcat()
-        # Verify that create_logcat_keepalive_process is called with the
-        # correct command.
-        log_dir = "AndroidDevice%s" % ad.serial
-        create_proc_mock.assert_called_with(ad.serial, log_dir, '-b radio')
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    @mock.patch('antlion.libs.proc.process.Process')
-    def test_AndroidDevice_stop_adb_logcat(self, proc_mock, FastbootProxy,
-                                           MockAdbProxy):
-        """Verifies the AndroidDevice method stop_adb_logcat. Checks that the
-        underlying logcat process is stopped properly and correct warning msgs
-        are generated.
-        """
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.adb_logcat_process = proc_mock
-        # Expect warning msg if stop is called before start.
-        expected_msg = (
-            "Android device .* does not have an ongoing adb logcat")
-        proc_mock.is_running.return_value = False
-        with self.assertLogs(level='WARNING') as log:
-            ad.stop_adb_logcat()
-            self.assertRegex(log.output[0], expected_msg)
-
-        # Verify the underlying process is stopped.
-        proc_mock.is_running.return_value = True
-        ad.stop_adb_logcat()
-        proc_mock.stop.assert_called_with()
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_get_apk_process_id_process_cannot_find(self, fastboot_proxy,
-                                                    adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.adb.return_value = "does_not_contain_value"
-        self.assertEqual(None, ad.get_package_pid("some_package"))
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_get_apk_process_id_process_exists_second_try(self, fastboot_proxy,
-                                                          adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.adb.return_multiple = True
-        ad.adb.return_value = ["", "system 1 2 3 4  S com.some_package"]
-        self.assertEqual(1, ad.get_package_pid("some_package"))
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_get_apk_process_id_bad_return(self, fastboot_proxy, adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.adb.return_value = "bad_return_index_error"
-        self.assertEqual(None, ad.get_package_pid("some_package"))
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_get_apk_process_id_bad_return(self, fastboot_proxy, adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.adb.return_value = "bad return value error"
-        self.assertEqual(None, ad.get_package_pid("some_package"))
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_ensure_verity_enabled_only_system_enabled(self, fastboot_proxy,
-                                                       adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
-
-        ad.adb.get_user_id = mock.MagicMock()
-        ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '',  # system.verified
-            '2'
-        ])  # vendor.verified
-        ad.adb.ensure_user = mock.MagicMock()
-        ad.reboot = mock.MagicMock()
-        ad.ensure_verity_enabled()
-        ad.reboot.assert_called_once()
-
-        ad.adb.ensure_user.assert_called_with(root_user_id)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_ensure_verity_enabled_only_vendor_enabled(self, fastboot_proxy,
-                                                       adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
-
-        ad.adb.get_user_id = mock.MagicMock()
-        ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '2',  # system.verified
-            ''
-        ])  # vendor.verified
-        ad.adb.ensure_user = mock.MagicMock()
-        ad.reboot = mock.MagicMock()
-
-        ad.ensure_verity_enabled()
-
-        ad.reboot.assert_called_once()
-        ad.adb.ensure_user.assert_called_with(root_user_id)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_ensure_verity_enabled_both_enabled_at_start(self, fastboot_proxy,
-                                                         adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
-
-        ad.adb.get_user_id = mock.MagicMock()
-        ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '2',  # system.verified
-            '2'
-        ])  # vendor.verified
-        ad.adb.ensure_user = mock.MagicMock()
-        ad.reboot = mock.MagicMock()
-        ad.ensure_verity_enabled()
-
-        assert not ad.reboot.called
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_ensure_verity_disabled_system_already_disabled(
-            self, fastboot_proxy, adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
-
-        ad.adb.get_user_id = mock.MagicMock()
-        ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '2',  # system.verified
-            ''
-        ])  # vendor.verified
-        ad.adb.ensure_user = mock.MagicMock()
-        ad.reboot = mock.MagicMock()
-        ad.ensure_verity_disabled()
-
-        ad.reboot.assert_called_once()
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_ensure_verity_disabled_vendor_already_disabled(
-            self, fastboot_proxy, adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
-
-        ad.adb.get_user_id = mock.MagicMock()
-        ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '',  # system.verified
-            '2'
-        ])  # vendor.verified
-        ad.adb.ensure_user = mock.MagicMock()
-        ad.reboot = mock.MagicMock()
-
-        ad.ensure_verity_disabled()
-
-        ad.reboot.assert_called_once()
-        ad.adb.ensure_user.assert_called_with(root_user_id)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_ensure_verity_disabled_disabled_at_start(
-            self, fastboot_proxy, adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        root_user_id = '0'
-
-        ad.adb.get_user_id = mock.MagicMock()
-        ad.adb.get_user_id.return_value = root_user_id
-
-        ad.adb.getprop = mock.MagicMock(side_effect=[
-            '',  # system.verified
-            ''
-        ])  # vendor.verified
-        ad.adb.ensure_user = mock.MagicMock()
-        ad.reboot = mock.MagicMock()
-
-        ad.ensure_verity_disabled()
-
-        assert not ad.reboot.called
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_push_system_file(self, fastboot_proxy, adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.ensure_verity_disabled = mock.MagicMock()
-        ad.adb.remount = mock.MagicMock()
-        ad.adb.push = mock.MagicMock()
-
-        ret = ad.push_system_file('asdf', 'jkl')
-        self.assertTrue(ret)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_push_system_file_returns_false_on_error(self, fastboot_proxy,
-                                                     adb_proxy):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.ensure_verity_disabled = mock.MagicMock()
-        ad.adb.remount = mock.MagicMock()
-        ad.adb.push = mock.MagicMock(return_value='error')
-
-        ret = ad.push_system_file('asdf', 'jkl')
-        self.assertFalse(ret)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_get_my_current_focus_window_return_empty_string(self, *_):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.adb.return_value = ''
-
-        ret = ad.get_my_current_focus_window()
-
-        self.assertEqual('', ret)
-
-    @mock.patch(
-        'antlion.controllers.adb.AdbProxy',
-        return_value=MockAdbProxy(MOCK_SERIAL))
-    @mock.patch(
-        'antlion.controllers.fastboot.FastbootProxy',
-        return_value=MockFastbootProxy(MOCK_SERIAL))
-    def test_get_my_current_focus_window_return_current_window(self, *_):
-        ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
-        ad.adb.return_value = 'mCurrentFocus=Window{a247ded u0 NotificationShade}'
-
-        ret = ad.get_my_current_focus_window()
-
-        self.assertEqual('NotificationShade', ret)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_asserts_test.py b/src/antlion/unit_tests/acts_asserts_test.py
deleted file mode 100755
index 5fbfe5b..0000000
--- a/src/antlion/unit_tests/acts_asserts_test.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion import asserts
-from antlion import signals
-
-MSG_EXPECTED_EXCEPTION = "This is an expected exception."
-
-
-class ActsAssertsTest(unittest.TestCase):
-    """Verifies that asserts.xxx functions raise the correct test signals.
-    """
-
-    def test_assert_false(self):
-        asserts.assert_false(False, MSG_EXPECTED_EXCEPTION)
-        with self.assertRaisesRegexp(signals.TestFailure,
-                                     MSG_EXPECTED_EXCEPTION):
-            asserts.assert_false(True, MSG_EXPECTED_EXCEPTION)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_base_class_test.py b/src/antlion/unit_tests/acts_base_class_test.py
deleted file mode 100755
index 18c989a..0000000
--- a/src/antlion/unit_tests/acts_base_class_test.py
+++ /dev/null
@@ -1,1132 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import shutil
-import tempfile
-import unittest
-
-import mock
-import mock_controller
-
-from antlion import asserts
-from antlion import base_test
-from antlion import signals
-
-from mobly import base_test as mobly_base_test
-import mobly.config_parser as mobly_config_parser
-
-MSG_EXPECTED_EXCEPTION = 'This is an expected exception.'
-MSG_EXPECTED_TEST_FAILURE = 'This is an expected test failure.'
-MSG_UNEXPECTED_EXCEPTION = 'Unexpected exception!'
-
-MOCK_EXTRA = {'key': 'value', 'answer_to_everything': 42}
-
-
-def never_call():
-    raise Exception(MSG_UNEXPECTED_EXCEPTION)
-
-
-class SomeError(Exception):
-    """A custom exception class used for tests in this module."""
-
-
-class ActsBaseClassTest(unittest.TestCase):
-    def setUp(self):
-        self.tmp_dir = tempfile.mkdtemp()
-        self.tb_key = 'testbed_configs'
-        self.test_run_config = mobly_config_parser.TestRunConfig()
-        self.test_run_config.testbed_name = 'SampleTestBed'
-        self.test_run_config.controller_configs = {
-            self.tb_key: {
-                'name': self.test_run_config.testbed_name,
-            },
-        }
-        self.test_run_config.log_path = self.tmp_dir
-        self.test_run_config.user_params = {'some_param': 'hahaha'}
-        self.test_run_config.summary_writer = mock.MagicMock()
-        self.mock_test_name = 'test_something'
-
-    def tearDown(self):
-        shutil.rmtree(self.tmp_dir)
-
-    def test_current_test_case_name(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.assert_true(
-                    self.current_test_name == 'test_func',
-                    'Got unexpected test name %s.' % self.current_test_name)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-
-    def test_self_tests_list(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def __init__(self, controllers):
-                super(MockBaseTest, self).__init__(controllers)
-                self.tests = ('test_something', )
-
-            def test_something(self):
-                pass
-
-            def test_never(self):
-                # This should not execute it's not on default test list.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_something')
-
-    def test_cli_test_selection_match_self_tests_list(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def __init__(self, controllers):
-                super(MockBaseTest, self).__init__(controllers)
-                self.tests = ('test_star1', 'test_star2', 'test_question_mark',
-                              'test_char_seq', 'test_no_match')
-
-            def test_star1(self):
-                pass
-
-            def test_star2(self):
-                pass
-
-            def test_question_mark(self):
-                pass
-
-            def test_char_seq(self):
-                pass
-
-            def test_no_match(self):
-                # This should not execute because it does not match any regex
-                # in the cmd line input.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        test_names = [
-            'test_st*r1', 'test_*2', 'test_?uestion_mark', 'test_c[fghi]ar_seq'
-        ]
-        bt_cls.run(test_names=test_names)
-        passed_names = [p.test_name for p in bt_cls.results.passed]
-        self.assertEqual(len(passed_names), len(test_names))
-        for test in [
-                'test_star1', 'test_star2', 'test_question_mark',
-                'test_char_seq'
-        ]:
-            self.assertIn(test, passed_names)
-
-    def test_default_execution_of_all_tests(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_something(self):
-                pass
-
-            def not_a_test(self):
-                # This should not execute its name doesn't follow test case
-                # naming convention.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_something')
-
-    def test_setup_class_fail_by_exception(self):
-        call_check = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_class(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                # This should not execute because setup_class failed.
-                never_call()
-
-            def on_skip(self, test_name, begin_time):
-                call_check('haha')
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, 'test_something')
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-        call_check.assert_called_once_with('haha')
-
-    def test_setup_test_fail_by_exception(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                # This should not execute because setup_test failed.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_something'])
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_setup_test_fail_by_test_signal(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                raise signals.TestFailure(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                # This should not execute because setup_test failed.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_something'])
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 1,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_setup_test_fail_by_return_False(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                return False
-
-            def test_something(self):
-                # This should not execute because setup_test failed.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_something'])
-        actual_record = bt_cls.results.failed[0]
-        expected_msg = 'Setup for %s failed.' % self.mock_test_name
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, expected_msg)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 1,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_teardown_test_assert_fail(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                asserts.assert_true(False, MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_teardown_test_raise_exception(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_teardown_test_executed_if_test_pass(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                my_mock('teardown_test')
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        my_mock.assert_called_once_with('teardown_test')
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 1,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_teardown_test_executed_if_setup_test_fails(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def teardown_test(self):
-                my_mock('teardown_test')
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        my_mock.assert_called_once_with('teardown_test')
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_teardown_test_executed_if_test_fails(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                my_mock('teardown_test')
-
-            def test_something(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        my_mock.assert_called_once_with('teardown_test')
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_exception_executed_if_teardown_test_fails(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def on_exception(self, test_name, begin_time):
-                my_mock('on_exception')
-
-            def teardown_test(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        my_mock.assert_called_once_with('on_exception')
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_fail_executed_if_test_fails(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def on_fail(self, test_name, begin_time):
-                my_mock('on_fail')
-
-            def test_something(self):
-                asserts.assert_true(False, MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        my_mock.assert_called_once_with('on_fail')
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 1,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_fail_executed_if_test_setup_fails_by_exception(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def on_fail(self, test_name, begin_time):
-                my_mock('on_fail')
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        my_mock.assert_called_once_with('on_fail')
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_fail_executed_if_test_setup_fails_by_return_False(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                return False
-
-            def on_fail(self, test_name, begin_time):
-                my_mock('on_fail')
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        my_mock.assert_called_once_with('on_fail')
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details,
-                         'Setup for test_something failed.')
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 1,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_failure_to_call_procedure_function_is_recorded(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            # Wrong method signature; will raise exception
-            def on_pass(self):
-                pass
-
-            def test_something(self):
-                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertIn('_on_pass', actual_record.extra_errors)
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_failure_in_procedure_functions_is_recorded(self):
-        expected_msg = 'Something failed in on_pass.'
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def on_pass(self, test_name, begin_time):
-                raise Exception(expected_msg)
-
-            def test_something(self):
-                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_both_teardown_and_test_body_raise_exceptions(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                asserts.assert_true(False, MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                raise Exception('Test Body Exception.')
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, 'Test Body Exception.')
-        self.assertIsNone(actual_record.extras)
-        self.assertEqual(actual_record.extra_errors['teardown_test'].details,
-                         'This is an expected exception.')
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_explicit_pass_but_teardown_test_raises_an_exception(self):
-        """Test record result should be marked as UNKNOWN as opposed to PASS.
-        """
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                asserts.assert_true(False, MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                asserts.explicit_pass('Test Passed!')
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, 'Test Passed!')
-        self.assertIsNone(actual_record.extras)
-        self.assertEqual(actual_record.extra_errors['teardown_test'].details,
-                         'This is an expected exception.')
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_pass_raise_exception(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def on_pass(self, test_name, begin_time):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION,
-                                      extras=MOCK_EXTRA)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-        expected_summary = {
-            'Error': 1,
-            'Executed': 1,
-            'Failed': 0,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_fail_raise_exception(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def on_fail(self, test_name, begin_time):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                asserts.fail(MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(bt_cls.results.error, [])
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-        expected_summary = {
-            'Error': 0,
-            'Executed': 1,
-            'Failed': 1,
-            'Passed': 0,
-            'Requested': 1,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_abort_class(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_1(self):
-                pass
-
-            def test_2(self):
-                asserts.abort_class(MSG_EXPECTED_EXCEPTION)
-                never_call()
-
-            def test_3(self):
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_1', 'test_2', 'test_3'])
-        self.assertEqual(bt_cls.results.passed[0].test_name, 'test_1')
-        self.assertEqual(bt_cls.results.failed[0].details,
-                         MSG_EXPECTED_EXCEPTION)
-        expected_summary = {
-            'Error': 0,
-            'Executed': 2,
-            'Failed': 1,
-            'Passed': 1,
-            'Requested': 3,
-            'Skipped': 0
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_uncaught_exception(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-
-    def test_fail(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.fail(MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_true(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.assert_true(False,
-                                    MSG_EXPECTED_EXCEPTION,
-                                    extras=MOCK_EXTRA)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_equal_pass(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.assert_equal(1, 1, extras=MOCK_EXTRA)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-
-    def test_assert_equal_fail(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.assert_equal(1, 2, extras=MOCK_EXTRA)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertIn('1 != 2', actual_record.details)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_equal_fail_with_msg(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.assert_equal(1,
-                                     2,
-                                     msg=MSG_EXPECTED_EXCEPTION,
-                                     extras=MOCK_EXTRA)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        expected_msg = '1 != 2 ' + MSG_EXPECTED_EXCEPTION
-        self.assertIn(expected_msg, actual_record.details)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_raises_pass(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                with asserts.assert_raises(SomeError, extras=MOCK_EXTRA):
-                    raise SomeError(MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-
-    def test_assert_raises_regex_pass(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                with asserts.assert_raises_regex(
-                        SomeError,
-                        expected_regex=MSG_EXPECTED_EXCEPTION,
-                        extras=MOCK_EXTRA):
-                    raise SomeError(MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-
-    def test_assert_raises_fail_with_noop(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                with asserts.assert_raises_regex(
-                        SomeError,
-                        expected_regex=MSG_EXPECTED_EXCEPTION,
-                        extras=MOCK_EXTRA):
-                    pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertEqual(actual_record.details, 'SomeError not raised')
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_raises_fail_with_wrong_regex(self):
-        wrong_msg = 'ha'
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                with asserts.assert_raises_regex(
-                        SomeError,
-                        expected_regex=MSG_EXPECTED_EXCEPTION,
-                        extras=MOCK_EXTRA):
-                    raise SomeError(wrong_msg)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        expected_details = ('"This is an expected exception." does not match '
-                            '"%s"') % wrong_msg
-        self.assertEqual(actual_record.details, expected_details)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_raises_fail_with_wrong_error(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                with asserts.assert_raises_regex(
-                        SomeError,
-                        expected_regex=MSG_EXPECTED_EXCEPTION,
-                        extras=MOCK_EXTRA):
-                    raise AttributeError(MSG_UNEXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertEqual(actual_record.details, MSG_UNEXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-
-    def test_explicit_pass(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION,
-                                      extras=MOCK_EXTRA)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_implicit_pass(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-
-    def test_skip(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.skip(MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
-        actual_record = bt_cls.results.skipped[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_skip_if(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.skip_if(False, MSG_UNEXPECTED_EXCEPTION)
-                asserts.skip_if(True,
-                                MSG_EXPECTED_EXCEPTION,
-                                extras=MOCK_EXTRA)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=['test_func'])
-        actual_record = bt_cls.results.skipped[0]
-        self.assertEqual(actual_record.test_name, 'test_func')
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_unpack_userparams_required(self):
-        """Missing a required param should raise an error."""
-        required = ['some_param']
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(required)
-        expected_value = self.test_run_config.user_params['some_param']
-        self.assertEqual(bc.some_param, expected_value)
-
-    def test_unpack_userparams_required_missing(self):
-        """Missing a required param should raise an error."""
-        required = ['something']
-        bc = base_test.BaseTestClass(self.test_run_config)
-        expected_msg = ('Missing required user param "%s" in test '
-                        'configuration.') % required[0]
-        with self.assertRaises(mobly_base_test.Error, msg=expected_msg):
-            bc.unpack_userparams(required)
-
-    def test_unpack_userparams_optional(self):
-        """If an optional param is specified, the value should be what's in the
-        config.
-        """
-        opt = ['some_param']
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(opt_param_names=opt)
-        expected_value = self.test_run_config.user_params['some_param']
-        self.assertEqual(bc.some_param, expected_value)
-
-    def test_unpack_userparams_optional_with_default(self):
-        """If an optional param is specified with a default value, and the
-        param is not in the config, the value should be the default value.
-        """
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(optional_thing='whatever')
-        self.assertEqual(bc.optional_thing, 'whatever')
-
-    def test_unpack_userparams_default_overwrite_by_optional_param_list(self):
-        """If an optional param is specified in kwargs, and the param is in the
-        config, the value should be the one in the config.
-        """
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(some_param='whatever')
-        expected_value = self.test_run_config.user_params['some_param']
-        self.assertEqual(bc.some_param, expected_value)
-
-    def test_unpack_userparams_default_overwrite_by_required_param_list(self):
-        """If an optional param is specified in kwargs, the param is in the
-        required param list, and the param is not specified in the config, the
-        param's alue should be the default value and there should be no error
-        thrown.
-        """
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(req_param_names=['a_kwarg_param'],
-                             a_kwarg_param='whatever')
-        self.assertEqual(bc.a_kwarg_param, 'whatever')
-
-    def test_unpack_userparams_optional_missing(self):
-        """Missing an optional param should not raise an error."""
-        opt = ['something']
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(opt_param_names=opt)
-
-    def test_unpack_userparams_basic(self):
-        """Required and optional params are unpacked properly."""
-        required = ['something']
-        optional = ['something_else']
-        configs = self.test_run_config.copy()
-        configs.user_params['something'] = 42
-        configs.user_params['something_else'] = 53
-        bc = base_test.BaseTestClass(configs)
-        bc.unpack_userparams(req_param_names=required,
-                             opt_param_names=optional)
-        self.assertEqual(bc.something, 42)
-        self.assertEqual(bc.something_else, 53)
-
-    def test_unpack_userparams_default_overwrite(self):
-        default_arg_val = 'haha'
-        actual_arg_val = 'wawa'
-        arg_name = 'arg1'
-        configs = self.test_run_config.copy()
-        configs.user_params[arg_name] = actual_arg_val
-        bc = base_test.BaseTestClass(configs)
-        bc.unpack_userparams(opt_param_names=[arg_name], arg1=default_arg_val)
-        self.assertEqual(bc.arg1, actual_arg_val)
-
-    def test_unpack_userparams_default_None(self):
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(arg1='haha')
-        self.assertEqual(bc.arg1, 'haha')
-
-    def test_register_controller_no_config(self):
-        base_cls = base_test.BaseTestClass(self.test_run_config)
-        with self.assertRaisesRegexp(signals.ControllerError,
-                                     'No corresponding config found for'):
-            base_cls.register_controller(mock_controller)
-
-    def test_register_optional_controller_no_config(self):
-        base_cls = base_test.BaseTestClass(self.test_run_config)
-        self.assertIsNone(
-            base_cls.register_controller(mock_controller, required=False))
-
-    def test_register_controller_third_party_dup_register(self):
-        """Verifies correctness of registration, internal tally of controllers
-        objects, and the right error happen when a controller module is
-        registered twice.
-        """
-        mock_test_config = self.test_run_config.copy()
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-            'magic1', 'magic2'
-        ]
-        base_cls = base_test.BaseTestClass(mock_test_config)
-        base_cls.register_controller(mock_controller)
-        registered_name = 'mock_controller'
-        controller_objects = base_cls._controller_manager._controller_objects
-        self.assertTrue(registered_name in controller_objects)
-        mock_ctrlrs = controller_objects[registered_name]
-        self.assertEqual(mock_ctrlrs[0].magic, 'magic1')
-        self.assertEqual(mock_ctrlrs[1].magic, 'magic2')
-        expected_msg = 'Controller module .* has already been registered.'
-        with self.assertRaisesRegexp(signals.ControllerError, expected_msg):
-            base_cls.register_controller(mock_controller)
-
-    def test_register_optional_controller_third_party_dup_register(self):
-        """Verifies correctness of registration, internal tally of controllers
-        objects, and the right error happen when an optional controller module
-        is registered twice.
-        """
-        mock_test_config = self.test_run_config.copy()
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-            'magic1', 'magic2'
-        ]
-        base_cls = base_test.BaseTestClass(mock_test_config)
-        base_cls.register_controller(mock_controller, required=False)
-        expected_msg = 'Controller module .* has already been registered.'
-        with self.assertRaisesRegexp(signals.ControllerError, expected_msg):
-            base_cls.register_controller(mock_controller, required=False)
-
-    def test_register_controller_builtin_dup_register(self):
-        """Same as test_register_controller_third_party_dup_register, except
-        this is for a builtin controller module.
-        """
-        mock_test_config = self.test_run_config.copy()
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_ref_name = 'haha'
-        setattr(mock_controller, 'ACTS_CONTROLLER_REFERENCE_NAME',
-                mock_ref_name)
-        try:
-            mock_ctrlr_ref_name = mock_controller.ACTS_CONTROLLER_REFERENCE_NAME
-            mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-                'magic1', 'magic2'
-            ]
-            base_cls = base_test.BaseTestClass(mock_test_config)
-            base_cls.register_controller(mock_controller, builtin=True)
-            self.assertTrue(hasattr(base_cls, mock_ref_name))
-            self.assertTrue(mock_controller.__name__ in
-                            base_cls._controller_manager._controller_objects)
-            mock_ctrlrs = getattr(base_cls, mock_ctrlr_ref_name)
-            self.assertEqual(mock_ctrlrs[0].magic, 'magic1')
-            self.assertEqual(mock_ctrlrs[1].magic, 'magic2')
-            expected_msg = 'Controller module .* has already been registered.'
-            with self.assertRaisesRegexp(signals.ControllerError,
-                                         expected_msg):
-                base_cls.register_controller(mock_controller, builtin=True)
-        finally:
-            delattr(mock_controller, 'ACTS_CONTROLLER_REFERENCE_NAME')
-
-    def test_register_controller_no_get_info(self):
-        mock_test_config = self.test_run_config.copy()
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_ref_name = 'haha'
-        get_info = getattr(mock_controller, 'get_info')
-        delattr(mock_controller, 'get_info')
-        try:
-            mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-                'magic1', 'magic2'
-            ]
-            base_cls = base_test.BaseTestClass(mock_test_config)
-            base_cls.register_controller(mock_controller)
-            self.assertEqual(base_cls.results.controller_info, [])
-        finally:
-            setattr(mock_controller, 'get_info', get_info)
-
-    def test_register_controller_return_value(self):
-        mock_test_config = self.test_run_config.copy()
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-            'magic1', 'magic2'
-        ]
-        base_cls = base_test.BaseTestClass(mock_test_config)
-        magic_devices = base_cls.register_controller(mock_controller)
-        self.assertEqual(magic_devices[0].magic, 'magic1')
-        self.assertEqual(magic_devices[1].magic, 'magic2')
-
-    def test_handle_file_user_params_does_not_overwrite_existing_params(self):
-        test_run_config = self.test_run_config.copy()
-        test_run_config.user_params = {
-            'foo': ['good_value'],
-            'local_files': {
-                'foo': ['bad_value']
-            }
-        }
-        test = base_test.BaseTestClass(test_run_config)
-
-        self.assertEqual(test.user_params['foo'], ['good_value'])
-
-    def test_handle_file_user_params_dumps_files_dict(self):
-        test_run_config = self.test_run_config.copy()
-        test_run_config.user_params = {
-            'my_files': {
-                'foo': ['good_value']
-            }
-        }
-        test = base_test.BaseTestClass(test_run_config)
-
-        self.assertEqual(test.user_params['foo'], ['good_value'])
-
-    def test_handle_file_user_params_is_called_in_init(self):
-        test_run_config = self.test_run_config.copy()
-        test_run_config.user_params['files'] = {
-            'file_a': ['/some/path']
-        }
-        test = base_test.BaseTestClass(test_run_config)
-
-        self.assertEqual(test.user_params['file_a'], ['/some/path'])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_context_test.py b/src/antlion/unit_tests/acts_context_test.py
deleted file mode 100755
index 9f634c2..0000000
--- a/src/antlion/unit_tests/acts_context_test.py
+++ /dev/null
@@ -1,246 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from functools import partial
-from unittest import TestCase
-
-from antlion import context
-from antlion.context import RootContext
-from antlion.context import TestCaseContext
-from antlion.context import TestClassContext
-from antlion.context import TestContext
-from antlion.context import _update_test_case_context
-from antlion.context import _update_test_class_context
-from antlion.context import get_context_for_event
-from antlion.context import get_current_context
-from antlion.event.event import TestCaseBeginEvent
-from antlion.event.event import TestCaseEndEvent
-from antlion.event.event import TestCaseEvent
-from antlion.event.event import TestClassBeginEvent
-from antlion.event.event import TestClassEndEvent
-from antlion.event.event import TestClassEvent
-from mock import Mock
-from mock import patch
-
-
-LOGGING = 'antlion.context.logging'
-
-
-def reset_context():
-    context._contexts = [RootContext()]
-
-
-TEST_CASE = 'test_case_name'
-
-
-class TestClass:
-    pass
-
-
-class ModuleTest(TestCase):
-    """Unit tests for the context module."""
-
-    def test_get_context_for_event_for_test_case(self):
-        event = Mock(spec=TestCaseEvent)
-        event.test_class = Mock()
-        event.test_case = Mock()
-        context = get_context_for_event(event)
-
-        self.assertIsInstance(context, TestCaseContext)
-        self.assertEqual(context.test_class, event.test_class)
-        self.assertEqual(context.test_case, event.test_case)
-
-    def test_get_context_for_event_for_test_class(self):
-        event = Mock(spec=TestClassEvent)
-        event.test_class = Mock()
-        context = get_context_for_event(event)
-
-        self.assertIsInstance(context, TestClassContext)
-        self.assertEqual(context.test_class, event.test_class)
-
-    def test_get_context_for_unknown_event_type(self):
-        event = Mock()
-
-        self.assertRaises(TypeError, partial(get_context_for_event, event))
-
-    def test_update_test_class_context_for_test_class_begin(self):
-        event = Mock(spec=TestClassBeginEvent)
-        event.test_class = Mock()
-
-        _update_test_class_context(event)
-        self.assertIsInstance(get_current_context(), TestClassContext)
-        reset_context()
-
-    def test_update_test_class_context_for_test_class_end(self):
-        event = Mock(spec=TestClassBeginEvent)
-        event.test_class = Mock()
-        event2 = Mock(spec=TestClassEndEvent)
-        event2.test_class = Mock()
-
-        _update_test_class_context(event)
-        _update_test_class_context(event2)
-
-        self.assertIsInstance(get_current_context(), RootContext)
-        reset_context()
-
-    def test_update_test_case_context_for_test_case_begin(self):
-        event = Mock(spec=TestClassBeginEvent)
-        event.test_class = Mock()
-        event2 = Mock(spec=TestCaseBeginEvent)
-        event2.test_class = Mock()
-        event2.test_case = Mock()
-
-        _update_test_class_context(event)
-        _update_test_case_context(event2)
-
-        self.assertIsInstance(get_current_context(), TestCaseContext)
-        reset_context()
-
-    def test_update_test_case_context_for_test_case_end(self):
-        event = Mock(spec=TestClassBeginEvent)
-        event.test_class = Mock()
-        event2 = Mock(spec=TestCaseBeginEvent)
-        event2.test_class = Mock()
-        event2.test_case = Mock()
-        event3 = Mock(spec=TestCaseEndEvent)
-        event3.test_class = Mock()
-        event3.test_case = Mock()
-
-        _update_test_class_context(event)
-        _update_test_case_context(event2)
-        _update_test_case_context(event3)
-
-        self.assertIsInstance(get_current_context(), TestClassContext)
-        reset_context()
-
-
-class TestContextTest(TestCase):
-    """Unit tests for the TestContext class."""
-
-    @patch(LOGGING)
-    def test_get_base_output_path_uses_default(self, logging):
-        context = TestContext()
-
-        self.assertEqual(context.get_base_output_path(), logging.log_path)
-
-    @patch(LOGGING)
-    def test_add_base_path_overrides_default(self, _):
-        context = TestContext()
-        mock_path = Mock()
-
-        context.add_base_output_path('basepath', mock_path)
-
-        self.assertEqual(context.get_base_output_path('basepath'), mock_path)
-
-    def test_get_subcontext_returns_empty_string_by_default(self):
-        context = TestContext()
-
-        self.assertEqual(context.get_subcontext(), '')
-
-    def test_add_subcontext_sets_correct_path(self):
-        context = TestContext()
-        mock_path = Mock()
-
-        context.add_subcontext('subcontext', mock_path)
-
-        self.assertEqual(context.get_subcontext('subcontext'), mock_path)
-
-    @patch(LOGGING)
-    @patch('os.makedirs')
-    def test_get_full_output_path_returns_correct_path(self, *_):
-        context = TestClassContext(TestClass())
-        context.add_base_output_path('foo', 'base/path')
-        context.add_subcontext('foo', 'subcontext')
-
-        full_path = 'base/path/TestClass/subcontext'
-        self.assertEqual(context.get_full_output_path('foo'), full_path)
-
-    def test_identifier_not_implemented(self):
-        context = TestContext()
-
-        self.assertRaises(NotImplementedError, lambda: context.identifier)
-
-
-class TestClassContextTest(TestCase):
-    """Unit tests for the TestClassContext class."""
-
-    def test_init_attributes(self):
-        test_class = Mock()
-        context = TestClassContext(test_class)
-
-        self.assertEqual(context.test_class, test_class)
-
-    def test_get_class_name(self):
-        class TestClass:
-            pass
-        test_class = TestClass()
-        context = TestClassContext(test_class)
-
-        self.assertEqual(context.test_class_name, TestClass.__name__)
-
-    def test_context_dir_is_class_name(self):
-        class TestClass:
-            pass
-        test_class = TestClass()
-        context = TestClassContext(test_class)
-
-        self.assertEqual(context._get_default_context_dir(), TestClass.__name__)
-
-    def test_identifier_is_class_name(self):
-        class TestClass:
-            pass
-        test_class = TestClass()
-        context = TestClassContext(test_class)
-
-        self.assertEqual(context.identifier, TestClass.__name__)
-
-
-class TestCaseContextTest(TestCase):
-    """Unit tests for the TestCaseContext class."""
-
-    def test_init_attributes(self):
-        test_class = Mock()
-        test_case = TEST_CASE
-        context = TestCaseContext(test_class, test_case)
-
-        self.assertEqual(context.test_class, test_class)
-        self.assertEqual(context.test_case, test_case)
-        self.assertEqual(context.test_case_name, test_case)
-
-    def test_get_class_name(self):
-        test_class = TestClass()
-        context = TestCaseContext(test_class, TEST_CASE)
-
-        self.assertEqual(context.test_class_name, TestClass.__name__)
-
-    def test_context_dir_is_class_and_test_case_name(self):
-        test_class = TestClass()
-        context = TestCaseContext(test_class, TEST_CASE)
-
-        context_dir = TestClass.__name__ + '/' + TEST_CASE
-        self.assertEqual(context._get_default_context_dir(), context_dir)
-
-    def test_identifier_is_class_and_test_case_name(self):
-        test_class = TestClass()
-        context = TestCaseContext(test_class, TEST_CASE)
-
-        identifier = TestClass.__name__ + '.' + TEST_CASE
-        self.assertEqual(context.identifier, identifier)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_error_test.py b/src/antlion/unit_tests/acts_error_test.py
deleted file mode 100755
index 737000d..0000000
--- a/src/antlion/unit_tests/acts_error_test.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-
-from antlion import error
-
-
-class ActsErrorTest(unittest.TestCase):
-
-    def test_assert_key_pulled_from_acts_error_code(self):
-        e = error.ActsError()
-        self.assertEqual(e.error_code, 100)
-
-    def test_assert_description_pulled_from_docstring(self):
-        e = error.ActsError()
-        self.assertEqual(e.error_doc, 'Base Acts Error')
-
-    def test_error_without_args(self):
-        e = error.ActsError()
-        self.assertEqual(e.details, '')
-
-    def test_error_with_args(self):
-        args = ('hello', )
-        e = error.ActsError(*args)
-        self.assertEqual(e.details, 'hello')
-
-    def test_error_with_kwargs(self):
-        e = error.ActsError(key='value')
-        self.assertIn(('key', 'value'), e.extras.items())
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_host_utils_test.py b/src/antlion/unit_tests/acts_host_utils_test.py
deleted file mode 100755
index 17e389c..0000000
--- a/src/antlion/unit_tests/acts_host_utils_test.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import socket
-import unittest
-
-from antlion.controllers.utils_lib import host_utils
-
-
-class ActsHostUtilsTest(unittest.TestCase):
-    """This test class has unit tests for the implementation of everything
-    under antlion.controllers.adb.
-    """
-
-    def test_detects_udp_port_in_use(self):
-        test_s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        test_s.bind(('localhost', 0))
-        port = test_s.getsockname()[1]
-        try:
-            self.assertFalse(host_utils.is_port_available(port))
-        finally:
-            test_s.close()
-
-    def test_detects_tcp_port_in_use(self):
-        test_s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        test_s.bind(('localhost', 0))
-        port = test_s.getsockname()[1]
-        try:
-            self.assertFalse(host_utils.is_port_available(port))
-        finally:
-            test_s.close()
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_import_unit_test.py b/src/antlion/unit_tests/acts_import_unit_test.py
deleted file mode 100755
index adbddde..0000000
--- a/src/antlion/unit_tests/acts_import_unit_test.py
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import re
-import sys
-import uuid
-
-if sys.version_info < (3, ):
-    import warnings
-
-    with warnings.catch_warnings():
-        warnings.filterwarnings('ignore', category=PendingDeprecationWarning)
-        import imp
-
-    import importlib
-    import unittest2 as unittest
-
-    def import_module(name, path):
-        return imp.load_source(name, path)
-
-    def import_acts():
-        return importlib.import_module('antlion')
-else:
-    import importlib.machinery
-    import unittest
-
-    def import_module(name, path):
-        return importlib.machinery.SourceFileLoader(name, path).load_module()
-
-    def import_acts():
-        return importlib.import_module('antlion')
-
-
-PY_FILE_REGEX = re.compile('.+\.py$')
-
-DENYLIST = [
-    'antlion/controllers/rohdeschwarz_lib/contest.py',
-    'antlion/controllers/native.py',
-    'antlion/controllers/native_android_device.py',
-    'antlion/controllers/packet_sender.py',
-    'antlion/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py'
-]
-
-DENYLIST_DIRECTORIES = [
-    'antlion/controllers/buds_lib'
-]
-
-
-class ActsImportUnitTest(unittest.TestCase):
-    """Test that all acts framework imports work."""
-
-    def test_import_acts_successful(self):
-        """Test that importing ACTS works."""
-        acts = import_acts()
-        self.assertIsNotNone(acts)
-
-    # TODO(b/190659975): Re-enable once permission issue is resolved.
-    @unittest.skip("Permission error: b/190659975")
-    def test_import_framework_successful(self):
-        """Dynamically test all imports from the framework."""
-        acts = import_acts()
-        if hasattr(acts, '__path__') and len(antlion.__path__) > 0:
-            acts_path = antlion.__path__[0]
-        else:
-            acts_path = os.path.dirname(antlion.__file__)
-
-        for root, _, files in os.walk(acts_path):
-            for f in files:
-                full_path = os.path.join(root, f)
-                if (any(full_path.endswith(e) for e in DENYLIST)
-                        or any(e in full_path
-                               for e in DENYLIST_DIRECTORIES)):
-                    continue
-
-                path = os.path.relpath(os.path.join(root, f), os.getcwd())
-
-                if PY_FILE_REGEX.match(full_path):
-                    with self.subTest(msg='import %s' % path):
-                        fake_module_name = str(uuid.uuid4())
-                        module = import_module(fake_module_name, path)
-                        self.assertIsNotNone(module)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_job_test.py b/src/antlion/unit_tests/acts_job_test.py
deleted file mode 100755
index 12ff68f..0000000
--- a/src/antlion/unit_tests/acts_job_test.py
+++ /dev/null
@@ -1,121 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import os
-import sys
-import unittest
-
-from antlion.libs.proc import job
-
-if os.name == 'posix' and sys.version_info[0] < 3:
-    import subprocess32 as subprocess
-else:
-    import subprocess
-
-
-class FakePopen(object):
-    """A fake version of the object returned from subprocess.Popen()."""
-
-    def __init__(self,
-                 stdout=None,
-                 stderr=None,
-                 returncode=0,
-                 will_timeout=False):
-        self.returncode = returncode
-        self._stdout = bytes(stdout,
-                             'utf-8') if stdout is not None else bytes()
-        self._stderr = bytes(stderr,
-                             'utf-8') if stderr is not None else bytes()
-        self._will_timeout = will_timeout
-
-    def communicate(self, timeout=None):
-        if self._will_timeout:
-            raise subprocess.TimeoutExpired(
-                -1, 'Timed out according to test logic')
-        return self._stdout, self._stderr
-
-    def kill(self):
-        pass
-
-    def wait(self):
-        pass
-
-
-class JobTestCases(unittest.TestCase):
-    @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(stdout='TEST\n'))
-    def test_run_success(self, popen):
-        """Test running a simple shell command."""
-        result = job.run('echo TEST')
-        self.assertTrue(result.stdout.startswith('TEST'))
-
-    @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(stderr='TEST\n'))
-    def test_run_stderr(self, popen):
-        """Test that we can read process stderr."""
-        result = job.run('echo TEST 1>&2')
-        self.assertEqual(len(result.stdout), 0)
-        self.assertTrue(result.stderr.startswith('TEST'))
-        self.assertFalse(result.stdout)
-
-    @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(returncode=1))
-    def test_run_error(self, popen):
-        """Test that we raise on non-zero exit statuses."""
-        self.assertRaises(job.Error, job.run, 'exit 1')
-
-    @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(returncode=1))
-    def test_run_with_ignored_error(self, popen):
-        """Test that we can ignore exit status on request."""
-        result = job.run('exit 1', ignore_status=True)
-        self.assertEqual(result.exit_status, 1)
-
-    @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(will_timeout=True))
-    def test_run_timeout(self, popen):
-        """Test that we correctly implement command timeouts."""
-        self.assertRaises(job.Error, job.run, 'sleep 5', timeout=0.1)
-
-    @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(stdout='TEST\n'))
-    def test_run_no_shell(self, popen):
-        """Test that we handle running without a wrapping shell."""
-        result = job.run(['echo', 'TEST'])
-        self.assertTrue(result.stdout.startswith('TEST'))
-
-    @mock.patch(
-        'antlion.libs.proc.job.subprocess.Popen',
-        return_value=FakePopen(stdout='TEST\n'))
-    def test_job_env(self, popen):
-        """Test that we can set environment variables correctly."""
-        test_env = {'MYTESTVAR': '20'}
-        result = job.run('printenv', env=test_env.copy())
-        popen.assert_called_once()
-        _, kwargs = popen.call_args
-        self.assertTrue('env' in kwargs)
-        self.assertEqual(kwargs['env'], test_env)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_logger_test.py b/src/antlion/unit_tests/acts_logger_test.py
deleted file mode 100755
index 2b6eeed..0000000
--- a/src/antlion/unit_tests/acts_logger_test.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion import logger
-import os
-import time
-
-
-class ActsLoggerTest(unittest.TestCase):
-    """Verifies code in antlion.logger module.
-    """
-
-    def test_epoch_to_log_line_timestamp(self):
-        os.environ['TZ'] = 'US/Pacific'
-        time.tzset()
-        actual_stamp = logger.epoch_to_log_line_timestamp(1469134262116)
-        self.assertEqual("2016-07-21 13:51:02.116", actual_stamp)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_records_test.py b/src/antlion/unit_tests/acts_records_test.py
deleted file mode 100755
index 8280f27..0000000
--- a/src/antlion/unit_tests/acts_records_test.py
+++ /dev/null
@@ -1,230 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from mobly.records import ControllerInfoRecord
-
-from antlion import records
-from antlion import signals
-
-
-class ActsRecordsTest(unittest.TestCase):
-    """This test class tests the implementation of classes in antlion.records.
-    """
-
-    def setUp(self):
-        self.tn = "test_name"
-        self.details = "Some details about the test execution."
-        self.float_extra = 12345.56789
-        self.json_extra = {"ha": "whatever"}
-
-    def verify_record(self, record, result, details, extras):
-        # Verify each field.
-        self.assertEqual(record.test_name, self.tn)
-        self.assertEqual(record.result, result)
-        self.assertEqual(record.details, details)
-        self.assertEqual(record.extras, extras)
-        self.assertTrue(record.begin_time, "begin time should not be empty.")
-        self.assertTrue(record.end_time, "end time should not be empty.")
-        # UID is not used at the moment, should always be None.
-        self.assertIsNone(record.uid)
-        # Verify to_dict.
-        d = {}
-        d[records.TestResultEnums.RECORD_NAME] = self.tn
-        d[records.TestResultEnums.RECORD_RESULT] = result
-        d[records.TestResultEnums.RECORD_DETAILS] = details
-        d[records.TestResultEnums.RECORD_EXTRAS] = extras
-        d[records.TestResultEnums.RECORD_BEGIN_TIME] = record.begin_time
-        d[records.TestResultEnums.RECORD_END_TIME] = record.end_time
-        d[records.TestResultEnums.
-          RECORD_LOG_BEGIN_TIME] = record.log_begin_time
-        d[records.TestResultEnums.RECORD_LOG_END_TIME] = record.log_end_time
-        d[records.TestResultEnums.RECORD_UID] = None
-        d[records.TestResultEnums.RECORD_CLASS] = None
-        d[records.TestResultEnums.RECORD_EXTRA_ERRORS] = {}
-        d[records.TestResultEnums.RECORD_STACKTRACE] = record.stacktrace
-        actual_d = record.to_dict()
-        self.assertDictEqual(actual_d, d)
-        # Verify that these code paths do not cause crashes and yield non-empty
-        # results.
-        self.assertTrue(str(record), "str of the record should not be empty.")
-        self.assertTrue(repr(record), "the record's repr shouldn't be empty.")
-        self.assertTrue(record.json_str(), ("json str of the record should "
-                                            "not be empty."))
-
-    """ Begin of Tests """
-
-    def test_result_record_pass_none(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        record.test_pass()
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_PASS,
-            details=None,
-            extras=None)
-
-    def test_result_record_pass_with_float_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestPass(self.details, self.float_extra)
-        record.test_pass(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_PASS,
-            details=self.details,
-            extras=self.float_extra)
-
-    def test_result_record_pass_with_json_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestPass(self.details, self.json_extra)
-        record.test_pass(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_PASS,
-            details=self.details,
-            extras=self.json_extra)
-
-    def test_result_record_fail_none(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        record.test_fail()
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_FAIL,
-            details=None,
-            extras=None)
-
-    def test_result_record_fail_with_float_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestFailure(self.details, self.float_extra)
-        record.test_fail(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_FAIL,
-            details=self.details,
-            extras=self.float_extra)
-
-    def test_result_record_fail_with_json_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestFailure(self.details, self.json_extra)
-        record.test_fail(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_FAIL,
-            details=self.details,
-            extras=self.json_extra)
-
-    def test_result_record_skip_none(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        record.test_skip()
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_SKIP,
-            details=None,
-            extras=None)
-
-    def test_result_record_skip_with_float_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestSkip(self.details, self.float_extra)
-        record.test_skip(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_SKIP,
-            details=self.details,
-            extras=self.float_extra)
-
-    def test_result_record_skip_with_json_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestSkip(self.details, self.json_extra)
-        record.test_skip(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_SKIP,
-            details=self.details,
-            extras=self.json_extra)
-
-    def test_result_add_operator_success(self):
-        record1 = records.TestResultRecord(self.tn)
-        record1.test_begin()
-        s = signals.TestPass(self.details, self.float_extra)
-        record1.test_pass(s)
-        tr1 = records.TestResult()
-        tr1.add_record(record1)
-        device1 = ControllerInfoRecord('TestClass', 'MockDevice', 'device1')
-        tr1.add_controller_info_record(device1)
-        record2 = records.TestResultRecord(self.tn)
-        record2.test_begin()
-        s = signals.TestPass(self.details, self.json_extra)
-        record2.test_pass(s)
-        tr2 = records.TestResult()
-        tr2.add_record(record2)
-        device2 = ControllerInfoRecord('TestClass', 'MockDevice', 'device2')
-        tr2.add_controller_info_record(device2)
-        tr2 += tr1
-        self.assertTrue(tr2.passed, [tr1, tr2])
-        self.assertTrue(tr2.controller_info, [device1, device2])
-
-    def test_result_add_operator_type_mismatch(self):
-        record1 = records.TestResultRecord(self.tn)
-        record1.test_begin()
-        s = signals.TestPass(self.details, self.float_extra)
-        record1.test_pass(s)
-        tr1 = records.TestResult()
-        tr1.add_record(record1)
-        expected_msg = "Operand .* of type .* is not a TestResult."
-        with self.assertRaisesRegexp(TypeError, expected_msg):
-            tr1 += "haha"
-
-    def test_is_all_pass(self):
-        s = signals.TestPass(self.details, self.float_extra)
-        record1 = records.TestResultRecord(self.tn)
-        record1.test_begin()
-        record1.test_pass(s)
-        s = signals.TestSkip(self.details, self.float_extra)
-        record2 = records.TestResultRecord(self.tn)
-        record2.test_begin()
-        record2.test_skip(s)
-        tr = records.TestResult()
-        tr.add_record(record1)
-        tr.add_record(record2)
-        tr.add_record(record1)
-        self.assertEqual(len(tr.passed), 2)
-        self.assertTrue(tr.is_all_pass)
-
-    def test_is_all_pass_negative(self):
-        s = signals.TestFailure(self.details, self.float_extra)
-        record1 = records.TestResultRecord(self.tn)
-        record1.test_begin()
-        record1.test_fail(s)
-        record2 = records.TestResultRecord(self.tn)
-        record2.test_begin()
-        record2.test_error(s)
-        tr = records.TestResult()
-        tr.add_record(record1)
-        tr.add_record(record2)
-        self.assertFalse(tr.is_all_pass)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_relay_controller_test.py b/src/antlion/unit_tests/acts_relay_controller_test.py
deleted file mode 100755
index f1d066d..0000000
--- a/src/antlion/unit_tests/acts_relay_controller_test.py
+++ /dev/null
@@ -1,826 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-import shutil
-import tempfile
-import unittest
-
-import antlion.controllers.relay_lib.fugu_remote as fugu_remote
-from antlion.controllers.relay_lib.errors import RelayConfigError
-from antlion.controllers.relay_lib.errors import RelayDeviceConnectionError
-from antlion.controllers.relay_lib.generic_relay_device import GenericRelayDevice
-from antlion.controllers.relay_lib.relay import Relay
-from antlion.controllers.relay_lib.relay import RelayDict
-from antlion.controllers.relay_lib.relay import RelayState
-from antlion.controllers.relay_lib.relay import SynchronizeRelays
-from antlion.controllers.relay_lib.relay_board import RelayBoard
-from antlion.controllers.relay_lib.relay_device import RelayDevice
-from antlion.controllers.relay_lib.relay_rig import RelayRig
-from antlion.controllers.relay_lib.sain_smart_board import SainSmartBoard
-from mock import patch
-
-
-class MockBoard(RelayBoard):
-    def __init__(self, config):
-        self.relay_states = dict()
-        self.relay_previous_states = dict()
-        RelayBoard.__init__(self, config)
-
-    def get_relay_position_list(self):
-        return [0, 1]
-
-    def get_relay_status(self, relay_position):
-        if relay_position not in self.relay_states:
-            self.relay_states[relay_position] = RelayState.NO
-            self.relay_previous_states[relay_position] = RelayState.NO
-        return self.relay_states[relay_position]
-
-    def set(self, relay_position, state):
-        self.relay_previous_states[relay_position] = self.get_relay_status(
-            relay_position)
-        self.relay_states[relay_position] = state
-        return state
-
-
-class ActsRelayTest(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-        self.config = {
-            'name': 'MockBoard',
-            'relays': [{
-                'name': 'Relay',
-                'relay_pos': 0
-            }]
-        }
-        self.board = MockBoard(self.config)
-        self.relay = Relay(self.board, 'Relay')
-        self.board.set(self.relay.position, RelayState.NO)
-
-    def tearDown(self):
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_turn_on_from_off(self):
-        self.board.set(self.relay.position, RelayState.NO)
-        self.relay.set_nc()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NC)
-
-    def test_turn_on_from_on(self):
-        self.board.set(self.relay.position, RelayState.NC)
-        self.relay.set_nc()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NC)
-
-    def test_turn_off_from_on(self):
-        self.board.set(self.relay.position, RelayState.NC)
-        self.relay.set_no()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NO)
-
-    def test_turn_off_from_off(self):
-        self.board.set(self.relay.position, RelayState.NO)
-        self.relay.set_no()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NO)
-
-    def test_toggle_off_to_on(self):
-        self.board.set(self.relay.position, RelayState.NO)
-        self.relay.toggle()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NC)
-
-    def test_toggle_on_to_off(self):
-        self.board.set(self.relay.position, RelayState.NC)
-        self.relay.toggle()
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NO)
-
-    def test_set_on(self):
-        self.board.set(self.relay.position, RelayState.NO)
-        self.relay.set(RelayState.NC)
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NC)
-
-    def test_set_off(self):
-        self.board.set(self.relay.position, RelayState.NC)
-        self.relay.set(RelayState.NO)
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NO)
-
-    def test_set_foo(self):
-        with self.assertRaises(ValueError):
-            self.relay.set('FOO')
-
-    def test_set_nc_for(self):
-        # Here we set twice so relay_previous_state will also be OFF
-        self.board.set(self.relay.position, RelayState.NO)
-        self.board.set(self.relay.position, RelayState.NO)
-
-        self.relay.set_nc_for(0)
-
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NO)
-        self.assertEqual(self.board.relay_previous_states[self.relay.position],
-                         RelayState.NC)
-
-    def test_set_no_for(self):
-        # Here we set twice so relay_previous_state will also be OFF
-        self.board.set(self.relay.position, RelayState.NC)
-        self.board.set(self.relay.position, RelayState.NC)
-
-        self.relay.set_no_for(0)
-
-        self.assertEqual(self.board.get_relay_status(self.relay.position),
-                         RelayState.NC)
-        self.assertEqual(self.board.relay_previous_states[self.relay.position],
-                         RelayState.NO)
-
-    def test_get_status_on(self):
-        self.board.set(self.relay.position, RelayState.NC)
-        self.assertEqual(self.relay.get_status(), RelayState.NC)
-
-    def test_get_status_off(self):
-        self.board.set(self.relay.position, RelayState.NO)
-        self.assertEqual(self.relay.get_status(), RelayState.NO)
-
-    def test_clean_up_default_on(self):
-        new_relay = Relay(self.board, 0)
-        new_relay._original_state = RelayState.NO
-        self.board.set(new_relay.position, RelayState.NO)
-        new_relay.clean_up()
-
-        self.assertEqual(self.board.get_relay_status(new_relay.position),
-                         RelayState.NO)
-
-    def test_clean_up_default_off(self):
-        new_relay = Relay(self.board, 0)
-        new_relay._original_state = RelayState.NO
-        self.board.set(new_relay.position, RelayState.NC)
-        new_relay.clean_up()
-
-        self.assertEqual(self.board.get_relay_status(new_relay.position),
-                         RelayState.NO)
-
-    def test_clean_up_original_state_none(self):
-        val = 'STAYS_THE_SAME'
-        new_relay = Relay(self.board, 0)
-        # _original_state is none by default
-        # The line below sets the dict to an impossible value.
-        self.board.set(new_relay.position, val)
-        new_relay.clean_up()
-        # If the impossible value is cleared, then the test should fail.
-        self.assertEqual(self.board.get_relay_status(new_relay.position), val)
-
-
-class ActsSainSmartBoardTest(unittest.TestCase):
-    STATUS_MSG = ('<small><a href="{}"></a>'
-                  '</small><a href="{}/{}TUX">{}TUX</a><p>')
-
-    RELAY_ON_PAGE_CONTENTS = 'relay_on page'
-    RELAY_OFF_PAGE_CONTENTS = 'relay_off page'
-
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-        self.test_dir = 'file://' + tempfile.mkdtemp() + '/'
-
-        # Creates the files used for testing
-        self._set_status_page('0000000000000000')
-        with open(self.test_dir[7:] + '00', 'w+') as file:
-            file.write(self.RELAY_OFF_PAGE_CONTENTS)
-        with open(self.test_dir[7:] + '01', 'w+') as file:
-            file.write(self.RELAY_ON_PAGE_CONTENTS)
-
-        self.config = ({
-            'name':
-            'SSBoard',
-            'base_url':
-            self.test_dir,
-            'relays': [{
-                'name': '0',
-                'relay_pos': 0
-            }, {
-                'name': '1',
-                'relay_pos': 1
-            }, {
-                'name': '2',
-                'relay_pos': 7
-            }]
-        })
-        self.ss_board = SainSmartBoard(self.config)
-        self.r0 = Relay(self.ss_board, 0)
-        self.r1 = Relay(self.ss_board, 1)
-        self.r7 = Relay(self.ss_board, 7)
-
-    def tearDown(self):
-        shutil.rmtree(self.test_dir[7:])
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_get_url_code(self):
-        result = self.ss_board._get_relay_url_code(self.r0.position,
-                                                   RelayState.NO)
-        self.assertEqual(result, '00')
-
-        result = self.ss_board._get_relay_url_code(self.r0.position,
-                                                   RelayState.NC)
-        self.assertEqual(result, '01')
-
-        result = self.ss_board._get_relay_url_code(self.r7.position,
-                                                   RelayState.NO)
-        self.assertEqual(result, '14')
-
-        result = self.ss_board._get_relay_url_code(self.r7.position,
-                                                   RelayState.NC)
-        self.assertEqual(result, '15')
-
-    def test_load_page_status(self):
-        self._set_status_page('0000111100001111')
-        result = self.ss_board._load_page(SainSmartBoard.HIDDEN_STATUS_PAGE)
-        self.assertTrue(
-            result.endswith(
-                '0000111100001111TUX">0000111100001111TUX</a><p>'))
-
-    def test_load_page_relay(self):
-        result = self.ss_board._load_page('00')
-        self.assertEqual(result, self.RELAY_OFF_PAGE_CONTENTS)
-
-        result = self.ss_board._load_page('01')
-        self.assertEqual(result, self.RELAY_ON_PAGE_CONTENTS)
-
-    def test_load_page_no_connection(self):
-        with self.assertRaises(RelayDeviceConnectionError):
-            self.ss_board._load_page('**')
-
-    def _set_status_page(self, status_16_chars):
-        with open(self.test_dir[7:] + '99', 'w+') as status_file:
-            status_file.write(
-                self.STATUS_MSG.format(self.test_dir[:-1], self.test_dir[:-1],
-                                       status_16_chars, status_16_chars))
-
-    def _test_sync_status_dict(self, status_16_chars):
-        self._set_status_page(status_16_chars)
-        expected_dict = dict()
-
-        for index, char in enumerate(status_16_chars):
-            expected_dict[
-                index] = RelayState.NC if char == '1' else RelayState.NO
-
-        self.ss_board._sync_status_dict()
-        self.assertDictEqual(expected_dict, self.ss_board.status_dict)
-
-    def test_sync_status_dict(self):
-        self._test_sync_status_dict('0000111100001111')
-        self._test_sync_status_dict('0000000000000000')
-        self._test_sync_status_dict('0101010101010101')
-        self._test_sync_status_dict('1010101010101010')
-        self._test_sync_status_dict('1111111111111111')
-
-    def test_get_relay_status_status_dict_none(self):
-        self._set_status_page('1111111111111111')
-        self.ss_board.status_dict = None
-        self.assertEqual(self.ss_board.get_relay_status(self.r0.position),
-                         RelayState.NC)
-
-    def test_get_relay_status_status_dict_on(self):
-        self.r0.set(RelayState.NC)
-        self.assertEqual(self.ss_board.get_relay_status(self.r0.position),
-                         RelayState.NC)
-
-    def test_get_relay_status_status_dict_off(self):
-        self.r0.set(RelayState.NO)
-        self.assertEqual(self.ss_board.get_relay_status(self.r0.position),
-                         RelayState.NO)
-
-    def test_set_on(self):
-        patch_path = 'antlion.controllers.relay_lib.sain_smart_board.urlopen'
-        with patch(patch_path) as urlopen:
-            board = SainSmartBoard(self.config)
-            board.status_dict = {}
-            board.set(self.r0.position, RelayState.NC)
-        urlopen.assert_called_once_with('%s%s' %
-                                        (self.ss_board.base_url, '01'))
-
-    def test_set_off(self):
-        patch_path = 'antlion.controllers.relay_lib.sain_smart_board.urlopen'
-        with patch(patch_path) as urlopen:
-            board = SainSmartBoard(self.config)
-            board.status_dict = {}
-            board.set(self.r0.position, RelayState.NO)
-        urlopen.assert_called_once_with('%s%s' %
-                                        (self.ss_board.base_url, '00'))
-
-    def test_connection_error_no_tux(self):
-        default_status_msg = self.STATUS_MSG
-        self.STATUS_MSG = self.STATUS_MSG.replace('TUX', '')
-        try:
-            self._set_status_page('1111111111111111')
-            self.ss_board.get_relay_status(0)
-        except RelayDeviceConnectionError:
-            self.STATUS_MSG = default_status_msg
-            return
-
-        self.fail('Should have thrown an error without TUX appearing.')
-
-
-class ActsRelayRigTest(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-        self.config = {
-            'boards': [{
-                'type': 'SainSmartBoard',
-                'name': 'ss_control',
-                'base_url': 'http://192.168.1.4/30000/'
-            }, {
-                'type': 'SainSmartBoard',
-                'name': 'ss_control_2',
-                'base_url': 'http://192.168.1.4/30000/'
-            }],
-            'devices': [{
-                'type': 'GenericRelayDevice',
-                'name': 'device',
-                'relays': {
-                    'Relay00': 'ss_control/0',
-                    'Relay10': 'ss_control/1'
-                }
-            }]
-        }
-
-    def tearDown(self):
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_init_relay_rig_missing_boards(self):
-        flawed_config = copy.deepcopy(self.config)
-        del flawed_config['boards']
-        with self.assertRaises(RelayConfigError):
-            RelayRig(flawed_config)
-
-    def test_init_relay_rig_is_not_list(self):
-        flawed_config = copy.deepcopy(self.config)
-        flawed_config['boards'] = self.config['boards'][0]
-        with self.assertRaises(RelayConfigError):
-            RelayRig(flawed_config)
-
-    def test_init_relay_rig_duplicate_board_names(self):
-        flawed_config = copy.deepcopy(self.config)
-        flawed_config['boards'][1]['name'] = (self.config['boards'][0]['name'])
-        with self.assertRaises(RelayConfigError):
-            RelayRigMock(flawed_config)
-
-    def test_init_relay_rig_device_gets_relays(self):
-        modded_config = copy.deepcopy(self.config)
-        del modded_config['devices'][0]['relays']['Relay00']
-        rig = RelayRigMock(modded_config)
-        self.assertEqual(len(rig.relays), 4)
-        self.assertEqual(len(rig.devices['device'].relays), 1)
-
-        rig = RelayRigMock(self.config)
-        self.assertEqual(len(rig.devices['device'].relays), 2)
-
-    def test_init_relay_rig_correct_device_type(self):
-        rig = RelayRigMock(self.config)
-        self.assertEqual(len(rig.devices), 1)
-        self.assertIsInstance(rig.devices['device'], GenericRelayDevice)
-
-    def test_init_relay_rig_missing_devices_creates_generic_device(self):
-        modded_config = copy.deepcopy(self.config)
-        del modded_config['devices']
-        rig = RelayRigMock(modded_config)
-        self.assertEqual(len(rig.devices), 1)
-        self.assertIsInstance(rig.devices['device'], GenericRelayDevice)
-        self.assertDictEqual(rig.devices['device'].relays, rig.relays)
-
-
-class RelayRigMock(RelayRig):
-    """A RelayRig that substitutes the MockBoard for any board."""
-
-    _board_constructors = {
-        'SainSmartBoard': lambda x: MockBoard(x),
-        'FuguMockBoard': lambda x: FuguMockBoard(x)
-    }
-
-    def __init__(self, config=None):
-        if not config:
-            config = {
-                "boards": [{
-                    'name': 'MockBoard',
-                    'type': 'SainSmartBoard'
-                }]
-            }
-
-        RelayRig.__init__(self, config)
-
-
-class ActsGenericRelayDeviceTest(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-        self.board_config = {'name': 'MockBoard', 'type': 'SainSmartBoard'}
-
-        self.board = MockBoard(self.board_config)
-        self.r0 = self.board.relays[0]
-        self.r1 = self.board.relays[1]
-
-        self.device_config = {
-            'name': 'MockDevice',
-            'relays': {
-                'r0': 'MockBoard/0',
-                'r1': 'MockBoard/1'
-            }
-        }
-        config = {
-            'boards': [self.board_config],
-            'devices': [self.device_config]
-        }
-        self.rig = RelayRigMock(config)
-        self.rig.boards['MockBoard'] = self.board
-        self.rig.relays[self.r0.relay_id] = self.r0
-        self.rig.relays[self.r1.relay_id] = self.r1
-
-    def tearDown(self):
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_setup_single_relay(self):
-        self.r0.set(RelayState.NC)
-        self.r1.set(RelayState.NC)
-
-        modified_config = copy.deepcopy(self.device_config)
-        del modified_config['relays']['r1']
-
-        grd = GenericRelayDevice(modified_config, self.rig)
-        grd.setup()
-
-        self.assertEqual(self.r0.get_status(), RelayState.NO)
-        self.assertEqual(self.r1.get_status(), RelayState.NC)
-
-    def test_setup_multiple_relays(self):
-        self.board.set(self.r0.position, RelayState.NC)
-        self.board.set(self.r1.position, RelayState.NC)
-
-        grd = GenericRelayDevice(self.device_config, self.rig)
-        grd.setup()
-
-        self.assertEqual(self.r0.get_status(), RelayState.NO)
-        self.assertEqual(self.r1.get_status(), RelayState.NO)
-
-    def test_cleanup_single_relay(self):
-        self.test_setup_single_relay()
-
-    def test_cleanup_multiple_relays(self):
-        self.test_setup_multiple_relays()
-
-    def change_state(self, begin_state, call, end_state, previous_state=None):
-        self.board.set(self.r0.position, begin_state)
-        grd = GenericRelayDevice(self.device_config, self.rig)
-        call(grd)
-        self.assertEqual(self.r0.get_status(), end_state)
-        if previous_state:
-            self.assertEqual(
-                self.board.relay_previous_states[self.r0.position],
-                previous_state)
-
-    def test_press_while_no(self):
-        self.change_state(RelayState.NO, lambda x: x.press('r0'),
-                          RelayState.NO, RelayState.NC)
-
-    def test_press_while_nc(self):
-        self.change_state(RelayState.NC, lambda x: x.press('r0'),
-                          RelayState.NO, RelayState.NC)
-
-    def test_hold_down_while_no(self):
-        self.change_state(RelayState.NO, lambda x: x.hold_down('r0'),
-                          RelayState.NC)
-
-    def test_hold_down_while_nc(self):
-        self.change_state(RelayState.NC, lambda x: x.hold_down('r0'),
-                          RelayState.NC)
-
-    def test_release_while_nc(self):
-        self.change_state(RelayState.NC, lambda x: x.release('r0'),
-                          RelayState.NO)
-
-
-class ActsRelayDeviceTest(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-
-        self.board_config = {
-            'name': 'MockBoard',
-            'relays': [{
-                'id': 0,
-                'relay_pos': 0
-            }, {
-                'id': 1,
-                'relay_pos': 1
-            }]
-        }
-
-        self.board = MockBoard(self.board_config)
-        self.r0 = Relay(self.board, 0)
-        self.r1 = Relay(self.board, 1)
-        self.board.set(self.r0.position, RelayState.NO)
-        self.board.set(self.r1.position, RelayState.NO)
-
-        self.rig = RelayRigMock()
-        self.rig.boards['MockBoard'] = self.board
-        self.rig.relays[self.r0.relay_id] = self.r0
-        self.rig.relays[self.r1.relay_id] = self.r1
-
-        self.device_config = {
-            "type": "GenericRelayDevice",
-            "name": "device",
-            "relays": {
-                'r0': 'MockBoard/0',
-                'r1': 'MockBoard/1'
-            }
-        }
-
-    def tearDown(self):
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_init_raise_on_name_missing(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        del flawed_config['name']
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_name_wrong_type(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        flawed_config['name'] = {}
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_relays_missing(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        del flawed_config['relays']
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_relays_wrong_type(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        flawed_config['relays'] = str
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_relays_is_empty(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        flawed_config['relays'] = []
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_relays_are_dicts_without_names(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        flawed_config['relays'] = [{'id': 0}, {'id': 1}]
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_raise_on_relays_are_dicts_without_ids(self):
-        flawed_config = copy.deepcopy(self.device_config)
-        flawed_config['relays'] = [{'name': 'r0'}, {'name': 'r1'}]
-        with self.assertRaises(RelayConfigError):
-            RelayDevice(flawed_config, self.rig)
-
-    def test_init_pass_relays_have_ids_and_names(self):
-        RelayDevice(self.device_config, self.rig)
-
-
-class TestRelayRigParser(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        Relay.button_press_time = 0
-        self.board_config = {
-            'name': 'MockBoard',
-            'relays': [{
-                'id': 'r0',
-                'relay_pos': 0
-            }, {
-                'id': 'r1',
-                'relay_pos': 1
-            }]
-        }
-        self.r0 = self.board_config['relays'][0]
-        self.r1 = self.board_config['relays'][1]
-        self.board = MockBoard(self.board_config)
-
-    def tearDown(self):
-        Relay.transition_wait_time = .2
-        Relay.button_press_time = .25
-
-    def test_create_relay_board_raise_on_missing_type(self):
-        with self.assertRaises(RelayConfigError):
-            RelayRigMock().create_relay_board(self.board_config)
-
-    def test_create_relay_board_valid_config(self):
-        config = copy.deepcopy(self.board_config)
-        config['type'] = 'SainSmartBoard'
-        RelayRigMock().create_relay_board(config)
-
-    def test_create_relay_board_raise_on_type_not_found(self):
-        flawed_config = copy.deepcopy(self.board_config)
-        flawed_config['type'] = 'NonExistentBoard'
-        with self.assertRaises(RelayConfigError):
-            RelayRigMock().create_relay_board(flawed_config)
-
-    def test_create_relay_device_create_generic_on_missing_type(self):
-        rig = RelayRigMock()
-        rig.relays['r0'] = self.r0
-        rig.relays['r1'] = self.r1
-        config = {
-            'name': 'name',
-            'relays': {
-                'r0': 'MockBoard/0',
-                'r1': 'MockBoard/1'
-            }
-        }
-        device = rig.create_relay_device(config)
-        self.assertIsInstance(device, GenericRelayDevice)
-
-    def test_create_relay_device_config_with_type(self):
-        rig = RelayRigMock()
-        rig.relays['r0'] = self.r0
-        rig.relays['r1'] = self.r1
-        config = {
-            'type': 'GenericRelayDevice',
-            'name': '.',
-            'relays': {
-                'r0': 'MockBoard/0',
-                'r1': 'MockBoard/1'
-            }
-        }
-        device = rig.create_relay_device(config)
-        self.assertIsInstance(device, GenericRelayDevice)
-
-    def test_create_relay_device_raise_on_type_not_found(self):
-        rig = RelayRigMock()
-        rig.relays['r0'] = self.r0
-        rig.relays['r1'] = self.r1
-        config = {
-            'type':
-            'SomeInvalidType',
-            'name':
-            '.',
-            'relays': [{
-                'name': 'r0',
-                'pos': 'MockBoard/0'
-            }, {
-                'name': 'r1',
-                'pos': 'MockBoard/1'
-            }]
-        }
-        with self.assertRaises(RelayConfigError):
-            rig.create_relay_device(config)
-
-
-class TestSynchronizeRelays(unittest.TestCase):
-    def test_synchronize_relays(self):
-        Relay.transition_wait_time = .1
-        with SynchronizeRelays():
-            self.assertEqual(Relay.transition_wait_time, 0)
-        self.assertEqual(Relay.transition_wait_time, .1)
-
-
-class FuguMockBoard(MockBoard):
-    def get_relay_position_list(self):
-        return range(4)
-
-
-class TestFuguRemote(unittest.TestCase):
-    def setUp(self):
-        Relay.transition_wait_time = 0
-        self.mock_rig = RelayRigMock(
-            {"boards": [{
-                'name': 'MockBoard',
-                'type': 'FuguMockBoard'
-            }]})
-        self.mock_board = self.mock_rig.boards['MockBoard']
-        self.fugu_config = {
-            'type': 'FuguRemote',
-            'name': 'UniqueDeviceName',
-            'mac_address': '00:00:00:00:00:00',
-            'relays': {
-                'Power': 'MockBoard/0',
-                fugu_remote.Buttons.BACK.value: 'MockBoard/1',
-                fugu_remote.Buttons.HOME.value: 'MockBoard/2',
-                fugu_remote.Buttons.PLAY_PAUSE.value: 'MockBoard/3'
-            }
-        }
-        Relay.button_press_time = 0
-
-    def tearDown(self):
-        Relay.button_press_time = .25
-        Relay.transition_wait_time = .2
-
-    def test_config_missing_button(self):
-        """FuguRemote __init__ should throw an error if a relay is missing."""
-        flawed_config = copy.deepcopy(self.fugu_config)
-        del flawed_config['relays']['Power']
-        del flawed_config['relays'][fugu_remote.Buttons.BACK.value]
-        with self.assertRaises(RelayConfigError):
-            fugu_remote.FuguRemote(flawed_config, self.mock_rig)
-
-    def test_config_missing_mac_address(self):
-        """FuguRemote __init__ should throw an error without a mac address."""
-        flawed_config = copy.deepcopy(self.fugu_config)
-        del flawed_config['mac_address']
-        with self.assertRaises(RelayConfigError):
-            fugu_remote.FuguRemote(flawed_config, self.mock_rig)
-
-    def test_config_no_issues(self):
-        """FuguRemote __init__ should not throw errors for a correct config."""
-        fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-
-    def test_power_nc_after_setup(self):
-        """Power should be NORMALLY_CLOSED after calling setup if it exists."""
-        fugu = fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-        fugu.setup()
-        self.assertEqual(self.mock_board.get_relay_status(0), RelayState.NC)
-
-    def press_button_success(self, relay_position):
-        self.assertEqual(self.mock_board.relay_states[relay_position],
-                         RelayState.NO)
-        self.assertEqual(self.mock_board.relay_previous_states[relay_position],
-                         RelayState.NC)
-
-    def test_press_play_pause(self):
-        fugu = fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-        fugu.press_play_pause()
-        self.press_button_success(3)
-
-    def test_press_back(self):
-        fugu = fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-        fugu.press_back()
-        self.press_button_success(1)
-
-    def test_press_home(self):
-        fugu = fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-        fugu.press_home()
-        self.press_button_success(2)
-
-    def test_enter_pairing_mode(self):
-        fugu = fugu_remote.FuguRemote(self.fugu_config, self.mock_rig)
-        fugu_remote.PAIRING_MODE_WAIT_TIME = 0
-        fugu.enter_pairing_mode()
-        self.press_button_success(2)
-        self.press_button_success(1)
-
-
-class TestRelayDict(unittest.TestCase):
-    def test_init(self):
-        mock_device = object()
-        blank_dict = dict()
-        relay_dict = RelayDict(mock_device, blank_dict)
-        self.assertEqual(relay_dict._store, blank_dict)
-        self.assertEqual(relay_dict.relay_device, mock_device)
-
-    def test_get_item_valid_key(self):
-        mock_device = object()
-        blank_dict = {'key': 'value'}
-        relay_dict = RelayDict(mock_device, blank_dict)
-        self.assertEqual(relay_dict['key'], 'value')
-
-    def test_get_item_invalid_key(self):
-        # Create an object with a single attribute 'name'
-        mock_device = type('', (object, ), {'name': 'name'})()
-        blank_dict = {'key': 'value'}
-        relay_dict = RelayDict(mock_device, blank_dict)
-        with self.assertRaises(RelayConfigError):
-            value = relay_dict['not_key']
-
-    def test_iter(self):
-        mock_device = type('', (object, ), {'name': 'name'})()
-        data_dict = {'a': '1', 'b': '2', 'c': '3'}
-        relay_dict = RelayDict(mock_device, data_dict)
-
-        rd_set = set()
-        for key in relay_dict:
-            rd_set.add(key)
-        dd_set = set()
-        for key in data_dict:
-            dd_set.add(key)
-
-        self.assertSetEqual(rd_set, dd_set)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_test_decorators_test.py b/src/antlion/unit_tests/acts_test_decorators_test.py
deleted file mode 100755
index d7bc12d..0000000
--- a/src/antlion/unit_tests/acts_test_decorators_test.py
+++ /dev/null
@@ -1,184 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import shutil
-import tempfile
-import unittest
-import mock
-
-from mobly import config_parser as mobly_config_parser
-
-from antlion import base_test
-from antlion import signals
-from antlion import test_decorators
-from antlion import test_runner
-from antlion.controllers.sl4a_lib import rpc_client
-
-
-def return_true():
-    return True
-
-
-def return_false():
-    return False
-
-
-def raise_pass():
-    raise signals.TestPass('')
-
-
-def raise_failure():
-    raise signals.TestFailure('')
-
-
-def raise_sl4a():
-    raise rpc_client.Sl4aException('')
-
-
-def raise_generic():
-    raise Exception('')
-
-
-class MockTest(base_test.BaseTestClass):
-    TEST_CASE_LIST = 'test_run_mock_test'
-    TEST_LOGIC_ATTR = 'test_logic'
-
-    def test_run_mock_test(self):
-        getattr(MockTest, MockTest.TEST_LOGIC_ATTR, None)()
-
-
-class TestDecoratorIntegrationTests(unittest.TestCase):
-    @classmethod
-    def setUpClass(cls):
-        cls.tmp_dir = tempfile.mkdtemp()
-        cls.MOCK_CONFIG = mobly_config_parser.TestRunConfig()
-        cls.MOCK_CONFIG.testbed_name = 'SampleTestBed'
-        cls.MOCK_CONFIG.log_path = cls.tmp_dir
-
-        cls.MOCK_TEST_RUN_LIST = [(MockTest.__name__,
-                                   [MockTest.TEST_CASE_LIST])]
-
-    @classmethod
-    def tearDownClass(cls):
-        shutil.rmtree(cls.tmp_dir)
-
-    def _run_with_test_logic(self, func):
-        if hasattr(MockTest, MockTest.TEST_LOGIC_ATTR):
-            delattr(MockTest, MockTest.TEST_LOGIC_ATTR)
-        setattr(MockTest, MockTest.TEST_LOGIC_ATTR, func)
-        self.test_runner = test_runner.TestRunner(self.MOCK_CONFIG,
-                                                  self.MOCK_TEST_RUN_LIST)
-        self.test_runner.run(MockTest)
-
-    def _validate_results_has_extra(self, result, extra_key, extra_value):
-        results = self.test_runner.results
-        self.assertGreaterEqual(len(results.executed), 1,
-                                'Expected at least one executed test.')
-        record = results.executed[0]
-        self.assertIsNotNone(record.extras,
-                             'Expected the test record to have extras.')
-        self.assertEqual(record.extras[extra_key], extra_value)
-
-    def test_mock_test_with_raise_pass(self):
-        self._run_with_test_logic(raise_pass)
-
-    def test_mock_test_with_raise_generic(self):
-        self._run_with_test_logic(raise_generic)
-
-
-class RepeatedTestTests(unittest.TestCase):
-    def test_all_error_types_count_toward_failures(self):
-        def result_selector(results, _):
-            self.assertIsInstance(results[0], AssertionError)
-            self.assertIsInstance(results[1], signals.TestFailure)
-            self.assertIsInstance(results[2], signals.TestError)
-            self.assertIsInstance(results[3], IndexError)
-            raise signals.TestPass('Expected failures occurred')
-
-        @test_decorators.repeated_test(1, 3, result_selector)
-        def test_case(_, attempt_number):
-            if attempt_number == 1:
-                raise AssertionError()
-            elif attempt_number == 2:
-                raise signals.TestFailure('Failed')
-            elif attempt_number == 3:
-                raise signals.TestError('Error')
-            else:
-                # Note that any Exception that does not fall into another bucket
-                # is also considered a failure
-                raise IndexError('Bad index')
-
-        with self.assertRaises(signals.TestPass):
-            test_case(mock.Mock())
-
-    def test_passes_stop_repeating_the_test_case(self):
-        def result_selector(results, _):
-            self.assertEqual(len(results), 3)
-            for result in results:
-                self.assertIsInstance(result, signals.TestPass)
-            raise signals.TestPass('Expected passes occurred')
-
-        @test_decorators.repeated_test(3, 0, result_selector)
-        def test_case(*_):
-            raise signals.TestPass('Passed')
-
-        with self.assertRaises(signals.TestPass):
-            test_case(mock.Mock())
-
-    def test_abort_signals_are_uncaught(self):
-        @test_decorators.repeated_test(3, 0)
-        def test_case(*_):
-            raise signals.TestAbortClass('Abort All')
-
-        with self.assertRaises(signals.TestAbortClass):
-            test_case(mock.Mock())
-
-    def test_keyboard_interrupt_is_uncaught(self):
-        @test_decorators.repeated_test(3, 0)
-        def test_case(*_):
-            raise KeyboardInterrupt()
-
-        with self.assertRaises(KeyboardInterrupt):
-            test_case(mock.Mock())
-
-    def test_teardown_and_setup_are_called_between_test_cases(self):
-        mock_test_class = mock.Mock()
-
-        @test_decorators.repeated_test(1, 1)
-        def test_case(*_):
-            raise signals.TestFailure('Failed')
-
-        with self.assertRaises(signals.TestFailure):
-            test_case(mock_test_class)
-
-        self.assertTrue(mock_test_class.setup_test.called)
-        self.assertTrue(mock_test_class.teardown_test.called)
-
-    def test_result_selector_returned_value_gets_raised(self):
-        def result_selector(*_):
-            return signals.TestPass('Expect this to be raised.')
-
-        @test_decorators.repeated_test(3, 0, result_selector=result_selector)
-        def test_case(*_):
-            raise signals.TestFailure('Result selector ignores this.')
-
-        with self.assertRaises(signals.TestPass):
-            test_case(mock.Mock())
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_test_runner_test.py b/src/antlion/unit_tests/acts_test_runner_test.py
deleted file mode 100755
index c9132ad..0000000
--- a/src/antlion/unit_tests/acts_test_runner_test.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import os
-import shutil
-import tempfile
-import unittest
-
-from mobly.config_parser import TestRunConfig
-
-from antlion import keys
-from antlion import test_runner
-
-import acts_android_device_test
-import mock_controller
-import IntegrationTest
-
-
-class ActsTestRunnerTest(unittest.TestCase):
-    """This test class has unit tests for the implementation of everything
-    under antlion.test_runner.
-    """
-    def setUp(self):
-        self.tmp_dir = tempfile.mkdtemp()
-        self.base_mock_test_config = TestRunConfig()
-        self.base_mock_test_config.testbed_name = 'SampleTestBed'
-        self.base_mock_test_config.log_path = self.tmp_dir
-        self.base_mock_test_config.controller_configs = {
-            'testpaths': [os.path.dirname(IntegrationTest.__file__)]
-        }
-        self.base_mock_test_config.user_params = {
-            'icecream': 42,
-            'extra_param': 'haha'
-        }
-        self.mock_run_list = [('SampleTest', None)]
-
-    def tearDown(self):
-        shutil.rmtree(self.tmp_dir)
-
-    def test_run_twice(self):
-        """Verifies that:
-        1. Repeated run works properly.
-        2. The original configuration is not altered if a test controller
-           module modifies configuration.
-        """
-        mock_test_config = self.base_mock_test_config.copy()
-        tb_key = keys.Config.key_testbed.value
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        my_config = [{
-            'serial': 'xxxx',
-            'magic': 'Magic1'
-        }, {
-            'serial': 'xxxx',
-            'magic': 'Magic2'
-        }]
-        mock_test_config.controller_configs[mock_ctrlr_config_name] = my_config
-        tr = test_runner.TestRunner(mock_test_config,
-                                    [('IntegrationTest', None)])
-        tr.run()
-        tr.run()
-        tr.stop()
-        results = tr.results.summary_dict()
-        self.assertEqual(results['Requested'], 2)
-        self.assertEqual(results['Executed'], 2)
-        self.assertEqual(results['Passed'], 2)
-
-    @mock.patch('antlion.controllers.adb.AdbProxy',
-                return_value=acts_android_device_test.MockAdbProxy(
-                    1, return_value=''))
-    @mock.patch('antlion.controllers.fastboot.FastbootProxy',
-                return_value=acts_android_device_test.MockFastbootProxy(1))
-    @mock.patch('antlion.controllers.android_device.list_adb_devices',
-                return_value=['1'])
-    @mock.patch('antlion.controllers.android_device.get_all_instances',
-                return_value=acts_android_device_test.get_mock_ads(1))
-    @mock.patch(
-        'antlion.controllers.android_device.AndroidDevice.ensure_screen_on',
-        return_value=True)
-    @mock.patch(
-        'antlion.controllers.android_device.AndroidDevice.exit_setup_wizard',
-        return_value=True)
-    @mock.patch('antlion.controllers.android_device.AndroidDevice.start_services')
-    def test_run_two_test_classes(self, *_):
-        """Verifies that running more than one test class in one test run works
-        properly.
-
-        This requires using a built-in controller module. Using AndroidDevice
-        module since it has all the mocks needed already.
-        """
-        mock_test_config = self.base_mock_test_config.copy()
-        tb_key = keys.Config.key_testbed.value
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        my_config = [{
-            'serial': 'xxxx',
-            'magic': 'Magic1'
-        }, {
-            'serial': 'xxxx',
-            'magic': 'Magic2'
-        }]
-        mock_test_config.controller_configs[mock_ctrlr_config_name] = my_config
-        mock_test_config.controller_configs['AndroidDevice'] = [{
-            'serial':
-            '1',
-            'skip_sl4a':
-            True
-        }]
-        tr = test_runner.TestRunner(mock_test_config,
-                                    [('IntegrationTest', None),
-                                     ('IntegrationTest', None)])
-        tr.run()
-        tr.stop()
-        results = tr.results.summary_dict()
-        self.assertEqual(results['Requested'], 2)
-        self.assertEqual(results['Executed'], 2)
-        self.assertEqual(results['Passed'], 2)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_utils_test.py b/src/antlion/unit_tests/acts_utils_test.py
deleted file mode 100755
index 1c8b21f..0000000
--- a/src/antlion/unit_tests/acts_utils_test.py
+++ /dev/null
@@ -1,584 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import subprocess
-import time
-import unittest
-
-import mock
-
-from antlion import utils
-from antlion import signals
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.fuchsia_lib.sl4f import SL4F
-from antlion.controllers.fuchsia_lib.ssh import SSHConfig, SSHProvider, SSHResult
-from antlion.controllers.utils_lib.ssh.connection import SshConnection
-from antlion.libs.proc import job
-
-PROVISIONED_STATE_GOOD = 1
-
-MOCK_ENO1_IP_ADDRESSES = """100.127.110.79
-2401:fa00:480:7a00:8d4f:85ff:cc5c:787e
-2401:fa00:480:7a00:459:b993:fcbf:1419
-fe80::c66d:3c75:2cec:1d72"""
-
-MOCK_WLAN1_IP_ADDRESSES = ""
-
-FUCHSIA_INTERFACES = {
-    'id':
-    '1',
-    'result': [
-        {
-            'id': 1,
-            'name': 'lo',
-            'ipv4_addresses': [
-                [127, 0, 0, 1],
-            ],
-            'ipv6_addresses': [
-                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
-            ],
-            'online': True,
-            'mac': [0, 0, 0, 0, 0, 0],
-        },
-        {
-            'id':
-            2,
-            'name':
-            'eno1',
-            'ipv4_addresses': [
-                [100, 127, 110, 79],
-            ],
-            'ipv6_addresses': [
-                [
-                    254, 128, 0, 0, 0, 0, 0, 0, 198, 109, 60, 117, 44, 236, 29,
-                    114
-                ],
-                [
-                    36, 1, 250, 0, 4, 128, 122, 0, 141, 79, 133, 255, 204, 92,
-                    120, 126
-                ],
-                [
-                    36, 1, 250, 0, 4, 128, 122, 0, 4, 89, 185, 147, 252, 191,
-                    20, 25
-                ],
-            ],
-            'online':
-            True,
-            'mac': [0, 224, 76, 5, 76, 229],
-        },
-        {
-            'id':
-            3,
-            'name':
-            'wlanxc0',
-            'ipv4_addresses': [],
-            'ipv6_addresses': [
-                [
-                    254, 128, 0, 0, 0, 0, 0, 0, 96, 255, 93, 96, 52, 253, 253,
-                    243
-                ],
-                [
-                    254, 128, 0, 0, 0, 0, 0, 0, 70, 7, 11, 255, 254, 118, 126,
-                    192
-                ],
-            ],
-            'online':
-            False,
-            'mac': [68, 7, 11, 118, 126, 192],
-        },
-    ],
-    'error':
-    None,
-}
-
-CORRECT_FULL_IP_LIST = {
-    'ipv4_private': [],
-    'ipv4_public': ['100.127.110.79'],
-    'ipv6_link_local': ['fe80::c66d:3c75:2cec:1d72'],
-    'ipv6_private_local': [],
-    'ipv6_public': [
-        '2401:fa00:480:7a00:8d4f:85ff:cc5c:787e',
-        '2401:fa00:480:7a00:459:b993:fcbf:1419'
-    ]
-}
-
-CORRECT_EMPTY_IP_LIST = {
-    'ipv4_private': [],
-    'ipv4_public': [],
-    'ipv6_link_local': [],
-    'ipv6_private_local': [],
-    'ipv6_public': []
-}
-
-
-class ByPassSetupWizardTests(unittest.TestCase):
-    """This test class for unit testing antlion.utils.bypass_setup_wizard."""
-
-    def test_start_standing_subproc(self):
-        with self.assertRaisesRegex(utils.ActsUtilsError,
-                                    'Process .* has terminated'):
-            utils.start_standing_subprocess('sleep 0', check_health_delay=0.1)
-
-    def test_stop_standing_subproc(self):
-        p = utils.start_standing_subprocess('sleep 0')
-        time.sleep(0.1)
-        with self.assertRaisesRegex(utils.ActsUtilsError,
-                                    'Process .* has terminated'):
-            utils.stop_standing_subprocess(p)
-
-    @mock.patch('time.sleep')
-    def test_bypass_setup_wizard_no_complications(self, _):
-        ad = mock.Mock()
-        ad.adb.shell.side_effect = [
-            # Return value for SetupWizardExitActivity
-            BypassSetupWizardReturn.NO_COMPLICATIONS,
-            # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD,
-        ]
-        ad.adb.return_state = BypassSetupWizardReturn.NO_COMPLICATIONS
-        self.assertTrue(utils.bypass_setup_wizard(ad))
-        self.assertFalse(
-            ad.adb.root_adb.called,
-            'The root command should not be called if there are no '
-            'complications.')
-
-    @mock.patch('time.sleep')
-    def test_bypass_setup_wizard_unrecognized_error(self, _):
-        ad = mock.Mock()
-        ad.adb.shell.side_effect = [
-            # Return value for SetupWizardExitActivity
-            BypassSetupWizardReturn.UNRECOGNIZED_ERR,
-            # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD,
-        ]
-        with self.assertRaises(AdbError):
-            utils.bypass_setup_wizard(ad)
-        self.assertFalse(
-            ad.adb.root_adb.called,
-            'The root command should not be called if we do not have a '
-            'codepath for recovering from the failure.')
-
-    @mock.patch('time.sleep')
-    def test_bypass_setup_wizard_need_root_access(self, _):
-        ad = mock.Mock()
-        ad.adb.shell.side_effect = [
-            # Return value for SetupWizardExitActivity
-            BypassSetupWizardReturn.ROOT_ADB_NO_COMP,
-            # Return value for rooting the device
-            BypassSetupWizardReturn.NO_COMPLICATIONS,
-            # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD
-        ]
-
-        utils.bypass_setup_wizard(ad)
-
-        self.assertTrue(
-            ad.adb.root_adb_called,
-            'The command required root access, but the device was never '
-            'rooted.')
-
-    @mock.patch('time.sleep')
-    def test_bypass_setup_wizard_need_root_already_skipped(self, _):
-        ad = mock.Mock()
-        ad.adb.shell.side_effect = [
-            # Return value for SetupWizardExitActivity
-            BypassSetupWizardReturn.ROOT_ADB_SKIPPED,
-            # Return value for SetupWizardExitActivity after root
-            BypassSetupWizardReturn.ALREADY_BYPASSED,
-            # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD
-        ]
-        self.assertTrue(utils.bypass_setup_wizard(ad))
-        self.assertTrue(ad.adb.root_adb_called)
-
-    @mock.patch('time.sleep')
-    def test_bypass_setup_wizard_root_access_still_fails(self, _):
-        ad = mock.Mock()
-        ad.adb.shell.side_effect = [
-            # Return value for SetupWizardExitActivity
-            BypassSetupWizardReturn.ROOT_ADB_FAILS,
-            # Return value for SetupWizardExitActivity after root
-            BypassSetupWizardReturn.UNRECOGNIZED_ERR,
-            # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD
-        ]
-
-        with self.assertRaises(AdbError):
-            utils.bypass_setup_wizard(ad)
-        self.assertTrue(ad.adb.root_adb_called)
-
-
-class BypassSetupWizardReturn:
-    # No complications. Bypass works the first time without issues.
-    NO_COMPLICATIONS = (
-        'Starting: Intent { cmp=com.google.android.setupwizard/'
-        '.SetupWizardExitActivity }')
-
-    # Fail with doesn't need to be skipped/was skipped already.
-    ALREADY_BYPASSED = AdbError('', 'ADB_CMD_OUTPUT:0', 'Error type 3\n'
-                                'Error: Activity class', 1)
-    # Fail with different error.
-    UNRECOGNIZED_ERR = AdbError('', 'ADB_CMD_OUTPUT:0', 'Error type 4\n'
-                                'Error: Activity class', 0)
-    # Fail, get root access, then no complications arise.
-    ROOT_ADB_NO_COMP = AdbError(
-        '', 'ADB_CMD_OUTPUT:255', 'Security exception: Permission Denial: '
-        'starting Intent { flg=0x10000000 '
-        'cmp=com.google.android.setupwizard/'
-        '.SetupWizardExitActivity } from null '
-        '(pid=5045, uid=2000) not exported from uid '
-        '10000', 0)
-    # Even with root access, the bypass setup wizard doesn't need to be skipped.
-    ROOT_ADB_SKIPPED = AdbError(
-        '', 'ADB_CMD_OUTPUT:255', 'Security exception: Permission Denial: '
-        'starting Intent { flg=0x10000000 '
-        'cmp=com.google.android.setupwizard/'
-        '.SetupWizardExitActivity } from null '
-        '(pid=5045, uid=2000) not exported from '
-        'uid 10000', 0)
-    # Even with root access, the bypass setup wizard fails
-    ROOT_ADB_FAILS = AdbError(
-        '', 'ADB_CMD_OUTPUT:255',
-        'Security exception: Permission Denial: starting Intent { '
-        'flg=0x10000000 cmp=com.google.android.setupwizard/'
-        '.SetupWizardExitActivity } from null (pid=5045, uid=2000) not '
-        'exported from uid 10000', 0)
-
-
-class ConcurrentActionsTest(unittest.TestCase):
-    """Tests antlion.utils.run_concurrent_actions and related functions."""
-
-    @staticmethod
-    def function_returns_passed_in_arg(arg):
-        return arg
-
-    @staticmethod
-    def function_raises_passed_in_exception_type(exception_type):
-        raise exception_type
-
-    def test_run_concurrent_actions_no_raise_returns_proper_return_values(
-            self):
-        """Tests run_concurrent_actions_no_raise returns in the correct order.
-
-        Each function passed into run_concurrent_actions_no_raise returns the
-        values returned from each individual callable in the order passed in.
-        """
-        ret_values = utils.run_concurrent_actions_no_raise(
-            lambda: self.function_returns_passed_in_arg('ARG1'),
-            lambda: self.function_returns_passed_in_arg('ARG2'),
-            lambda: self.function_returns_passed_in_arg('ARG3'))
-
-        self.assertEqual(len(ret_values), 3)
-        self.assertEqual(ret_values[0], 'ARG1')
-        self.assertEqual(ret_values[1], 'ARG2')
-        self.assertEqual(ret_values[2], 'ARG3')
-
-    def test_run_concurrent_actions_no_raise_returns_raised_exceptions(self):
-        """Tests run_concurrent_actions_no_raise returns raised exceptions.
-
-        Instead of allowing raised exceptions to be raised in the main thread,
-        this function should capture the exception and return them in the slot
-        the return value should have been returned in.
-        """
-        ret_values = utils.run_concurrent_actions_no_raise(
-            lambda: self.function_raises_passed_in_exception_type(IndexError),
-            lambda: self.function_raises_passed_in_exception_type(KeyError))
-
-        self.assertEqual(len(ret_values), 2)
-        self.assertEqual(ret_values[0].__class__, IndexError)
-        self.assertEqual(ret_values[1].__class__, KeyError)
-
-    def test_run_concurrent_actions_returns_proper_return_values(self):
-        """Tests run_concurrent_actions returns in the correct order.
-
-        Each function passed into run_concurrent_actions returns the values
-        returned from each individual callable in the order passed in.
-        """
-
-        ret_values = utils.run_concurrent_actions(
-            lambda: self.function_returns_passed_in_arg('ARG1'),
-            lambda: self.function_returns_passed_in_arg('ARG2'),
-            lambda: self.function_returns_passed_in_arg('ARG3'))
-
-        self.assertEqual(len(ret_values), 3)
-        self.assertEqual(ret_values[0], 'ARG1')
-        self.assertEqual(ret_values[1], 'ARG2')
-        self.assertEqual(ret_values[2], 'ARG3')
-
-    def test_run_concurrent_actions_raises_exceptions(self):
-        """Tests run_concurrent_actions raises exceptions from given actions."""
-        with self.assertRaises(KeyError):
-            utils.run_concurrent_actions(
-                lambda: self.function_returns_passed_in_arg('ARG1'), lambda:
-                self.function_raises_passed_in_exception_type(KeyError))
-
-    def test_test_concurrent_actions_raises_non_test_failure(self):
-        """Tests test_concurrent_actions raises the given exception."""
-        with self.assertRaises(KeyError):
-            utils.test_concurrent_actions(
-                lambda: self.function_raises_passed_in_exception_type(KeyError
-                                                                      ),
-                failure_exceptions=signals.TestFailure)
-
-    def test_test_concurrent_actions_raises_test_failure(self):
-        """Tests test_concurrent_actions raises the given exception."""
-        with self.assertRaises(signals.TestFailure):
-            utils.test_concurrent_actions(
-                lambda: self.function_raises_passed_in_exception_type(KeyError
-                                                                      ),
-                failure_exceptions=KeyError)
-
-
-class SuppressLogOutputTest(unittest.TestCase):
-    """Tests SuppressLogOutput"""
-
-    def test_suppress_log_output(self):
-        """Tests that the SuppressLogOutput context manager removes handlers
-        of the specified levels upon entry and re-adds handlers upon exit.
-        """
-        handlers = [
-            logging.NullHandler(level=lvl)
-            for lvl in (logging.DEBUG, logging.INFO, logging.ERROR)
-        ]
-        log = logging.getLogger('test_log')
-        for handler in handlers:
-            log.addHandler(handler)
-        with utils.SuppressLogOutput(log, [logging.INFO, logging.ERROR]):
-            self.assertTrue(
-                any(handler.level == logging.DEBUG
-                    for handler in log.handlers))
-            self.assertFalse(
-                any(handler.level in (logging.INFO, logging.ERROR)
-                    for handler in log.handlers))
-        self.assertCountEqual(handlers, log.handlers)
-
-
-class IpAddressUtilTest(unittest.TestCase):
-
-    def test_positive_ipv4_normal_address(self):
-        ip_address = "192.168.1.123"
-        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
-
-    def test_positive_ipv4_any_address(self):
-        ip_address = "0.0.0.0"
-        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
-
-    def test_positive_ipv4_broadcast(self):
-        ip_address = "255.255.255.0"
-        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
-
-    def test_negative_ipv4_with_ipv6_address(self):
-        ip_address = "fe80::f693:9fff:fef4:1ac"
-        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
-
-    def test_negative_ipv4_with_invalid_string(self):
-        ip_address = "fdsafdsafdsafdsf"
-        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
-
-    def test_negative_ipv4_with_invalid_number(self):
-        ip_address = "192.168.500.123"
-        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
-
-    def test_positive_ipv6(self):
-        ip_address = 'fe80::f693:9fff:fef4:1ac'
-        self.assertTrue(utils.is_valid_ipv6_address(ip_address))
-
-    def test_positive_ipv6_link_local(self):
-        ip_address = 'fe80::'
-        self.assertTrue(utils.is_valid_ipv6_address(ip_address))
-
-    def test_negative_ipv6_with_ipv4_address(self):
-        ip_address = '192.168.1.123'
-        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
-
-    def test_negative_ipv6_invalid_characters(self):
-        ip_address = 'fe80:jkyr:f693:9fff:fef4:1ac'
-        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
-
-    def test_negative_ipv6_invalid_string(self):
-        ip_address = 'fdsafdsafdsafdsf'
-        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_local_get_interface_ip_addresses_full(self, job_mock):
-        job_mock.side_effect = [
-            job.Result(stdout=bytes(MOCK_ENO1_IP_ADDRESSES, 'utf-8'),
-                       encoding='utf-8'),
-        ]
-        self.assertEqual(utils.get_interface_ip_addresses(job, 'eno1'),
-                         CORRECT_FULL_IP_LIST)
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_local_get_interface_ip_addresses_empty(self, job_mock):
-        job_mock.side_effect = [
-            job.Result(stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, 'utf-8'),
-                       encoding='utf-8'),
-        ]
-        self.assertEqual(utils.get_interface_ip_addresses(job, 'wlan1'),
-                         CORRECT_EMPTY_IP_LIST)
-
-    @mock.patch(
-        'antlion.controllers.utils_lib.ssh.connection.SshConnection.run')
-    def test_ssh_get_interface_ip_addresses_full(self, ssh_mock):
-        ssh_mock.side_effect = [
-            job.Result(stdout=bytes(MOCK_ENO1_IP_ADDRESSES, 'utf-8'),
-                       encoding='utf-8'),
-        ]
-        self.assertEqual(
-            utils.get_interface_ip_addresses(SshConnection('mock_settings'),
-                                             'eno1'), CORRECT_FULL_IP_LIST)
-
-    @mock.patch(
-        'antlion.controllers.utils_lib.ssh.connection.SshConnection.run')
-    def test_ssh_get_interface_ip_addresses_empty(self, ssh_mock):
-        ssh_mock.side_effect = [
-            job.Result(stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, 'utf-8'),
-                       encoding='utf-8'),
-        ]
-        self.assertEqual(
-            utils.get_interface_ip_addresses(SshConnection('mock_settings'),
-                                             'wlan1'), CORRECT_EMPTY_IP_LIST)
-
-    @mock.patch('antlion.controllers.adb.AdbProxy')
-    @mock.patch.object(AndroidDevice, 'is_bootloader', return_value=True)
-    def test_android_get_interface_ip_addresses_full(self, is_bootloader,
-                                                     adb_mock):
-        adb_mock().shell.side_effect = [
-            MOCK_ENO1_IP_ADDRESSES,
-        ]
-        self.assertEqual(
-            utils.get_interface_ip_addresses(AndroidDevice(), 'eno1'),
-            CORRECT_FULL_IP_LIST)
-
-    @mock.patch('antlion.controllers.adb.AdbProxy')
-    @mock.patch.object(AndroidDevice, 'is_bootloader', return_value=True)
-    def test_android_get_interface_ip_addresses_empty(self, is_bootloader,
-                                                      adb_mock):
-        adb_mock().shell.side_effect = [
-            MOCK_WLAN1_IP_ADDRESSES,
-        ]
-        self.assertEqual(
-            utils.get_interface_ip_addresses(AndroidDevice(), 'wlan1'),
-            CORRECT_EMPTY_IP_LIST)
-
-    @mock.patch('antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f',
-                new_callable=mock.PropertyMock)
-    @mock.patch('antlion.controllers.fuchsia_device.FuchsiaDevice.ffx',
-                new_callable=mock.PropertyMock)
-    @mock.patch('antlion.controllers.fuchsia_lib.utils_lib.wait_for_port')
-    @mock.patch('antlion.controllers.fuchsia_lib.ssh.SSHProvider.run')
-    @mock.patch(
-        'antlion.controllers.fuchsia_lib.sl4f.SL4F._verify_sl4f_connection')
-    @mock.patch('antlion.controllers.fuchsia_device.'
-                'FuchsiaDevice._generate_ssh_config')
-    @mock.patch('antlion.controllers.'
-                'fuchsia_lib.netstack.netstack_lib.'
-                'FuchsiaNetstackLib.netstackListInterfaces')
-    def test_fuchsia_get_interface_ip_addresses_full(
-            self, list_interfaces_mock, generate_ssh_config_mock,
-            verify_sl4f_conn_mock, ssh_run_mock, wait_for_port_mock, ffx_mock,
-            sl4f_mock):
-        # Configure the log path which is required by ACTS logger.
-        logging.log_path = '/tmp/unit_test_garbage'
-
-        ssh = SSHProvider(SSHConfig('192.168.1.1', 22, '/dev/null'))
-        ssh_run_mock.return_value = SSHResult(
-            subprocess.CompletedProcess([], 0, stdout=b'', stderr=b''))
-
-        # Don't try to wait for the SL4F server to start; it's not being used.
-        wait_for_port_mock.return_value = None
-
-        sl4f_mock.return_value = SL4F(ssh, 'http://192.168.1.1:80')
-        verify_sl4f_conn_mock.return_value = None
-
-        list_interfaces_mock.return_value = FUCHSIA_INTERFACES
-        self.assertEqual(
-            utils.get_interface_ip_addresses(
-                FuchsiaDevice({'ip': '192.168.1.1'}), 'eno1'),
-            CORRECT_FULL_IP_LIST)
-
-    @mock.patch('antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f',
-                new_callable=mock.PropertyMock)
-    @mock.patch('antlion.controllers.fuchsia_device.FuchsiaDevice.ffx',
-                new_callable=mock.PropertyMock)
-    @mock.patch('antlion.controllers.fuchsia_lib.utils_lib.wait_for_port')
-    @mock.patch('antlion.controllers.fuchsia_lib.ssh.SSHProvider.run')
-    @mock.patch(
-        'antlion.controllers.fuchsia_lib.sl4f.SL4F._verify_sl4f_connection')
-    @mock.patch('antlion.controllers.fuchsia_device.'
-                'FuchsiaDevice._generate_ssh_config')
-    @mock.patch('antlion.controllers.'
-                'fuchsia_lib.netstack.netstack_lib.'
-                'FuchsiaNetstackLib.netstackListInterfaces')
-    def test_fuchsia_get_interface_ip_addresses_empty(
-            self, list_interfaces_mock, generate_ssh_config_mock,
-            verify_sl4f_conn_mock, ssh_run_mock, wait_for_port_mock, ffx_mock,
-            sl4f_mock):
-        # Configure the log path which is required by ACTS logger.
-        logging.log_path = '/tmp/unit_test_garbage'
-
-        ssh = SSHProvider(SSHConfig('192.168.1.1', 22, '/dev/null'))
-        ssh_run_mock.return_value = SSHResult(
-            subprocess.CompletedProcess([], 0, stdout=b'', stderr=b''))
-
-        # Don't try to wait for the SL4F server to start; it's not being used.
-        wait_for_port_mock.return_value = None
-
-        sl4f_mock.return_value = SL4F(ssh, 'http://192.168.1.1:80')
-        verify_sl4f_conn_mock.return_value = None
-
-        list_interfaces_mock.return_value = FUCHSIA_INTERFACES
-        self.assertEqual(
-            utils.get_interface_ip_addresses(
-                FuchsiaDevice({'ip': '192.168.1.1'}), 'wlan1'),
-            CORRECT_EMPTY_IP_LIST)
-
-
-class GetDeviceTest(unittest.TestCase):
-
-    class TestDevice:
-
-        def __init__(self, id, device_type=None) -> None:
-            self.id = id
-            if device_type:
-                self.device_type = device_type
-
-    def test_get_device_none(self):
-        devices = []
-        self.assertRaises(ValueError, utils.get_device, devices, 'DUT')
-
-    def test_get_device_default_one(self):
-        devices = [self.TestDevice(0)]
-        self.assertEqual(utils.get_device(devices, 'DUT').id, 0)
-
-    def test_get_device_default_many(self):
-        devices = [self.TestDevice(0), self.TestDevice(1)]
-        self.assertEqual(utils.get_device(devices, 'DUT').id, 0)
-
-    def test_get_device_specified_one(self):
-        devices = [self.TestDevice(0), self.TestDevice(1, 'DUT')]
-        self.assertEqual(utils.get_device(devices, 'DUT').id, 1)
-
-    def test_get_device_specified_many(self):
-        devices = [self.TestDevice(0, 'DUT'), self.TestDevice(1, 'DUT')]
-        self.assertRaises(ValueError, utils.get_device, devices, 'DUT')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/abstract_inst_test.py b/src/antlion/unit_tests/controllers/abstract_inst_test.py
deleted file mode 100755
index 7952f11..0000000
--- a/src/antlion/unit_tests/controllers/abstract_inst_test.py
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/usr/bin python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python unittest module for GNSS Abstract Instrument Library."""
-
-import socket
-import unittest
-from unittest.mock import Mock
-from unittest.mock import patch
-import antlion.controllers.abstract_inst as pyinst
-
-
-class SocketInstrumentTest(unittest.TestCase):
-    """A class for unit-testing antlion.controllers.gnssinst_lib.abstract_inst"""
-
-    @patch('socket.create_connection')
-    def test__connect_socket(self, mock_connect):
-        """test socket connection normal completion."""
-        mock_connect.return_value.recv.return_value = b'Dummy Instrument\n'
-
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-        test_inst._connect_socket()
-
-        mock_connect.assert_called_with(('192.168.1.11', '5050'), timeout=120)
-
-    @patch('socket.create_connection')
-    def test__connect_socket_timeout(self, mock_connect):
-        """test socket connection with timeout."""
-        mock_connect.side_effect = socket.timeout
-
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._connect_socket()
-
-    @patch('socket.create_connection')
-    def test__connect_socket_error(self, mock_connect):
-        """test socket connection with socket error."""
-        mock_connect.side_effect = socket.error
-
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._connect_socket()
-
-    def test__send(self):
-        """test send function with normal completion."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-
-        test_inst._send('TestCommand')
-
-        test_inst._socket.sendall.assert_called_with(b'TestCommand\n')
-
-    def test__send_timeout(self):
-        """test send function with timeout."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.sendall.side_effect = socket.timeout
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._send('TestCommand')
-
-    def test__send_error(self):
-        """test send function with error."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.sendall.side_effect = socket.error
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._send('TestCommand')
-
-    def test__recv(self):
-        """test recv function with normal completion."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.recv.return_value = b'TestResponse\n'
-
-        mock_resp = test_inst._recv()
-
-        self.assertEqual(mock_resp, 'TestResponse')
-
-    def test__recv_timeout(self):
-        """test recv function with timeout."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.recv.side_effect = socket.timeout
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._recv()
-
-    def test__recv_error(self):
-        """test recv function with error."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.recv.side_effect = socket.error
-
-        with self.assertRaises(pyinst.SocketInstrumentError):
-            test_inst._recv()
-
-    @patch('socket.create_connection')
-    def test__close_socket(self, mock_connect):
-        """test socket close normal completion."""
-        mock_connect.return_value.recv.return_value = b'Dummy Instrument\n'
-
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-        test_inst._connect_socket()
-        test_inst._close_socket()
-
-        mock_connect.return_value.shutdown.assert_called_with(socket.SHUT_RDWR)
-        mock_connect.return_value.close.assert_called_with()
-
-    def test__query(self):
-        """test query function with normal completion."""
-        test_inst = pyinst.SocketInstrument('192.168.1.11', '5050')
-
-        test_inst._socket = Mock()
-        test_inst._socket.recv.return_value = b'TestResponse\n'
-
-        mock_resp = test_inst._query('TestCommand')
-
-        test_inst._socket.sendall.assert_called_with(b'TestCommand;*OPC?\n')
-        self.assertEqual(mock_resp, 'TestResponse')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/android_lib/logcat_test.py b/src/antlion/unit_tests/controllers/android_lib/logcat_test.py
deleted file mode 100644
index dd0c57f..0000000
--- a/src/antlion/unit_tests/controllers/android_lib/logcat_test.py
+++ /dev/null
@@ -1,175 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import unittest
-
-import mock
-from antlion.controllers.android_lib import logcat
-from antlion.controllers.android_lib.logcat import TimestampTracker
-
-BASE_TIMESTAMP = '2000-01-01 12:34:56.789   123 75348 '
-
-
-class LogcatTest(unittest.TestCase):
-    """Tests antlion.controllers.android_lib.logcat"""
-
-    @staticmethod
-    def patch(patched):
-        return mock.patch('antlion.controllers.android_lib.logcat.%s' % patched)
-
-    def setUp(self):
-        self._get_log_level = logcat._get_log_level
-
-    def tearDown(self):
-        logcat._get_log_level = self._get_log_level
-
-    # TimestampTracker
-
-    def test_read_output_sets_last_timestamp_if_found(self):
-        tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP + 'D message')
-
-        self.assertEqual(tracker.last_timestamp, '2000-01-01 12:34:56.789')
-
-    def test_read_output_keeps_last_timestamp_if_no_new_stamp_is_found(self):
-        tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP + 'D message')
-        tracker.read_output('--------- beginning of main')
-
-        self.assertEqual(tracker.last_timestamp, '2000-01-01 12:34:56.789')
-
-    def test_read_output_updates_timestamp_to_first_in_results(self):
-        tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP + 'D 9999-99-99 12:34:56.789')
-
-        self.assertEqual(tracker.last_timestamp, '2000-01-01 12:34:56.789')
-
-    # _get_log_level
-
-    def test_get_log_level_verbose(self):
-        """Tests that Logcat's verbose logs make it to the debug level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + 'V')
-
-        self.assertEqual(level, logging.DEBUG)
-
-    def test_get_log_level_debug(self):
-        """Tests that Logcat's debug logs make it to the debug level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + 'D')
-
-        self.assertEqual(level, logging.DEBUG)
-
-    def test_get_log_level_info(self):
-        """Tests that Logcat's info logs make it to the info level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + 'I')
-
-        self.assertEqual(level, logging.INFO)
-
-    def test_get_log_level_warning(self):
-        """Tests that Logcat's warning logs make it to the warning level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + 'W')
-
-        self.assertEqual(level, logging.WARNING)
-
-    def test_get_log_level_error(self):
-        """Tests that Logcat's error logs make it to the error level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + 'E')
-
-        self.assertEqual(level, logging.ERROR)
-
-    def test_get_log_level_markers(self):
-        """Tests that Logcat's marker logs make it to the error level."""
-        level = logcat._get_log_level('--------- beginning of main')
-
-        self.assertEqual(level, logging.ERROR)
-
-    # _log_line_func
-
-    def test_log_line_func_returns_func_that_logs_to_given_logger(self):
-        logcat._get_log_level = lambda message: logging.INFO
-        tracker = mock.Mock()
-        log = mock.Mock()
-        message = 'MESSAGE'
-
-        logcat._log_line_func(log, tracker)(message)
-
-        self.assertEqual(log.log.called, True)
-        log.log.assert_called_once_with(logging.INFO, message)
-
-    def test_log_line_func_returns_func_that_updates_the_timestamp(self):
-        logcat._get_log_level = lambda message: logging.INFO
-        tracker = mock.Mock()
-        log = mock.Mock()
-        message = 'MESSAGE'
-
-        logcat._log_line_func(log, tracker)(message)
-
-        self.assertEqual(tracker.read_output.called, True)
-        tracker.read_output.assert_called_once_with(message)
-
-    # _on_retry
-
-    def test_on_retry_returns_func_that_formats_with_last_timestamp(self):
-        tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP)
-        new_command = logcat._on_retry('S3R14L', 'extra_params', tracker)(None)
-
-        self.assertIn('-T "%s"' % tracker.last_timestamp, new_command)
-
-    def test_on_retry_func_returns_string_that_contains_the_given_serial(self):
-        tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP)
-        new_command = logcat._on_retry('S3R14L', 'extra_params', tracker)(None)
-
-        self.assertTrue('-s S3R14L' in new_command)
-
-    def test_on_retry_func_returns_string_that_contains_any_extra_params(self):
-        tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP)
-        new_command = logcat._on_retry('S3R14L', 'extra_params', tracker)(None)
-
-        self.assertTrue('extra_params' in new_command)
-
-    # create_logcat_keepalive_process
-
-    def test_create_logcat_keepalive_process_creates_a_new_logger(self):
-        with self.patch('log_stream') as log_stream, self.patch('Process'):
-            logcat.create_logcat_keepalive_process('S3R14L', 'dir')
-        self.assertEqual(log_stream.create_logger.call_args[0][0],
-                         'adblog_S3R14L')
-        self.assertEqual(log_stream.create_logger.call_args[1]['subcontext'],
-                         'dir')
-
-    def test_create_logcat_keepalive_process_creates_a_new_process(self):
-        with self.patch('log_stream'), self.patch('Process') as process:
-            logcat.create_logcat_keepalive_process('S3R14L', 'dir')
-
-        self.assertIn('S3R14L', process.call_args[0][0])
-
-    def test_create_logcat_keepalive_process_sets_output_callback(self):
-        with self.patch('log_stream'), self.patch('Process'):
-            process = logcat.create_logcat_keepalive_process('S3R14L', 'dir')
-
-        self.assertEqual(process.set_on_output_callback.called, True)
-
-    def test_create_logcat_keepalive_process_sets_on_terminate_callback(self):
-        with self.patch('log_stream'), self.patch('Process'):
-            process = logcat.create_logcat_keepalive_process('S3R14L', 'dir')
-
-        self.assertEqual(process.set_on_terminate_callback.called, True)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/android_lib/services_test.py b/src/antlion/unit_tests/controllers/android_lib/services_test.py
deleted file mode 100644
index 709d6e5..0000000
--- a/src/antlion/unit_tests/controllers/android_lib/services_test.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from unittest import mock
-
-from antlion.controllers.android_lib import services
-from antlion.controllers.android_lib.events import AndroidStartServicesEvent
-from antlion.controllers.android_lib.events import AndroidStopServicesEvent
-from antlion.event import event_bus
-
-
-class ServicesTest(unittest.TestCase):
-    """Tests antlion.controllers.android_lib.services"""
-
-    # AndroidService
-
-    def test_register_adds_both_start_and_stop_methods(self):
-        """Test that both the _start and _stop methods are registered to
-        their respective events upon calling register().
-        """
-        event_bus._event_bus = event_bus._EventBus()
-        service = services.AndroidService(mock.Mock())
-        service.register()
-        subscriptions = event_bus._event_bus._subscriptions
-        self.assertTrue(
-            any(subscription._func == service._start for subscription in
-                subscriptions[AndroidStartServicesEvent]))
-        self.assertTrue(
-            any(subscription._func == service._stop for subscription in
-                subscriptions[AndroidStopServicesEvent]))
-
-    @unittest.mock.patch.object(services.AndroidService, '_start')
-    def test_event_deliver_only_to_matching_serial(self, start_fn):
-        """Test that the service only responds to events that matches its
-        device serial.
-        """
-        event_bus._event_bus = event_bus._EventBus()
-        service = services.AndroidService(mock.Mock())
-        service.ad.serial = 'right_serial'
-        service.register()
-
-        wrong_ad = mock.Mock()
-        wrong_ad.serial = 'wrong_serial'
-        wrong_event = AndroidStartServicesEvent(wrong_ad)
-        event_bus.post(wrong_event)
-        start_fn.assert_not_called()
-
-        right_ad = mock.Mock()
-        right_ad.serial = 'right_serial'
-        right_event = AndroidStartServicesEvent(right_ad)
-        event_bus.post(right_event)
-        start_fn.assert_called_with(right_event)
-
-    def test_unregister_removes_both_start_and_stop_methods(self):
-        """Test that both the _start and _stop methods are unregistered from
-        their respective events upon calling unregister().
-        """
-        event_bus._event_bus = event_bus._EventBus()
-        service = services.AndroidService(mock.Mock())
-        service.register()
-        service.unregister()
-        subscriptions = event_bus._event_bus._subscriptions
-        self.assertFalse(
-            any(subscription._func == service._start for subscription in
-                subscriptions[AndroidStartServicesEvent]))
-        self.assertFalse(
-            any(subscription._func == service._stop for subscription in
-                subscriptions[AndroidStopServicesEvent]))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py b/src/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
deleted file mode 100644
index f63ea5c..0000000
--- a/src/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import unittest
-
-from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet, StaticMapping
-
-
-class DhcpConfigTest(unittest.TestCase):
-    def setUp(self):
-        super().setUp()
-        # These config files may have long diffs, modify this setting to
-        # ensure they're printed.
-        self.maxDiff = None
-
-    def test_basic_dhcp_config(self):
-        dhcp_conf = DhcpConfig()
-
-        expected_config = ('default-lease-time 600;\n' 'max-lease-time 7200;')
-
-        self.assertEqual(expected_config, dhcp_conf.render_config_file())
-
-    def test_dhcp_config_with_lease_times(self):
-        default_lease_time = 350
-        max_lease_time = 5000
-        dhcp_conf = DhcpConfig(default_lease_time=default_lease_time,
-                               max_lease_time=max_lease_time)
-
-        expected_config = (f'default-lease-time {default_lease_time};\n'
-                           f'max-lease-time {max_lease_time};')
-
-        self.assertEqual(expected_config, dhcp_conf.render_config_file())
-
-    def test_dhcp_config_with_subnets(self):
-        default_lease_time = 150
-        max_lease_time = 3000
-        subnets = [
-            # addresses from 10.10.1.0 - 10.10.1.255
-            Subnet(ipaddress.ip_network('10.10.1.0/24')),
-            # 4 addresses from 10.10.3.0 - 10.10.3.3
-            Subnet(ipaddress.ip_network('10.10.3.0/30')),
-            # 6 addresses from 10.10.5.20 - 10.10.5.25
-            Subnet(ipaddress.ip_network('10.10.5.0/24'),
-                   start=ipaddress.ip_address('10.10.5.20'),
-                   end=ipaddress.ip_address('10.10.5.25'),
-                   router=ipaddress.ip_address('10.10.5.255'),
-                   lease_time=60)
-        ]
-        dhcp_conf = DhcpConfig(subnets=subnets,
-                               default_lease_time=default_lease_time,
-                               max_lease_time=max_lease_time)
-
-        # Unless an explicit start/end address is provided, the second
-        # address in the range is used for "start", and the second to
-        # last address is used for "end".
-        expected_config = (f'default-lease-time {default_lease_time};\n'
-                           f'max-lease-time {max_lease_time};\n'
-                           'subnet 10.10.1.0 netmask 255.255.255.0 {\n'
-                           '\tpool {\n'
-                           '\t\toption subnet-mask 255.255.255.0;\n'
-                           '\t\toption routers 10.10.1.1;\n'
-                           '\t\trange 10.10.1.2 10.10.1.254;\n'
-                           '\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n'
-                           '\t}\n'
-                           '}\n'
-                           'subnet 10.10.3.0 netmask 255.255.255.252 {\n'
-                           '\tpool {\n'
-                           '\t\toption subnet-mask 255.255.255.252;\n'
-                           '\t\toption routers 10.10.3.1;\n'
-                           '\t\trange 10.10.3.2 10.10.3.2;\n'
-                           '\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n'
-                           '\t}\n'
-                           '}\n'
-                           'subnet 10.10.5.0 netmask 255.255.255.0 {\n'
-                           '\tpool {\n'
-                           '\t\toption subnet-mask 255.255.255.0;\n'
-                           '\t\toption routers 10.10.5.255;\n'
-                           '\t\trange 10.10.5.20 10.10.5.25;\n'
-                           '\t\tdefault-lease-time 60;\n'
-                           '\t\tmax-lease-time 60;\n'
-                           '\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n'
-                           '\t}\n'
-                           '}')
-
-        self.assertEqual(expected_config, dhcp_conf.render_config_file())
-
-    def test_additional_subnet_parameters_and_options(self):
-        default_lease_time = 150
-        max_lease_time = 3000
-        subnets = [
-            Subnet(ipaddress.ip_network('10.10.1.0/24'),
-                   additional_parameters={
-                       'allow': 'unknown-clients',
-                       'foo': 'bar'
-                   },
-                   additional_options={'my-option': 'some-value'}),
-        ]
-        dhcp_conf = DhcpConfig(subnets=subnets,
-                               default_lease_time=default_lease_time,
-                               max_lease_time=max_lease_time)
-
-        # Unless an explicit start/end address is provided, the second
-        # address in the range is used for "start", and the second to
-        # last address is used for "end".
-        expected_config = (f'default-lease-time {default_lease_time};\n'
-                           f'max-lease-time {max_lease_time};\n'
-                           'subnet 10.10.1.0 netmask 255.255.255.0 {\n'
-                           '\tpool {\n'
-                           '\t\toption subnet-mask 255.255.255.0;\n'
-                           '\t\toption routers 10.10.1.1;\n'
-                           '\t\trange 10.10.1.2 10.10.1.254;\n'
-                           '\t\tallow unknown-clients;\n'
-                           '\t\tfoo bar;\n'
-                           '\t\toption my-option some-value;\n'
-                           '\t\toption domain-name-servers 8.8.8.8, 4.4.4.4;\n'
-                           '\t}\n'
-                           '}')
-
-        self.assertEqual(expected_config, dhcp_conf.render_config_file())
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py b/src/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
deleted file mode 100644
index 61c8cb6..0000000
--- a/src/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.controllers.ap_lib.radio_measurement import BssidInformation, BssidInformationCapabilities, NeighborReportElement, PhyType
-
-EXPECTED_BSSID = '01:23:45:ab:cd:ef'
-EXPECTED_BSSID_INFO_CAP = BssidInformationCapabilities(
-    spectrum_management=True, qos=True, apsd=True, radio_measurement=True)
-EXPECTED_OP_CLASS = 81
-EXPECTED_CHAN = 11
-EXPECTED_PHY = PhyType.HT
-EXPECTED_BSSID_INFO = BssidInformation(capabilities=EXPECTED_BSSID_INFO_CAP,
-                                       high_throughput=True)
-
-
-class RadioMeasurementTest(unittest.TestCase):
-    def test_bssid_information_capabilities(self):
-        self.assertTrue(EXPECTED_BSSID_INFO_CAP.spectrum_management)
-        self.assertTrue(EXPECTED_BSSID_INFO_CAP.qos)
-        self.assertTrue(EXPECTED_BSSID_INFO_CAP.apsd)
-        self.assertTrue(EXPECTED_BSSID_INFO_CAP.radio_measurement)
-        # Must also test the numeric representation.
-        self.assertEqual(int(EXPECTED_BSSID_INFO_CAP), 0b111100)
-
-    def test_bssid_information(self):
-        self.assertEqual(EXPECTED_BSSID_INFO.capabilities,
-                         EXPECTED_BSSID_INFO_CAP)
-        self.assertEqual(EXPECTED_BSSID_INFO.high_throughput, True)
-        # Must also test the numeric representation.
-        self.assertEqual(int(EXPECTED_BSSID_INFO),
-                         0b10001111000100000000000000000000)
-
-    def test_neighbor_report_element(self):
-        element = NeighborReportElement(bssid=EXPECTED_BSSID,
-                                        bssid_information=EXPECTED_BSSID_INFO,
-                                        operating_class=EXPECTED_OP_CLASS,
-                                        channel_number=EXPECTED_CHAN,
-                                        phy_type=EXPECTED_PHY)
-        self.assertEqual(element.bssid, EXPECTED_BSSID)
-        self.assertEqual(element.bssid_information, EXPECTED_BSSID_INFO)
-        self.assertEqual(element.operating_class, EXPECTED_OP_CLASS)
-        self.assertEqual(element.channel_number, EXPECTED_CHAN)
-        self.assertEqual(element.phy_type, EXPECTED_PHY)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/ap_lib/radvd_test.py b/src/antlion/unit_tests/controllers/ap_lib/radvd_test.py
deleted file mode 100644
index 001ee8d..0000000
--- a/src/antlion/unit_tests/controllers/ap_lib/radvd_test.py
+++ /dev/null
@@ -1,223 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import unittest
-from unittest.mock import patch
-
-from antlion.controllers.ap_lib import radvd_constants
-from antlion.controllers.ap_lib.radvd import Error
-from antlion.controllers.ap_lib.radvd import Radvd
-
-from antlion.controllers.ap_lib.radvd_config import RadvdConfig
-
-SEARCH_FILE = ('antlion.controllers.utils_lib.commands.shell.'
-               'ShellCommand.search_file')
-DELETE_FILE = ('antlion.controllers.utils_lib.commands.shell.ShellCommand.'
-               'delete_file')
-
-CORRECT_COMPLEX_RADVD_CONFIG = ("""interface wlan0 {
-    IgnoreIfMissing on;
-    AdvSendAdvert off;
-    UnicastOnly on;
-    MaxRtrAdvInterval 60;
-    MinRtrAdvInterval 5;
-    MinDelayBetweenRAs 5;
-    AdvManagedFlag off;
-    AdvOtherConfigFlag on;
-    AdvLinkMTU 1400;
-    AdvReachableTime 3600000;
-    AdvRetransTimer 10;
-    AdvCurHopLimit 50;
-    AdvDefaultLifetime 8000;
-    AdvDefaultPreference off;
-    AdvSourceLLAddress on;
-    AdvHomeAgentFlag off;
-    AdvHomeAgentInfo on;
-    HomeAgentLifetime 100;
-    HomeAgentPreference 100;
-    AdvMobRtrSupportFlag off;
-    AdvIntervalOpt on;
-    prefix fd00::/64
-    {
-        AdvOnLink off;
-        AdvAutonomous on;
-        AdvRouterAddr off;
-        AdvValidLifetime 86400;
-        AdvPreferredLifetime 14400;
-        Base6to4Interface NA;
-    };
-    clients
-    {
-        fe80::c66d:3c75:2cec:1d72;
-        fe80::c66d:3c75:2cec:1d73;
-    };
-    route fd00::/64 {
-        AdvRouteLifetime 1024;
-        AdvRoutePreference high;
-    };
-    RDNSS 2401:fa00:480:7a00:4d56:5373:4549:1e29 2401:fa00:480:7a00:4d56:5373:4549:1e30 {
-        AdvRDNSSPreference 8;
-        AdvRDNSSOpen on;
-        AdvRDNSSLifetime 1025;
-    };
-};""".replace("    ", "\t"))
-
-CORRECT_SIMPLE_RADVD_CONFIG = ("""interface wlan0 {
-    AdvSendAdvert on;
-    prefix fd00::/64
-    {
-        AdvOnLink on;
-        AdvAutonomous on;
-    };
-};""".replace("    ", "\t"))
-
-
-def delete_file_mock(file_to_delete):
-    if os.path.exists(file_to_delete):
-        os.remove(file_to_delete)
-
-
-def write_configs_mock(config_file_with_path, output_config):
-    with open(config_file_with_path, 'w+') as config_fileId:
-        config_fileId.write(output_config)
-
-
-class RadvdTest(unittest.TestCase):
-    @patch('antlion.controllers.utils_lib.commands.shell.ShellCommand.kill')
-    def test_radvd_ikill(self, kill):
-        kill.return_value = True
-        radvd_mock = Radvd('mock_runner', 'wlan0')
-        self.assertIsNone(radvd_mock.stop())
-
-    @patch('antlion.controllers.utils_lib.commands.shell.ShellCommand.is_alive')
-    def test_radvd_is_alive_True(self, is_alive_mock):
-        is_alive_mock.return_value = True
-        radvd_mock = Radvd('mock_runner', 'wlan0')
-        self.assertTrue(radvd_mock.is_alive())
-
-    @patch('antlion.controllers.utils_lib.commands.shell.ShellCommand.is_alive')
-    def test_radvd_is_alive_False(self, is_alive_mock):
-        is_alive_mock.return_value = False
-        radvd_mock = Radvd('mock_runner', 'wlan0')
-        self.assertFalse(radvd_mock.is_alive())
-
-    @patch('antlion.controllers.ap_lib.radvd.Radvd._scan_for_errors')
-    @patch('antlion.controllers.ap_lib.radvd.Radvd.is_alive')
-    def test_wait_for_process_process_alive(self, is_alive_mock,
-                                            _scan_for_errors_mock):
-        is_alive_mock.return_value = True
-        _scan_for_errors_mock.return_value = True
-        radvd_mock = Radvd('mock_runner', 'wlan0')
-        self.assertIsNone(radvd_mock._wait_for_process(timeout=2))
-
-    @patch('antlion.controllers.ap_lib.radvd.Radvd.is_alive')
-    @patch(SEARCH_FILE)
-    def test_scan_for_errors_is_dead(self, search_file_mock, is_alive_mock):
-        is_alive_mock.return_value = False
-        search_file_mock.return_value = False
-        radvd_mock = Radvd('mock_runner', 'wlan0')
-        with self.assertRaises(Error) as context:
-            radvd_mock._scan_for_errors(True)
-        self.assertTrue('Radvd failed to start' in str(context.exception))
-
-    @patch('antlion.controllers.ap_lib.radvd.Radvd.is_alive')
-    @patch(SEARCH_FILE)
-    def test_scan_for_errors_exited_prematurely(self, search_file_mock,
-                                                is_alive_mock):
-        is_alive_mock.return_value = True
-        search_file_mock.return_value = True
-        radvd_mock = Radvd('mock_runner', 'wlan0')
-        with self.assertRaises(Error) as context:
-            radvd_mock._scan_for_errors(True)
-        self.assertTrue('Radvd exited prematurely.' in str(context.exception))
-
-    @patch('antlion.controllers.ap_lib.radvd.Radvd.is_alive')
-    @patch(SEARCH_FILE)
-    def test_scan_for_errors_success(self, search_file_mock, is_alive_mock):
-        is_alive_mock.return_value = True
-        search_file_mock.return_value = False
-        radvd_mock = Radvd('mock_runner', 'wlan0')
-        self.assertIsNone(radvd_mock._scan_for_errors(True))
-
-    @patch(DELETE_FILE)
-    @patch('antlion.controllers.utils_lib.commands.shell.ShellCommand.write_file')
-    def test_write_configs_simple(self, write_file, delete_file):
-        delete_file.side_effect = delete_file_mock
-        write_file.side_effect = write_configs_mock
-        basic_radvd_config = RadvdConfig()
-        radvd_mock = Radvd('mock_runner', 'wlan0')
-        radvd_mock._write_configs(basic_radvd_config)
-        radvd_config = radvd_mock._config_file
-        with open(radvd_config, 'r') as radvd_config_fileId:
-            config_data = radvd_config_fileId.read()
-            self.assertTrue(CORRECT_SIMPLE_RADVD_CONFIG == config_data)
-
-    @patch(DELETE_FILE)
-    @patch('antlion.controllers.utils_lib.commands.shell.ShellCommand.write_file')
-    def test_write_configs_complex(self, write_file, delete_file):
-        delete_file.side_effect = delete_file_mock
-        write_file.side_effect = write_configs_mock
-        complex_radvd_config = RadvdConfig(
-            clients=['fe80::c66d:3c75:2cec:1d72', 'fe80::c66d:3c75:2cec:1d73'],
-            route=radvd_constants.DEFAULT_PREFIX,
-            rdnss=[
-                '2401:fa00:480:7a00:4d56:5373:4549:1e29',
-                '2401:fa00:480:7a00:4d56:5373:4549:1e30',
-            ],
-            ignore_if_missing=radvd_constants.IGNORE_IF_MISSING_ON,
-            adv_send_advert=radvd_constants.ADV_SEND_ADVERT_OFF,
-            unicast_only=radvd_constants.UNICAST_ONLY_ON,
-            max_rtr_adv_interval=60,
-            min_rtr_adv_interval=5,
-            min_delay_between_ras=5,
-            adv_managed_flag=radvd_constants.ADV_MANAGED_FLAG_OFF,
-            adv_other_config_flag=radvd_constants.ADV_OTHER_CONFIG_FLAG_ON,
-            adv_link_mtu=1400,
-            adv_reachable_time=3600000,
-            adv_retrans_timer=10,
-            adv_cur_hop_limit=50,
-            adv_default_lifetime=8000,
-            adv_default_preference=radvd_constants.ADV_DEFAULT_PREFERENCE_OFF,
-            adv_source_ll_address=radvd_constants.ADV_SOURCE_LL_ADDRESS_ON,
-            adv_home_agent_flag=radvd_constants.ADV_HOME_AGENT_FLAG_OFF,
-            adv_home_agent_info=radvd_constants.ADV_HOME_AGENT_INFO_ON,
-            home_agent_lifetime=100,
-            home_agent_preference=100,
-            adv_mob_rtr_support_flag=radvd_constants.
-            ADV_MOB_RTR_SUPPORT_FLAG_OFF,
-            adv_interval_opt=radvd_constants.ADV_INTERVAL_OPT_ON,
-            adv_on_link=radvd_constants.ADV_ON_LINK_OFF,
-            adv_autonomous=radvd_constants.ADV_AUTONOMOUS_ON,
-            adv_router_addr=radvd_constants.ADV_ROUTER_ADDR_OFF,
-            adv_valid_lifetime=86400,
-            adv_preferred_lifetime=14400,
-            base_6to4_interface='NA',
-            adv_route_lifetime=1024,
-            adv_route_preference=radvd_constants.ADV_ROUTE_PREFERENCE_HIGH,
-            adv_rdnss_preference=8,
-            adv_rdnss_open=radvd_constants.ADV_RDNSS_OPEN_ON,
-            adv_rdnss_lifetime=1025)
-        radvd_mock = Radvd('mock_runner', 'wlan0')
-        radvd_mock._write_configs(complex_radvd_config)
-        radvd_config = radvd_mock._config_file
-        with open(radvd_config, 'r') as radvd_config_fileId:
-            config_data = radvd_config_fileId.read()
-            self.assertTrue(CORRECT_COMPLEX_RADVD_CONFIG == config_data)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py b/src/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
deleted file mode 100644
index 18d7aa7..0000000
--- a/src/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.controllers.ap_lib.radio_measurement import BssidInformation, NeighborReportElement, PhyType
-from antlion.controllers.ap_lib.wireless_network_management import BssTransitionCandidateList, BssTransitionManagementRequest
-
-EXPECTED_NEIGHBOR_1 = NeighborReportElement(
-    bssid='01:23:45:ab:cd:ef',
-    bssid_information=BssidInformation(),
-    operating_class=81,
-    channel_number=1,
-    phy_type=PhyType.HT)
-EXPECTED_NEIGHBOR_2 = NeighborReportElement(
-    bssid='cd:ef:ab:45:67:89',
-    bssid_information=BssidInformation(),
-    operating_class=121,
-    channel_number=149,
-    phy_type=PhyType.VHT)
-EXPECTED_NEIGHBORS = [EXPECTED_NEIGHBOR_1, EXPECTED_NEIGHBOR_2]
-EXPECTED_CANDIDATE_LIST = BssTransitionCandidateList(EXPECTED_NEIGHBORS)
-
-
-class WirelessNetworkManagementTest(unittest.TestCase):
-    def test_bss_transition_management_request(self):
-        request = BssTransitionManagementRequest(
-            disassociation_imminent=True,
-            abridged=True,
-            candidate_list=EXPECTED_NEIGHBORS)
-        self.assertTrue(request.disassociation_imminent)
-        self.assertTrue(request.abridged)
-        self.assertIn(EXPECTED_NEIGHBOR_1, request.candidate_list)
-        self.assertIn(EXPECTED_NEIGHBOR_2, request.candidate_list)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/bits_lib/__init__.py b/src/antlion/unit_tests/controllers/bits_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/bits_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/bits_lib/bits_client_test.py b/src/antlion/unit_tests/controllers/bits_lib/bits_client_test.py
deleted file mode 100644
index 8acdbe7..0000000
--- a/src/antlion/unit_tests/controllers/bits_lib/bits_client_test.py
+++ /dev/null
@@ -1,326 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from datetime import datetime
-import unittest
-
-from antlion.libs.proc import job
-from antlion.controllers.bits_lib import bits_client
-from antlion.controllers.bits_lib import bits_service_config
-import mock
-
-CONTROLLER_CONFIG_WITH_MONSOON = {
-    'Monsoon': {'serial_num': 1234, 'monsoon_voltage': 4.2}
-}
-
-MONSOONED_CONFIG = bits_service_config.BitsServiceConfig(
-    CONTROLLER_CONFIG_WITH_MONSOON, lvpm_monsoon_bin='lvpm.par')
-
-CONTROLLER_CONFIG_WITHOUT_MONSOON = {}
-
-NON_MONSOONED_CONFIG = bits_service_config.BitsServiceConfig(
-    CONTROLLER_CONFIG_WITHOUT_MONSOON)
-
-KIBBLES_CONFIG = bits_service_config.BitsServiceConfig(
-    {
-        'Kibbles': [{
-            'board':     'board',
-            'connector': 'connector',
-            'serial':    'serial',
-        }],
-    },
-    kibble_bin='bin',
-    kibble_board_file='file.board',
-    virtual_metrics_file='file.vm')
-
-
-class BitsClientTest(unittest.TestCase):
-
-    def setUp(self):
-        super().setUp()
-        self.mock_service = mock.Mock()
-        self.mock_service.port = '42'
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_execute_generic_command(self, mock_run):
-        mock_service = mock.Mock()
-        mock_service.port = '1337'
-        client = bits_client.BitsClient('bits.par', mock_service,
-                                        service_config=KIBBLES_CONFIG)
-
-        client.run_cmd('-i', '-am', '-not', '-a', '-teapot', timeout=12345)
-
-        expected_final_command = ['bits.par',
-                                  '--port',
-                                  '1337',
-                                  '-i',
-                                  '-am',
-                                  '-not',
-                                  '-a',
-                                  '-teapot']
-        mock_run.assert_called_with(expected_final_command, timeout=12345)
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_start_collection__without_monsoon__does_not_disconnect_monsoon(
-        self,
-        mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=NON_MONSOONED_CONFIG)
-
-        client.start_collection('collection')
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        non_expected_call = list(
-            filter(lambda call: 'usb_disconnect' in call.args[0],
-                   args_list))
-        self.assertEqual(len(non_expected_call), 0,
-                         'did not expect call with usb_disconnect')
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_start_collection__frecuency_arg_gets_populated(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.start_collection('collection', default_sampling_rate=12345)
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_calls = list(
-            filter(lambda call: '--time' in call.args[0], args_list))
-        self.assertEqual(len(expected_calls), 1, 'expected 1 calls with --time')
-        self.assertIn('--default_sampling_rate', expected_calls[0][0][0])
-        self.assertIn('12345', expected_calls[0][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_start_collection__sampling_rate_defaults_to_1000(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.start_collection('collection')
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_calls = list(
-            filter(lambda call: '--time' in call.args[0], args_list))
-        self.assertEqual(len(expected_calls), 1, 'expected 1 calls with --time')
-        self.assertIn('--default_sampling_rate', expected_calls[0][0][0])
-        self.assertIn('1000', expected_calls[0][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_stop_collection__usb_not_automanaged__does_not_connect_monsoon(
-        self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.stop_collection('collection')
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        non_expected_call = list(
-            filter(lambda call: 'usb_connect' in call.args[0], args_list))
-        self.assertEqual(len(non_expected_call), 0,
-                         'did not expect call with usb_connect')
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_export_ignores_dataseries_gaps(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.export('collection', '/path/a.7z.bits')
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_call = list(
-            filter(
-                lambda call: '--ignore_gaps' in call.args[0] and '--export' in
-                             call.args[0], args_list))
-        self.assertEqual(len(expected_call), 1,
-                         'expected a call with --ignore_gaps and --export')
-        self.assertIn('--ignore_gaps', expected_call[0].args[0])
-
-    def test_export_path_must_end_in_bits_file_extension(self):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        self.assertRaisesRegex(
-            bits_client.BitsClientError,
-            r'collections can only be exported to files ending in .7z.bits',
-            client.export, 'collection', '/path/')
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_export_as_csv(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-        output_file = '/path/to/csv'
-        collection = 'collection'
-
-        client.export_as_csv([':mW', ':mV'], collection, output_file)
-
-        mock_run.assert_called()
-        cmd = mock_run.call_args_list[0].args[0]
-        self.assertIn(collection, cmd)
-        self.assertIn(output_file, cmd)
-        self.assertIn(':mW,:mV', cmd)
-        self.assertNotIn('--vm_file', cmd)
-        self.assertNotIn('default', cmd)
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_export_as_csv_with_virtual_metrics_file(self, mock_run):
-        output_file = '/path/to/csv'
-        collection = 'collection'
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=KIBBLES_CONFIG)
-
-        client.export_as_csv([':mW', ':mV'], collection, output_file)
-
-        mock_run.assert_called()
-        cmd = mock_run.call_args_list[0].args[0]
-        self.assertIn(collection, cmd)
-        self.assertIn(':mW,:mV', cmd)
-        self.assertIn('--vm_file', cmd)
-        self.assertIn('default', cmd)
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_add_markers(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.add_markers('collection', [(1, 'ein'),
-                                          (2, 'zwei'),
-                                          (3, 'drei')])
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_calls = list(
-            filter(lambda call: '--log' in call.args[0], args_list))
-        self.assertEqual(len(expected_calls), 3, 'expected 3 calls with --log')
-        self.assertIn('--log_ts', expected_calls[0][0][0])
-        self.assertIn('1', expected_calls[0][0][0])
-        self.assertIn('ein', expected_calls[0][0][0])
-
-        self.assertIn('--log_ts', expected_calls[1][0][0])
-        self.assertIn('2', expected_calls[1][0][0])
-        self.assertIn('zwei', expected_calls[1][0][0])
-
-        self.assertIn('--log_ts', expected_calls[2][0][0])
-        self.assertIn('3', expected_calls[2][0][0])
-        self.assertIn('drei', expected_calls[2][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_add_markers_with_datetimes(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.add_markers('collection',
-                           [(datetime.utcfromtimestamp(1), 'ein'),
-                            (2e9, 'zwei'),
-                            (datetime.utcfromtimestamp(3), 'drei')])
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_calls = list(
-            filter(lambda call: '--log' in call.args[0], args_list))
-        self.assertEqual(len(expected_calls), 3, 'expected 3 calls with --log')
-        self.assertIn('--log_ts', expected_calls[0][0][0])
-        self.assertIn(str(int(1e9)), expected_calls[0][0][0])
-        self.assertIn('ein', expected_calls[0][0][0])
-
-        self.assertIn('--log_ts', expected_calls[1][0][0])
-        self.assertIn(str(int(2e9)), expected_calls[1][0][0])
-        self.assertIn('zwei', expected_calls[1][0][0])
-
-        self.assertIn('--log_ts', expected_calls[2][0][0])
-        self.assertIn(str(int(3e9)), expected_calls[2][0][0])
-        self.assertIn('drei', expected_calls[2][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_get_metrics(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.get_metrics('collection', 8888, 9999)
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_call = list(
-            filter(lambda call: '--aggregates_yaml_path' in call.args[0],
-                   args_list))
-        self.assertEqual(len(expected_call), 1,
-                         'expected a call with --aggregates_yaml_path')
-        self.assertIn('8888', expected_call[0][0][0])
-        self.assertIn('--ignore_gaps', expected_call[0][0][0])
-        self.assertIn('--abs_stop_time', expected_call[0][0][0])
-        self.assertIn('9999', expected_call[0][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_get_metrics_with_datetime_markers(self, mock_run):
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-
-        client.get_metrics('collection',
-                           datetime.utcfromtimestamp(1),
-                           datetime.utcfromtimestamp(2))
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_call = list(
-            filter(lambda call: '--aggregates_yaml_path' in call.args[0],
-                   args_list))
-        self.assertEqual(len(expected_call), 1,
-                         'expected a call with --aggregates_yaml_path')
-        self.assertIn(str(int(1e9)), expected_call[0][0][0])
-        self.assertIn('--ignore_gaps', expected_call[0][0][0])
-        self.assertIn('--abs_stop_time', expected_call[0][0][0])
-        self.assertIn(str(int(2e9)), expected_call[0][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_get_metrics_with_virtual_metrics_file(self, mock_run):
-        service_config = mock.Mock()
-        service_config.has_virtual_metrics_file = True
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=service_config)
-
-        client.get_metrics(8888, 9999)
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_call = list(
-            filter(lambda call: '--aggregates_yaml_path' in call.args[0],
-                   args_list))
-        self.assertEqual(len(expected_call), 1,
-                         'expected a call with --aggregates_yaml_path')
-        self.assertIn('--vm_file', expected_call[0][0][0])
-        self.assertIn('default', expected_call[0][0][0])
-
-    @mock.patch('antlion.libs.proc.job.run',
-                return_value=job.Result(stdout=bytes('device', 'utf-8')))
-    def test_list_devices(self, mock_run):
-        service_config = mock.Mock()
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=service_config)
-
-        result = client.list_devices()
-
-        mock_run.assert_called()
-        cmd = mock_run.call_args_list[0].args[0]
-        self.assertIn('--list', cmd)
-        self.assertIn('devices', cmd)
-        self.assertEqual(result, 'device')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/bits_lib/bits_service_config_test.py b/src/antlion/unit_tests/controllers/bits_lib/bits_service_config_test.py
deleted file mode 100644
index 497239e..0000000
--- a/src/antlion/unit_tests/controllers/bits_lib/bits_service_config_test.py
+++ /dev/null
@@ -1,192 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.controllers.bits_lib import bits_service_config
-
-
-class BitsServiceConfigTest(unittest.TestCase):
-
-    def test_basic_config(self):
-        config_dic = bits_service_config.BitsServiceConfig({}).config_dic
-        self.assertIn('devices', config_dic)
-        self.assertIn('default_device', config_dic['devices'])
-        self.assertIn('collectors', config_dic['devices']['default_device'])
-
-    def test_bits_service_config_has_an_enabled_default_device(self):
-        config_dic = bits_service_config.BitsServiceConfig({}).config_dic
-        self.assertEqual(1, config_dic['devices']['default_device']['enabled'])
-
-
-class BitsServiceConfigWithMonsoonTest(unittest.TestCase):
-
-    def test_monsoon_with_serial_less_than_20000_is_configured_as_non_hv(self):
-        config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 19999, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='lvpm_bin', hvpm_monsoon_bin='hvpm_bin')
-        self.assertEqual(0, config.config_dic['hv_monsoon'])
-        self.assertEqual('lvpm_bin', config.config_dic['monsoon_binary_path'])
-
-    def test_lvpm_monsoon_requires_lvpm_binary(self):
-        self.assertRaisesRegex(ValueError,
-                               r'lvpm_monsoon binary is needed but was None.',
-                               bits_service_config._BitsMonsoonConfig,
-                               {'serial_num': 19999, 'monsoon_voltage': 1},
-                               hvpm_monsoon_bin='hvpm_bin')
-
-    def test_monsoon_with_serial_greater_than_20000_is_configured_as_hv(self):
-        config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 20001, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='lvpm_bin', hvpm_monsoon_bin='hvpm_bin')
-        self.assertEqual(1, config.config_dic['hv_monsoon'])
-        self.assertEqual('hvpm_bin', config.config_dic['monsoon_binary_path'])
-
-    def test_hvpm_monsoon_requires_hvpm_binary(self):
-        self.assertRaisesRegex(ValueError,
-                               r'hvpm_monsoon binary is needed but was None.',
-                               bits_service_config._BitsMonsoonConfig,
-                               {'serial_num': 20001, 'monsoon_voltage': 1},
-                               lvpm_monsoon_bin='hvpm_bin')
-
-    def test_monsoon_config_fails_without_voltage(self):
-        self.assertRaisesRegex(ValueError,
-                               r'Monsoon voltage can not be undefined.',
-                               bits_service_config._BitsMonsoonConfig,
-                               {'serial_num': 1},
-                               lvpm_monsoon_bin='lvpm_bin')
-
-    def test_monsoon_config_fails_without_serial(self):
-        self.assertRaisesRegex(ValueError,
-                               r'Monsoon serial_num can not be undefined.',
-                               bits_service_config._BitsMonsoonConfig,
-                               {'monsoon_voltage': 1},
-                               lvpm_monsoon_bin='lvpm_bin')
-
-    def test_monsoon_config_is_always_enabled(self):
-        config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 1, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='bin')
-        self.assertEqual(1, config.config_dic['enabled'])
-
-    def test_monsoon_config_disables_monsoon_reseting(self):
-        config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 1, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='bin')
-        self.assertEqual(0, config.config_dic['monsoon_reset'])
-
-    def test_monsoon_config_type_is_monsooncollector(self):
-        config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 1, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='bin')
-        self.assertEqual('monsooncollector', config.config_dic['type'])
-
-    def test_bits_service_config_without_monsoon(self):
-        service_config = bits_service_config.BitsServiceConfig({})
-        self.assertFalse(service_config.has_monsoon)
-
-    def test_bits_service_config_with_a_monsoon(self):
-        service_config = bits_service_config.BitsServiceConfig(
-            {'Monsoon': {'serial_num': 1, 'monsoon_voltage': 1}},
-            lvpm_monsoon_bin='bin')
-        config_dic = service_config.config_dic
-
-        self.assertTrue(service_config.has_monsoon)
-        self.assertIn('Monsoon',
-                      config_dic['devices']['default_device'][
-                          'collectors'])
-
-        monsoon_config = bits_service_config._BitsMonsoonConfig(
-            {'serial_num': 1, 'monsoon_voltage': 1},
-            lvpm_monsoon_bin='bin').config_dic
-        self.assertEqual(monsoon_config,
-                         config_dic['devices']['default_device'][
-                             'collectors']['Monsoon'])
-
-
-class BitsServiceConfigWithKibblesTest(unittest.TestCase):
-    def test_bits_service_config_without_kibbles(self):
-        service_config = bits_service_config.BitsServiceConfig({})
-        self.assertFalse(service_config.has_kibbles)
-
-    def test_bits_service_config_with_kibbles_but_no_vm_files(self):
-        service_config = bits_service_config.BitsServiceConfig({'Kibbles': [
-            {'board': 'BOARD', 'connector': 'CONNECTOR', 'serial': 'SERIAL'}]},
-            kibble_bin='bin',
-            kibble_board_file='file.board')
-
-        self.assertFalse(service_config.has_virtual_metrics_file)
-
-    def test_bits_service_config_with_kibbles_and_vm_files(self):
-        service_config = bits_service_config.BitsServiceConfig({'Kibbles': [
-            {'board': 'BOARD', 'connector': 'CONNECTOR', 'serial': 'SERIAL'}]},
-            kibble_bin='bin',
-            kibble_board_file='file.board',
-            virtual_metrics_file='some_file.vm')
-        config_dic = service_config.config_dic
-
-        self.assertTrue(service_config.has_virtual_metrics_file)
-        self.assertIn('some_file.vm',
-                      config_dic['devices']['default_device']['vm_files'])
-
-    def test_bits_service_config_with_kibbles(self):
-        service_config = bits_service_config.BitsServiceConfig({'Kibbles': [
-            {'board': 'BOARD', 'connector': 'CONNECTOR', 'serial': 'SERIAL'}]},
-            kibble_bin='bin',
-            kibble_board_file='file.board')
-        config_dic = service_config.config_dic
-
-        self.assertTrue(service_config.has_kibbles)
-        self.assertIn('BOARD',
-                      config_dic['devices']['default_device']['collectors'])
-
-        boards_config = bits_service_config._BitsKibblesConfig([
-            {'board': 'BOARD', 'connector': 'CONNECTOR', 'serial': 'SERIAL'}],
-            kibble_bin='bin', kibble_board_file='file.board').boards_configs
-        self.assertEqual(boards_config['BOARD'],
-                         config_dic['devices']['default_device'][
-                             'collectors']['BOARD'])
-
-    def test_kibbles_get_grouped_by_board(self):
-        boards_config = bits_service_config._BitsKibblesConfig([
-            {'board': 'BOARD1', 'connector': 'A', 'serial': 'SERIAL1'},
-            {'board': 'BOARD2', 'connector': 'B', 'serial': 'SERIAL2'},
-            {'board': 'BOARD2', 'connector': 'C', 'serial': 'SERIAL3'}],
-            kibble_bin='bin',
-            kibble_board_file='file.board').boards_configs
-
-        self.assertIn('BOARD1', boards_config)
-        board1 = boards_config['BOARD1']
-        self.assertEqual(1, len(board1['attached_kibbles']))
-        self.assertIn('SERIAL1', board1['attached_kibbles'])
-
-        self.assertIn('BOARD2', boards_config)
-        board2 = boards_config['BOARD2']
-        self.assertEqual(2, len(board2['attached_kibbles']))
-        self.assertIn('SERIAL2', board2['attached_kibbles'])
-        self.assertIn('SERIAL3', board2['attached_kibbles'])
-
-    def test_kibble_config_type_is_kibblecollector(self):
-        board_config = bits_service_config._BitsKibblesConfig([
-            {'board': 'BOARD', 'connector': 'CONNECTOR', 'serial': 'SERIAL'}],
-            kibble_bin='bin',
-            kibble_board_file='file.board').boards_configs['BOARD']
-
-        self.assertEqual('kibblecollector', board_config['type'])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/bits_lib/bits_service_test.py b/src/antlion/unit_tests/controllers/bits_lib/bits_service_test.py
deleted file mode 100644
index 8c0f405..0000000
--- a/src/antlion/unit_tests/controllers/bits_lib/bits_service_test.py
+++ /dev/null
@@ -1,144 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from antlion.controllers.bits_lib import bits_service
-from antlion.controllers.bits_lib import bits_service_config
-import mock
-
-SERVICE_CONFIG = bits_service_config.BitsServiceConfig(
-    {'Monsoon': {'serial_num': 538141, 'monsoon_voltage': 4.2}},
-    hvpm_monsoon_bin='hvpm.par')
-
-
-@mock.patch('antlion.controllers.bits_lib.bits_service.atexit')
-@mock.patch('builtins.open')
-class BitsServiceTest(unittest.TestCase):
-    def test_output_log_opens_on_creation(self, mock_open, *_):
-        bits_service.BitsService(SERVICE_CONFIG, 'binary', 'log_path')
-
-        mock_open.assert_called_with('log_path', 'w')
-
-    @mock.patch.object(bits_service.BitsService, '_write_extra_debug_logs')
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_output_log_gets_closed_on_cleanup(self, _, __, mock_open, *___):
-        mock_log = mock.Mock()
-        mock_open.return_value = mock_log
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-        service._cleanup()
-
-        mock_log.close.assert_called_with()
-
-    @mock.patch('antlion.libs.proc.job.run')
-    def test_monsoons_usb_gets_connected_on_cleanup(self, mock_run, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-
-        service._cleanup()
-
-        mock_run.assert_called()
-        self.assertIn('--usbpassthrough', mock_run.call_args[0][0])
-        self.assertIn('on', mock_run.call_args[0][0])
-
-    def test_service_can_not_be_started_twice(self, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-        service.service_state = bits_service.BitsServiceStates.STARTED
-        with self.assertRaises(bits_service.BitsServiceError):
-            service.start()
-
-    def test_service_can_not_be_stoped_twice(self, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-        service.service_state = bits_service.BitsServiceStates.STOPPED
-        with self.assertRaises(bits_service.BitsServiceError):
-            service.stop()
-
-    def test_stopped_service_can_not_be_started(self, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-        service.service_state = bits_service.BitsServiceStates.STOPPED
-        with self.assertRaises(bits_service.BitsServiceError):
-            service.start()
-
-    def test_service_output_changes_service_reported_state(self, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-        self.assertEqual(bits_service.BitsServiceStates.NOT_STARTED,
-                         service.service_state)
-
-        service.port = '1234'
-        service._output_callback('Started server!')
-
-        self.assertEqual(bits_service.BitsServiceStates.STARTED,
-                         service.service_state)
-
-    def test_service_output_defines_port(self, *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-
-        service._output_callback('Server listening on ...:6174.')
-
-        self.assertIsNotNone(service.port)
-
-    @mock.patch('antlion.context.get_current_context')
-    @mock.patch('antlion.libs.proc.process.Process')
-    def test_top_level_call_is_timeout_if_timeout_is_defined(self, mock_process,
-                                                             *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path',
-                                           timeout=42)
-
-        def side_effect(*_, **__):
-            service.service_state = bits_service.BitsServiceStates.STARTED
-            return mock.Mock()
-
-        mock_process.side_effect = side_effect
-
-        service.start()
-
-        args, kwargs = mock_process.call_args
-        self.assertEqual('timeout', args[0][0])
-        self.assertEqual('--signal=SIGTERM', args[0][1])
-        self.assertEqual('--kill-after=60', args[0][2])
-        self.assertEqual('42', args[0][3])
-        self.assertEqual('binary', args[0][4])
-
-    @mock.patch.object(bits_service.BitsService, '_write_extra_debug_logs')
-    @mock.patch('antlion.context.get_current_context')
-    @mock.patch('antlion.libs.proc.process.Process')
-    def test_top_level_call_is_binary_if_timeout_is_not_defined(self,
-                                                                mock_process,
-                                                                *_):
-        service = bits_service.BitsService(SERVICE_CONFIG, 'binary',
-                                           'log_path')
-
-        def side_effect(*_, **__):
-            service.service_state = bits_service.BitsServiceStates.STARTED
-            return mock.Mock()
-
-        mock_process.side_effect = side_effect
-
-        service.start()
-
-        args, kwargs = mock_process.call_args
-        self.assertEqual('binary', args[0][0])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/bits_test.py b/src/antlion/unit_tests/controllers/bits_test.py
deleted file mode 100644
index 3b026ac..0000000
--- a/src/antlion/unit_tests/controllers/bits_test.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from antlion.controllers import bits
-from antlion.controllers import power_metrics
-
-
-class BitsTest(unittest.TestCase):
-
-    def test_metric_name_transformation_for_legacy_support(self):
-        avg_current = bits._transform_name('default_name.Monsoon.Monsoon:mA')
-        avg_power = bits._transform_name('default_name.Monsoon.Monsoon:mW')
-
-        self.assertEqual('avg_current', avg_current)
-        self.assertEqual('avg_power', avg_power)
-
-    def test_metric_name_transformation(self):
-        avg_current = bits._transform_name('default_name.slider.XYZ:mA')
-        avg_power = bits._transform_name('default_name.slider.ABCD:mW')
-        unknown_unit = bits._transform_name('default_name.aaaaa.QWERTY:unknown')
-
-        self.assertEqual('XYZ_avg_current', avg_current)
-        self.assertEqual('ABCD_avg_power', avg_power)
-        self.assertEqual('QWERTY', unknown_unit)
-
-    def test_raw_data_to_metrics(self):
-        raw_data = {'data': [
-            {'name': 'default_device.Monsoon.Monsoon:mA',
-             'avg': 21,
-             'unit': 'mA'},
-            {'name': 'default_device.Monsoon.Monsoon:mW',
-             'avg': 91,
-             'unit': 'mW'}]}
-
-        metrics = bits._raw_data_to_metrics(raw_data)
-        self.assertEqual(2, len(metrics))
-        self.assertEqual(
-            power_metrics.Metric(21, 'current', 'mA', 'avg_current'),
-            metrics[0])
-        self.assertEqual(
-            power_metrics.Metric(91, 'power', 'mW', 'avg_power'),
-            metrics[1])
-
-    def test_raw_data_to_metrics_messages_are_ignored(self):
-        raw_data = {'data': [
-            {'name': 'default_device.Log.UserInputs',
-             'avg': float('nan'),
-             'unit': 'Msg'},
-            {'name': 'default_device.Log.Warnings',
-             'avg': float('nan'),
-             'unit': 'Msg'}]}
-
-        metrics = bits._raw_data_to_metrics(raw_data)
-        self.assertEqual(0, len(metrics))
-
-    def test_get_single_file_get_first_element_of_a_list(self):
-        registry = {'some_key': ['first_element', 'second_element']}
-
-        result = bits._get_single_file(registry, 'some_key')
-
-        self.assertEqual('first_element', result)
-
-    def test_get_single_file_gets_string_if_registry_contains_string(self):
-        registry = {'some_key': 'this_is_a_string'}
-
-        result = bits._get_single_file(registry, 'some_key')
-
-        self.assertEqual('this_is_a_string', result)
-
-    def test_get_single_file_gets_none_if_value_is_undefined_or_empty_list(self):
-        registry = {'some_key': []}
-
-        result1 = bits._get_single_file(registry, 'some_key')
-        result2 = bits._get_single_file(registry, 'key_that_is_not_in_registry')
-
-        self.assertEqual(None, result1)
-        self.assertEqual(None, result2)
-
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/data/sample_monsoon_data b/src/antlion/unit_tests/controllers/data/sample_monsoon_data
deleted file mode 100644
index 2a70273..0000000
--- a/src/antlion/unit_tests/controllers/data/sample_monsoon_data
+++ /dev/null
@@ -1,10 +0,0 @@
-0s    3.67
-1s    3.69
-2s    0.95
-3s    3.06
-4s    2.17
-5s    1.62
-6s    3.95
-7s    2.47
-8s    1.11
-9s    0.47
diff --git a/src/antlion/unit_tests/controllers/iperf_client_test.py b/src/antlion/unit_tests/controllers/iperf_client_test.py
deleted file mode 100644
index bf20000..0000000
--- a/src/antlion/unit_tests/controllers/iperf_client_test.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import unittest
-
-import mock
-import os
-
-from antlion.controllers import iperf_client
-from antlion.controllers.iperf_client import IPerfClient
-from antlion.controllers.iperf_client import IPerfClientBase
-from antlion.controllers.iperf_client import IPerfClientOverAdb
-from antlion.controllers.iperf_client import IPerfClientOverSsh
-
-# The position in the call tuple that represents the args array.
-ARGS = 0
-
-# The position in the call tuple that represents the kwargs dict.
-KWARGS = 1
-
-
-class IPerfClientModuleTest(unittest.TestCase):
-    """Tests the antlion.controllers.iperf_client module functions."""
-
-    def test_create_can_create_client_over_adb(self):
-        self.assertIsInstance(
-            iperf_client.create([{'AndroidDevice': 'foo'}])[0],
-            IPerfClientOverAdb,
-            'Unable to create IPerfClientOverAdb from create().'
-        )
-
-    def test_create_can_create_client_over_ssh(self):
-        self.assertIsInstance(
-            iperf_client.create([{'ssh_config': {'user': '', 'host': ''}}])[0],
-            IPerfClientOverSsh,
-            'Unable to create IPerfClientOverSsh from create().'
-        )
-
-    def test_create_can_create_local_client(self):
-        self.assertIsInstance(
-            iperf_client.create([{}])[0],
-            IPerfClient,
-            'Unable to create IPerfClient from create().'
-        )
-
-
-class IPerfClientBaseTest(unittest.TestCase):
-    """Tests antlion.controllers.iperf_client.IPerfClientBase."""
-
-    @mock.patch('os.makedirs')
-    def test_get_full_file_path_creates_parent_directory(self, mock_makedirs):
-        # Will never actually be created/used.
-        logging.log_path = '/tmp/unit_test_garbage'
-
-        full_file_path = IPerfClientBase._get_full_file_path(0)
-
-        self.assertTrue(
-            mock_makedirs.called,
-            'Did not attempt to create a directory.'
-        )
-        self.assertEqual(
-            os.path.dirname(full_file_path),
-            mock_makedirs.call_args[ARGS][0],
-            'The parent directory of the full file path was not created.'
-        )
-
-
-class IPerfClientTest(unittest.TestCase):
-    """Tests antlion.controllers.iperf_client.IPerfClient."""
-
-    @mock.patch('builtins.open')
-    @mock.patch('subprocess.call')
-    def test_start_writes_to_full_file_path(self, mock_call, mock_open):
-        client = IPerfClient()
-        file_path = '/path/to/foo'
-        client._get_full_file_path = lambda _: file_path
-
-        client.start('127.0.0.1', 'IPERF_ARGS', 'TAG')
-
-        mock_open.assert_called_with(file_path, 'w')
-        self.assertEqual(
-            mock_call.call_args[KWARGS]['stdout'],
-            mock_open().__enter__.return_value,
-            'IPerfClient did not write the logs to the expected file.'
-        )
-
-
-class IPerfClientOverSshTest(unittest.TestCase):
-    """Test antlion.controllers.iperf_client.IPerfClientOverSshTest."""
-
-    @mock.patch('builtins.open')
-    def test_start_writes_output_to_full_file_path(self, mock_open):
-        client = IPerfClientOverSsh({'host': '', 'user': ''})
-        client._ssh_session = mock.Mock()
-        file_path = '/path/to/foo'
-        client._get_full_file_path = lambda _: file_path
-
-        client.start('127.0.0.1', 'IPERF_ARGS', 'TAG')
-
-        mock_open.assert_called_with(file_path, 'w')
-        mock_open().__enter__().write.assert_called_with(
-            client._ssh_session.run().stdout
-        )
-
-
-class IPerfClientOverAdbTest(unittest.TestCase):
-    """Test antlion.controllers.iperf_client.IPerfClientOverAdb."""
-
-    @mock.patch('builtins.open')
-    def test_start_writes_output_to_full_file_path(self, mock_open):
-        client = IPerfClientOverAdb(None)
-        file_path = '/path/to/foo'
-        expected_output = 'output'
-        client._get_full_file_path = lambda _: file_path
-
-        with mock.patch('antlion.controllers.iperf_client.'
-                        'IPerfClientOverAdb._android_device') as adb_device:
-            adb_device.adb.shell.return_value = 'output'
-            client.start('127.0.0.1', 'IPERF_ARGS', 'TAG')
-
-        mock_open.assert_called_with(file_path, 'w')
-        mock_open().__enter__().write.assert_called_with('output')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/iperf_server_test.py b/src/antlion/unit_tests/controllers/iperf_server_test.py
deleted file mode 100644
index e9fc810..0000000
--- a/src/antlion/unit_tests/controllers/iperf_server_test.py
+++ /dev/null
@@ -1,348 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import unittest
-
-import mock
-import os
-
-from antlion.controllers import iperf_server
-from antlion.controllers.iperf_server import IPerfServer
-from antlion.controllers.iperf_server import IPerfServerOverAdb
-from antlion.controllers.iperf_server import IPerfServerOverSsh
-
-# The position in the call tuple that represents the args array.
-ARGS = 0
-
-# The position in the call tuple that represents the kwargs dict.
-KWARGS = 1
-
-MOCK_LOGFILE_PATH = '/path/to/foo'
-
-
-class IPerfServerModuleTest(unittest.TestCase):
-    """Tests the antlion.controllers.iperf_server module."""
-    def test_create_creates_local_iperf_server_with_int(self):
-        self.assertIsInstance(
-            iperf_server.create([12345])[0], IPerfServer,
-            'create() failed to create IPerfServer for integer input.')
-
-    def test_create_creates_local_iperf_server_with_str(self):
-        self.assertIsInstance(
-            iperf_server.create(['12345'])[0], IPerfServer,
-            'create() failed to create IPerfServer for integer input.')
-
-    def test_create_cannot_create_local_iperf_server_with_bad_str(self):
-        with self.assertRaises(ValueError):
-            iperf_server.create(['12345BAD_STRING'])
-
-    @mock.patch('antlion.controllers.iperf_server.utils')
-    def test_create_creates_server_over_ssh_with_ssh_config_and_port(self, _):
-        self.assertIsInstance(
-            iperf_server.create([{
-                'ssh_config': {
-                    'user': '',
-                    'host': ''
-                },
-                'port': ''
-            }])[0], IPerfServerOverSsh,
-            'create() failed to create IPerfServerOverSsh for a valid config.')
-
-    def test_create_creates_server_over_adb_with_proper_config(self):
-        self.assertIsInstance(
-            iperf_server.create([{
-                'AndroidDevice': '53R147',
-                'port': 0
-            }])[0], IPerfServerOverAdb,
-            'create() failed to create IPerfServerOverAdb for a valid config.')
-
-    def test_create_raises_value_error_on_bad_config_dict(self):
-        with self.assertRaises(ValueError):
-            iperf_server.create([{
-                'AndroidDevice': '53R147',
-                'ssh_config': {}
-            }])
-
-    def test_get_port_from_ss_output_returns_correct_port_ipv4(self):
-        ss_output = ('tcp LISTEN  0 5 127.0.0.1:<PORT>  *:*'
-                     ' users:(("cmd",pid=<PID>,fd=3))')
-        self.assertEqual(
-            iperf_server._get_port_from_ss_output(ss_output, '<PID>'),
-            '<PORT>')
-
-    def test_get_port_from_ss_output_returns_correct_port_ipv6(self):
-        ss_output = ('tcp LISTEN  0 5 ff:ff:ff:ff:ff:ff:<PORT>  *:*'
-                     ' users:(("cmd",pid=<PID>,fd=3))')
-        self.assertEqual(
-            iperf_server._get_port_from_ss_output(ss_output, '<PID>'),
-            '<PORT>')
-
-
-class IPerfServerBaseTest(unittest.TestCase):
-    """Tests antlion.controllers.iperf_server.IPerfServerBase."""
-    @mock.patch('os.makedirs')
-    def test_get_full_file_path_creates_parent_directory(self, mock_makedirs):
-        # Will never actually be created/used.
-        logging.log_path = '/tmp/unit_test_garbage'
-
-        server = IPerfServer('port')
-
-        full_file_path = server._get_full_file_path()
-
-        self.assertTrue(mock_makedirs.called,
-                        'Did not attempt to create a directory.')
-        self.assertEqual(
-            os.path.dirname(full_file_path), mock_makedirs.call_args[ARGS][0],
-            'The parent directory of the full file path was not created.')
-
-
-class IPerfServerTest(unittest.TestCase):
-    """Tests antlion.controllers.iperf_server.IPerfServer."""
-
-    PID = 123456
-
-    def setUp(self):
-        iperf_server._get_port_from_ss_output = lambda *_: IPerfServerTest.PID
-
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.subprocess')
-    @mock.patch('antlion.controllers.iperf_server.job')
-    def test_start_makes_started_true(self, mock_job, __, ___):
-        """Tests calling start() without calling stop() makes started True."""
-        server = IPerfServer('port')
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        server.start()
-
-        self.assertTrue(server.started)
-
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.subprocess')
-    @mock.patch('antlion.controllers.iperf_server.job')
-    def test_start_stop_makes_started_false(self, _, __, ___):
-        """Tests calling start() without calling stop() makes started True."""
-        server = IPerfServer('port')
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-
-        server.start()
-        server.stop()
-
-        self.assertFalse(server.started)
-
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.subprocess')
-    @mock.patch('antlion.controllers.iperf_server.job')
-    def test_start_sets_current_log_file(self, _, __, ___):
-        server = IPerfServer('port')
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-
-        server.start()
-
-        self.assertEqual(
-            server._current_log_file, MOCK_LOGFILE_PATH,
-            'The _current_log_file was not received from _get_full_file_path.')
-
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.subprocess')
-    def test_stop_returns_current_log_file(self, _, __):
-        server = IPerfServer('port')
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        server._current_log_file = MOCK_LOGFILE_PATH
-        server._iperf_process = mock.Mock()
-
-        log_file = server.stop()
-
-        self.assertEqual(log_file, MOCK_LOGFILE_PATH,
-                         'The _current_log_file was not returned by stop().')
-
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.subprocess')
-    @mock.patch('antlion.controllers.iperf_server.job')
-    def test_start_does_not_run_two_concurrent_processes(
-            self, start_proc, _, __):
-        server = IPerfServer('port')
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        server._iperf_process = mock.Mock()
-
-        server.start()
-
-        self.assertFalse(
-            start_proc.called,
-            'start() should not begin a second process if another is running.')
-
-    @mock.patch('antlion.utils.stop_standing_subprocess')
-    def test_stop_exits_early_if_no_process_has_started(self, stop_proc):
-        server = IPerfServer('port')
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        server._iperf_process = None
-
-        server.stop()
-
-        self.assertFalse(
-            stop_proc.called,
-            'stop() should not kill a process if no process is running.')
-
-
-class IPerfServerOverSshTest(unittest.TestCase):
-    """Tests antlion.controllers.iperf_server.IPerfServerOverSsh."""
-
-    INIT_ARGS = [{'host': 'TEST_HOST', 'user': 'test'}, 'PORT']
-
-    @mock.patch('antlion.controllers.iperf_server.connection')
-    def test_start_makes_started_true(self, _):
-        """Tests calling start() without calling stop() makes started True."""
-        server = IPerfServerOverSsh(*self.INIT_ARGS)
-        server._ssh_session = mock.Mock()
-        server._cleanup_iperf_port = mock.Mock()
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-
-        server.start()
-
-        self.assertTrue(server.started)
-
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.connection')
-    def test_start_stop_makes_started_false(self, _, __):
-        """Tests calling start() without calling stop() makes started True."""
-        server = IPerfServerOverSsh(*self.INIT_ARGS)
-        server._ssh_session = mock.Mock()
-        server._cleanup_iperf_port = mock.Mock()
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-
-        server.start()
-        server.stop()
-
-        self.assertFalse(server.started)
-
-    @mock.patch('builtins.open')
-    @mock.patch('antlion.controllers.iperf_server.connection')
-    def test_stop_returns_expected_log_file(self, _, __):
-        server = IPerfServerOverSsh(*self.INIT_ARGS)
-        server._ssh_session = mock.Mock()
-        server._cleanup_iperf_port = mock.Mock()
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        server._iperf_pid = mock.Mock()
-
-        log_file = server.stop()
-
-        self.assertEqual(log_file, MOCK_LOGFILE_PATH,
-                         'The expected log file was not returned by stop().')
-
-    @mock.patch('antlion.controllers.iperf_server.connection')
-    def test_start_does_not_run_two_concurrent_processes(self, _):
-        server = IPerfServerOverSsh(*self.INIT_ARGS)
-        server._ssh_session = mock.Mock()
-        server._cleanup_iperf_port = mock.Mock()
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        server._iperf_pid = mock.Mock()
-
-        server.start()
-
-        self.assertFalse(
-            server._ssh_session.run_async.called,
-            'start() should not begin a second process if another is running.')
-
-    @mock.patch('antlion.utils.stop_standing_subprocess')
-    @mock.patch('antlion.controllers.iperf_server.connection')
-    def test_stop_exits_early_if_no_process_has_started(self, _, __):
-        server = IPerfServerOverSsh(*self.INIT_ARGS)
-        server._ssh_session = mock.Mock()
-        server._cleanup_iperf_port = mock.Mock()
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        server._iperf_pid = None
-
-        server.stop()
-
-        self.assertFalse(
-            server._ssh_session.run_async.called,
-            'stop() should not kill a process if no process is running.')
-
-
-class IPerfServerOverAdbTest(unittest.TestCase):
-    """Tests antlion.controllers.iperf_server.IPerfServerOverSsh."""
-
-    ANDROID_DEVICE_PROP = ('antlion.controllers.iperf_server.'
-                           'IPerfServerOverAdb._android_device')
-
-    @mock.patch(ANDROID_DEVICE_PROP)
-    def test_start_makes_started_true(self, mock_ad):
-        """Tests calling start() without calling stop() makes started True."""
-        server = IPerfServerOverAdb('53R147', 'PORT')
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        mock_ad.adb.shell.return_value = '<PID>'
-
-        server.start()
-
-        self.assertTrue(server.started)
-
-    @mock.patch('antlion.libs.proc.job.run')
-    @mock.patch('builtins.open')
-    @mock.patch(ANDROID_DEVICE_PROP)
-    def test_start_stop_makes_started_false(self, mock_ad, _, __):
-        """Tests calling start() without calling stop() makes started True."""
-        server = IPerfServerOverAdb('53R147', 'PORT')
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        mock_ad.adb.shell.side_effect = ['<PID>', '', '', '']
-
-        server.start()
-        server.stop()
-
-        self.assertFalse(server.started)
-
-    @mock.patch('antlion.libs.proc.job.run')
-    @mock.patch('builtins.open')
-    @mock.patch(ANDROID_DEVICE_PROP)
-    def test_stop_returns_expected_log_file(self, mock_ad, _, __):
-        server = IPerfServerOverAdb('53R147', 'PORT')
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        server._iperf_process = mock.Mock()
-        server._iperf_process_adb_pid = '<PID>'
-        mock_ad.adb.shell.side_effect = ['', '', '']
-
-        log_file = server.stop()
-
-        self.assertEqual(log_file, MOCK_LOGFILE_PATH,
-                         'The expected log file was not returned by stop().')
-
-    @mock.patch(ANDROID_DEVICE_PROP)
-    def test_start_does_not_run_two_concurrent_processes(self, android_device):
-        server = IPerfServerOverAdb('53R147', 'PORT')
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        server._iperf_process = mock.Mock()
-
-        server.start()
-
-        self.assertFalse(
-            android_device.adb.shell_nb.called,
-            'start() should not begin a second process if another is running.')
-
-    @mock.patch('antlion.libs.proc.job.run')
-    @mock.patch('builtins.open')
-    @mock.patch(ANDROID_DEVICE_PROP)
-    def test_stop_exits_early_if_no_process_has_started(
-            self, android_device, _, __):
-        server = IPerfServerOverAdb('53R147', 'PORT')
-        server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
-        server._iperf_pid = None
-
-        server.stop()
-
-        self.assertFalse(
-            android_device.adb.shell_nb.called,
-            'stop() should not kill a process if no process is running.')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/monsoon_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/monsoon_test.py
deleted file mode 100755
index b22831b..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/hvpm/monsoon_test.py
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.controllers.monsoon_lib.api.hvpm.monsoon import Monsoon
-
-ASSEMBLY_LINE_IMPORT = ('antlion.controllers.monsoon_lib.api.hvpm.monsoon'
-                        '.AssemblyLineBuilder')
-DOWNSAMPLER_IMPORT = ('antlion.controllers.monsoon_lib.api.hvpm.monsoon'
-                      '.DownSampler')
-TEE_IMPORT = 'antlion.controllers.monsoon_lib.api.hvpm.monsoon.Tee'
-
-# The position in the call tuple that represents the args array.
-ARGS = 0
-
-
-class BaseMonsoonTest(unittest.TestCase):
-    """Tests antlion.controllers.monsoon_lib.api.monsoon.Monsoon."""
-
-    SERIAL = 534147
-
-    def setUp(self):
-        self.sleep_patch = mock.patch('time.sleep')
-        self.sleep_patch.start()
-
-        self.mp_manager_patch = mock.patch('multiprocessing.Manager')
-        self.mp_manager_patch.start()
-
-        proxy_mock = mock.MagicMock()
-        proxy_mock.Protocol.getValue.return_value = 1048576 * 4
-        self.monsoon_proxy = mock.patch(
-            'Monsoon.HVPM.Monsoon', return_value=proxy_mock)
-        self.monsoon_proxy.start()
-
-    def tearDown(self):
-        self.sleep_patch.stop()
-        self.monsoon_proxy.stop()
-        self.mp_manager_patch.stop()
-
-    def test_status_fills_status_packet_first(self):
-        """Tests fillStatusPacket() is called before returning the status.
-
-        If this is not done, the status packet returned is stale.
-        """
-
-        def verify_call_order():
-            if not self.monsoon_proxy().fillStatusPacket.called:
-                self.fail('fillStatusPacket must be called first.')
-
-        monsoon = Monsoon(self.SERIAL)
-        monsoon._mon.statusPacket.side_effect = verify_call_order
-
-        status_packet = monsoon.status
-
-        self.assertEqual(
-            status_packet, monsoon._mon.statusPacket,
-            'monsoon.status MUST return '
-            'MonsoonProxy.statusPacket.')
-
-    @mock.patch(DOWNSAMPLER_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_downsample_skipped_if_hz_unset(
-            self, _, downsampler):
-        """Tests the DownSampler transformer is skipped if it is not needed."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'output_path': None, 'transformers': None}
-
-        monsoon.measure_power(1, hz=5000, **unimportant_kwargs)
-
-        self.assertFalse(
-            downsampler.called,
-            'A Downsampler should not have been created for a the default '
-            'sampling frequency.')
-
-    @mock.patch(DOWNSAMPLER_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_downsamples_immediately_after_sampling(
-            self, assembly_line, downsampler):
-        """Tests """
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'output_path': None, 'transformers': None}
-
-        monsoon.measure_power(1, hz=500, **unimportant_kwargs)
-
-        downsampler.assert_called_once_with(int(round(5000 / 500)))
-        # Assert Downsampler() is the first element within the list.
-        self.assertEqual(assembly_line().into.call_args_list[0][ARGS][0],
-                         downsampler())
-
-    @mock.patch(TEE_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_tee_skipped_if_ouput_path_not_set(self, _, tee):
-        """Tests the Tee Transformer is not added when not needed."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'transformers': None}
-
-        monsoon.measure_power(1, output_path=None, **unimportant_kwargs)
-
-        self.assertFalse(
-            tee.called,
-            'A Tee Transformer should not have been created for measure_power '
-            'without an output_path.')
-
-    @mock.patch(TEE_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_tee_is_added_to_assembly_line(
-            self, assembly_line, tee):
-        """Tests Tee is added to the assembly line with the correct path."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'transformers': None}
-
-        monsoon.measure_power(1, output_path='foo', **unimportant_kwargs)
-
-        tee.assert_called_once_with('foo', 0)
-        # Assert Tee() is the first element within the assembly into calls.
-        self.assertEqual(assembly_line().into.call_args_list[0][ARGS][0],
-                         tee())
-
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_transformers_are_added(self, assembly_line):
-        """Tests additional transformers are added to the assembly line."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'output_path': None}
-        expected_transformers = [mock.Mock(), mock.Mock()]
-
-        monsoon.measure_power(
-            1, transformers=expected_transformers, **unimportant_kwargs)
-
-        self.assertEqual(expected_transformers[0],
-                         assembly_line().into.call_args_list[-2][ARGS][0])
-        self.assertEqual(expected_transformers[1],
-                         assembly_line().into.call_args_list[-1][ARGS][0])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/monsoon_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/monsoon_test.py
deleted file mode 100755
index 90cbadb..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/lvpm_stock/monsoon_test.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon import Monsoon
-
-ASSEMBLY_LINE_IMPORT = ('antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon'
-                        '.AssemblyLineBuilder')
-DOWNSAMPLER_IMPORT = ('antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon'
-                      '.DownSampler')
-TEE_IMPORT = 'antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon.Tee'
-MONSOON_PROXY_IMPORT = ('antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon'
-                        '.MonsoonProxy')
-
-# The position in the call tuple that represents the args array.
-ARGS = 0
-
-
-class BaseMonsoonTest(unittest.TestCase):
-    """Tests antlion.controllers.monsoon_lib.api.monsoon.Monsoon."""
-
-    SERIAL = 534147
-
-    def setUp(self):
-        self.sleep_patch = mock.patch('time.sleep')
-        self.sleep_patch.start()
-
-        self.mp_manager_patch = mock.patch('multiprocessing.Manager')
-        self.mp_manager_patch.start()
-
-        proxy_mock = mock.MagicMock()
-        proxy_mock.get_voltage.return_value = 4.2
-        self.monsoon_proxy = mock.patch(
-            MONSOON_PROXY_IMPORT, return_value=proxy_mock)
-        self.monsoon_proxy.start()
-
-    def tearDown(self):
-        self.sleep_patch.stop()
-        self.monsoon_proxy.stop()
-        self.mp_manager_patch.stop()
-
-    @mock.patch(DOWNSAMPLER_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_downsample_skipped_if_hz_unset(
-            self, _, downsampler):
-        """Tests the DownSampler transformer is skipped if it is not needed."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'output_path': None, 'transformers': None}
-
-        monsoon.measure_power(1, hz=5000, **unimportant_kwargs)
-
-        self.assertFalse(
-            downsampler.called,
-            'A Downsampler should not have been created for a the default '
-            'sampling frequency.')
-
-    @mock.patch(DOWNSAMPLER_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_downsamples_immediately_after_sampling(
-            self, assembly_line, downsampler):
-        """Tests """
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'output_path': None, 'transformers': None}
-
-        monsoon.measure_power(1, hz=500, **unimportant_kwargs)
-
-        downsampler.assert_called_once_with(int(round(5000 / 500)))
-        # Assert Downsampler() is the first element within the list.
-        self.assertEqual(assembly_line().into.call_args_list[0][ARGS][0],
-                         downsampler())
-
-    @mock.patch(TEE_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_tee_skipped_if_ouput_path_not_set(self, _, tee):
-        """Tests the Tee Transformer is not added when not needed."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'transformers': None}
-
-        monsoon.measure_power(1, output_path=None, **unimportant_kwargs)
-
-        self.assertFalse(
-            tee.called,
-            'A Tee Transformer should not have been created for measure_power '
-            'without an output_path.')
-
-    @mock.patch(TEE_IMPORT)
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_tee_is_added_to_assembly_line(
-            self, assembly_line, tee):
-        """Tests Tee is added to the assembly line with the correct path."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'transformers': None}
-
-        monsoon.measure_power(1, output_path='foo', **unimportant_kwargs)
-
-        tee.assert_called_once_with('foo', 0)
-        # Assert Tee() is the first element within the assembly into calls.
-        self.assertEqual(assembly_line().into.call_args_list[0][ARGS][0],
-                         tee())
-
-    @mock.patch(ASSEMBLY_LINE_IMPORT)
-    def test_measure_power_transformers_are_added(self, assembly_line):
-        """Tests additional transformers are added to the assembly line."""
-        monsoon = Monsoon(self.SERIAL)
-        unimportant_kwargs = {'hz': 5000, 'output_path': None}
-        expected_transformers = [mock.Mock(), mock.Mock()]
-
-        monsoon.measure_power(
-            1, transformers=expected_transformers, **unimportant_kwargs)
-
-        self.assertEqual(expected_transformers[0],
-                         assembly_line().into.call_args_list[-2][ARGS][0])
-        self.assertEqual(expected_transformers[1],
-                         assembly_line().into.call_args_list[-1][ARGS][0])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/api/monsoon_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/api/monsoon_test.py
deleted file mode 100755
index d40f467..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/api/monsoon_test.py
+++ /dev/null
@@ -1,223 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.controllers.monsoon_lib.api.common import MonsoonError
-from antlion.controllers.monsoon_lib.api.common import PASSTHROUGH_STATES
-from antlion.controllers.monsoon_lib.api.common import PassthroughStates
-from antlion.controllers.monsoon_lib.api.monsoon import BaseMonsoon
-
-# The position in the call tuple that represents the args array.
-ARGS = 0
-
-STILL_TIME_LEFT = 0
-OUT_OF_TIME = 9001
-
-
-class MonsoonImpl(BaseMonsoon):
-    MIN_VOLTAGE = 1.5
-    MAX_VOLTAGE = 3.0
-
-    set_voltage = mock.Mock()
-    release_monsoon_connection = mock.Mock()
-    establish_monsoon_connection = mock.Mock()
-
-    def _set_usb_passthrough_mode(self, value):
-        self.__usb_passthrough_mode = value
-
-    def __init__(self):
-        super().__init__()
-        self.__usb_passthrough_mode = None
-
-    @property
-    def status(self):
-        class StatusPacket(object):
-            def __init__(self, passthrough_mode):
-                self.usbPassthroughMode = (
-                    passthrough_mode
-                    if passthrough_mode in PASSTHROUGH_STATES.values() else
-                    PASSTHROUGH_STATES.get(passthrough_mode, None))
-
-        return StatusPacket(self.__usb_passthrough_mode)
-
-
-class BaseMonsoonTest(unittest.TestCase):
-    """Tests antlion.controllers.monsoon_lib.api.monsoon.Monsoon."""
-
-    def setUp(self):
-        self.sleep_patch = mock.patch('time.sleep')
-        self.sleep_patch.start()
-        MonsoonImpl.set_voltage = mock.Mock()
-        MonsoonImpl.release_monsoon_connection = mock.Mock()
-        MonsoonImpl.establish_monsoon_connection = mock.Mock()
-
-    def tearDown(self):
-        self.sleep_patch.stop()
-
-    def test_get_closest_valid_voltage_returns_zero_when_low(self):
-        voltage_to_round_to_zero = MonsoonImpl.MIN_VOLTAGE / 2 - 0.1
-        self.assertEqual(
-            MonsoonImpl.get_closest_valid_voltage(voltage_to_round_to_zero), 0)
-
-    def test_get_closest_valid_voltage_snaps_to_min_when_low_but_close(self):
-        voltage_to_round_to_min = MonsoonImpl.MIN_VOLTAGE / 2 + 0.1
-        self.assertEqual(
-            MonsoonImpl.get_closest_valid_voltage(voltage_to_round_to_min),
-            MonsoonImpl.MIN_VOLTAGE)
-
-    def test_get_closest_valid_voltage_snaps_to_max_when_high(self):
-        voltage_to_round_to_max = MonsoonImpl.MAX_VOLTAGE * 2
-        self.assertEqual(
-            MonsoonImpl.get_closest_valid_voltage(voltage_to_round_to_max),
-            MonsoonImpl.MAX_VOLTAGE)
-
-    def test_get_closest_valid_voltage_to_not_round(self):
-        valid_voltage = (MonsoonImpl.MAX_VOLTAGE + MonsoonImpl.MIN_VOLTAGE) / 2
-
-        self.assertEqual(
-            MonsoonImpl.get_closest_valid_voltage(valid_voltage),
-            valid_voltage)
-
-    def test_is_voltage_valid_voltage_is_valid(self):
-        valid_voltage = (MonsoonImpl.MAX_VOLTAGE + MonsoonImpl.MIN_VOLTAGE) / 2
-
-        self.assertTrue(MonsoonImpl.is_voltage_valid(valid_voltage))
-
-    def test_is_voltage_valid_voltage_is_not_valid(self):
-        invalid_voltage = MonsoonImpl.MIN_VOLTAGE - 2
-
-        self.assertFalse(MonsoonImpl.is_voltage_valid(invalid_voltage))
-
-    def test_validate_voltage_voltage_is_valid(self):
-        valid_voltage = (MonsoonImpl.MAX_VOLTAGE + MonsoonImpl.MIN_VOLTAGE) / 2
-
-        MonsoonImpl.validate_voltage(valid_voltage)
-
-    def test_validate_voltage_voltage_is_not_valid(self):
-        invalid_voltage = MonsoonImpl.MIN_VOLTAGE - 2
-
-        with self.assertRaises(MonsoonError):
-            MonsoonImpl.validate_voltage(invalid_voltage)
-
-    def test_set_voltage_safe_rounds_unsafe_voltage(self):
-        invalid_voltage = MonsoonImpl.MIN_VOLTAGE - .1
-        monsoon = MonsoonImpl()
-
-        monsoon.set_voltage_safe(invalid_voltage)
-
-        monsoon.set_voltage.assert_called_once_with(MonsoonImpl.MIN_VOLTAGE)
-
-    def test_set_voltage_safe_does_not_round_safe_voltages(self):
-        valid_voltage = (MonsoonImpl.MAX_VOLTAGE + MonsoonImpl.MIN_VOLTAGE) / 2
-        monsoon = MonsoonImpl()
-
-        monsoon.set_voltage_safe(valid_voltage)
-
-        monsoon.set_voltage.assert_called_once_with(valid_voltage)
-
-    def test_ramp_voltage_sets_vout_to_final_value(self):
-        """Tests the desired end voltage is set."""
-        monsoon = MonsoonImpl()
-        expected_value = monsoon.MIN_VOLTAGE
-
-        monsoon.ramp_voltage(0, expected_value)
-
-        self.assertEqual(
-            MonsoonImpl.set_voltage.call_args_list[-1][ARGS][0],
-            expected_value, 'The last call to setVout() was not the expected '
-            'final value.')
-
-    def test_ramp_voltage_ramps_voltage_over_time(self):
-        """Tests that voltage increases between each call."""
-        monsoon = MonsoonImpl()
-
-        difference = (MonsoonImpl.VOLTAGE_RAMP_RATE *
-                      MonsoonImpl.VOLTAGE_RAMP_TIME_STEP * 5)
-        monsoon.ramp_voltage(MonsoonImpl.MIN_VOLTAGE,
-                             MonsoonImpl.MIN_VOLTAGE + difference)
-
-        previous_voltage = 0
-        for set_voltage_call in MonsoonImpl.set_voltage.call_args_list:
-            self.assertGreaterEqual(
-                set_voltage_call[ARGS][0], previous_voltage,
-                'ramp_voltage does not always increment voltage.')
-            previous_voltage = set_voltage_call[ARGS][0]
-
-    def test_usb_accepts_passthrough_state_sets_with_str(self):
-        monsoon = MonsoonImpl()
-        state_string = 'on'
-
-        monsoon.usb(state_string)
-
-        self.assertEqual(monsoon.status.usbPassthroughMode,
-                         PASSTHROUGH_STATES[state_string])
-
-    def test_usb_accepts_passthrough_state_sets_with_int_value(self):
-        monsoon = MonsoonImpl()
-
-        monsoon.usb(1)
-
-        self.assertEqual(monsoon.status.usbPassthroughMode, 1)
-
-    def test_usb_raises_on_invalid_str_value(self):
-        monsoon = MonsoonImpl()
-
-        with self.assertRaises(ValueError):
-            monsoon.usb('DEADBEEF')
-
-    def test_usb_raises_on_invalid_int_value(self):
-        monsoon = MonsoonImpl()
-
-        with self.assertRaises(ValueError):
-            monsoon.usb(9001)
-
-    @mock.patch('time.time')
-    def test_usb_raises_timeout_error(self, time):
-        monsoon = MonsoonImpl()
-        time.side_effect = [STILL_TIME_LEFT, OUT_OF_TIME]
-
-        with self.assertRaises(TimeoutError):
-            monsoon.usb(1)
-
-    def test_usb_does_not_set_passthrough_mode_if_unchanged(self):
-        """Tests that the passthrough mode is not reset if it is unchanged."""
-        monsoon = MonsoonImpl()
-        existing_state = PassthroughStates.ON
-        monsoon._set_usb_passthrough_mode(existing_state)
-        monsoon._set_usb_passthrough_mode = mock.Mock()
-
-        monsoon.usb(existing_state)
-
-        self.assertFalse(
-            monsoon._set_usb_passthrough_mode.called,
-            'usbPassthroughMode should not be called when the '
-            'state does not change.')
-
-    def take_samples_always_reestablishes_the_monsoon_connection(self):
-        monsoon = MonsoonImpl()
-        assembly_line = mock.Mock()
-        assembly_line.run.side_effect = Exception('Some Terrible error')
-
-        monsoon.take_samples(assembly_line)
-
-        self.assertTrue(monsoon.establish_monsoon_connection.called)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/__init__.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/assembly_line_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/assembly_line_test.py
deleted file mode 100755
index 7afd5ae..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/assembly_line_test.py
+++ /dev/null
@@ -1,248 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import AssemblyLineBuilder
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import DevNullBufferStream
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import IndexedBuffer
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ProcessAssemblyLine
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import ThreadAssemblyLine
-
-ASSEMBLY_LINE_MODULE = (
-    'antlion.controllers.monsoon_lib.sampling.engine.assembly_line')
-
-
-def mock_import(full_module_name, import_name):
-    return mock.patch('%s.%s' % (full_module_name, import_name))
-
-
-class ProcessAssemblyLineTest(unittest.TestCase):
-    """Tests the basic functionality of ProcessAssemblyLine."""
-
-    @mock.patch('multiprocessing.Pool')
-    def test_run_no_nodes(self, pool_mock):
-        """Tests run() with no nodes does not spawn a new process."""
-        empty_node_list = []
-        assembly_line = ProcessAssemblyLine(empty_node_list)
-
-        assembly_line.run()
-
-        self.assertFalse(pool_mock().__enter__().apply_async.called)
-
-    @mock.patch('multiprocessing.Pool')
-    def test_run_spawns_new_process_for_each_node(self, pool_mock):
-        """Tests run() with a node spawns a new process for each node."""
-        node_list = [mock.Mock(), mock.Mock()]
-        assembly_line = ProcessAssemblyLine(node_list)
-
-        assembly_line.run()
-
-        apply_async = pool_mock().apply_async
-        self.assertEqual(len(node_list), apply_async.call_count)
-        for node in node_list:
-            apply_async.assert_any_call(node.transformer.transform,
-                                        [node.input_stream])
-
-
-class ThreadAssemblyLineTest(unittest.TestCase):
-    """Tests the basic functionality of ThreadAssemblyLine."""
-
-    @mock_import(ASSEMBLY_LINE_MODULE, 'ThreadPoolExecutor')
-    def test_run_no_nodes(self, pool_mock):
-        """Tests run() with no nodes does not spawn a new thread."""
-        empty_node_list = []
-        assembly_line = ThreadAssemblyLine(empty_node_list)
-
-        assembly_line.run()
-
-        self.assertFalse(pool_mock().__enter__().submit.called)
-
-    @mock_import(ASSEMBLY_LINE_MODULE, 'ThreadPoolExecutor')
-    def test_run_spawns_new_thread_for_each_node(self, pool_mock):
-        """Tests run() with a node spawns a new thread for each node."""
-        node_list = [mock.Mock(), mock.Mock()]
-        assembly_line = ThreadAssemblyLine(node_list)
-
-        assembly_line.run()
-
-        submit = pool_mock().__enter__().submit
-        self.assertEqual(len(node_list), submit.call_count)
-        for node in node_list:
-            submit.assert_any_call(node.transformer.transform,
-                                   node.input_stream)
-
-
-class AssemblyLineBuilderTest(unittest.TestCase):
-    """Tests the basic functionality of AssemblyLineBuilder."""
-
-    def test_source_raises_if_nodes_already_in_assembly_line(self):
-        """Tests a ValueError is raised if a node already exists."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        first_source = mock.Mock()
-        second_source = mock.Mock()
-        builder.source(first_source)
-
-        with self.assertRaises(ValueError) as context:
-            builder.source(second_source)
-
-        self.assertIn('single source', context.exception.args[0])
-
-    def test_source_sets_input_stream_from_given_stream(self):
-        """Tests source() sets input_stream from args."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        input_stream = mock.Mock()
-        dummy_source = mock.Mock()
-
-        builder.source(dummy_source, input_stream=input_stream)
-
-        self.assertEqual(input_stream, builder.nodes[-1].input_stream)
-
-    def test_source_creates_a_new_input_stream(self):
-        """Tests source() takes in DevNullBufferStream when None is provided."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        dummy_source = mock.Mock()
-
-        builder.source(dummy_source)
-
-        self.assertIsInstance(builder.nodes[-1].input_stream,
-                              DevNullBufferStream)
-
-    def test_source_returns_self(self):
-        """Tests source() returns the builder."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-
-        return_value = builder.source(mock.Mock())
-
-        self.assertEqual(return_value, builder)
-
-    def test_into_raises_value_error_if_source_not_called_yet(self):
-        """Tests a ValueError is raised if into() is called before source()."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        dummy_transformer = mock.Mock()
-
-        with self.assertRaises(ValueError) as context:
-            builder.into(dummy_transformer)
-
-        self.assertIn('source', context.exception.args[0])
-
-    def test_into_raises_value_error_if_already_built(self):
-        """Tests a ValueError is raised into() is called after build()."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        dummy_transformer = mock.Mock()
-        # Build before trying to add more nodes.
-        builder.source(dummy_transformer).build()
-
-        with self.assertRaises(ValueError) as context:
-            builder.into(dummy_transformer)
-
-        self.assertIn('built', context.exception.args[0])
-
-    def test_into_appends_transformer_to_node_list(self):
-        """Tests into() appends the transformer to the end of the node list."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        dummy_transformer = mock.Mock()
-        dummy_source = mock.Mock()
-        builder.source(dummy_source)
-
-        builder.into(dummy_transformer)
-
-        self.assertEqual(dummy_transformer, builder.nodes[-1].transformer)
-
-    def test_into_sets_output_stream_to_newly_created_stream(self):
-        """Tests into() sets the input_stream queue to the newly created one."""
-        queue_generator = mock.Mock()
-        builder = AssemblyLineBuilder(queue_generator, mock.Mock())
-        dummy_transformer = mock.Mock()
-        dummy_source = mock.Mock()
-        builder.source(dummy_source)
-
-        builder.into(dummy_transformer)
-
-        self.assertEqual(queue_generator(),
-                         builder.nodes[-1].input_stream._buffer_queue)
-
-    def test_into_returns_self(self):
-        """Tests into() returns the builder."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        builder.source(mock.Mock())
-
-        return_value = builder.into(mock.Mock())
-
-        self.assertEqual(return_value, builder)
-
-    def test_build_raises_if_already_built(self):
-        """Tests build() raises ValueError if build() was already called."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-        builder.source(mock.Mock()).build()
-
-        with self.assertRaises(ValueError) as context:
-            builder.build()
-
-        self.assertIn('already built', context.exception.args[0])
-
-    def test_build_raises_if_no_source_has_been_set(self):
-        """Tests build() raises when there's nothing to build."""
-        builder = AssemblyLineBuilder(mock.Mock(), mock.Mock())
-
-        with self.assertRaises(ValueError) as context:
-            builder.build()
-
-        self.assertIn('empty', context.exception.args[0])
-
-    def test_build_properly_sets_output_stream(self):
-        """Tests build() passes the output_stream to the AssemblyLine."""
-        given_output_stream = 1
-
-        assembly_line_generator = mock.Mock()
-        builder = AssemblyLineBuilder(mock.Mock(), assembly_line_generator)
-        builder.source(mock.Mock())
-
-        builder.build(output_stream=given_output_stream)
-
-        self.assertEqual(
-            assembly_line_generator.call_args[0][0][-1].output_stream,
-            given_output_stream)
-
-    def test_build_generates_dev_null_stream_by_default(self):
-        """Tests build() uses DevNullBufferStream when no output_stream."""
-        assembly_line_generator = mock.Mock()
-        builder = AssemblyLineBuilder(mock.Mock(), assembly_line_generator)
-        builder.source(mock.Mock())
-
-        builder.build()
-
-        self.assertIsInstance(
-            assembly_line_generator.call_args[0][0][-1].output_stream,
-            DevNullBufferStream)
-
-
-class IndexedBufferTest(unittest.TestCase):
-    """Tests the IndexedBuffer class."""
-
-    def test_create_indexed_buffer_uses_existing_list(self):
-        my_list = [0, 1, 2, 3, 4, 5]
-        self.assertEqual(IndexedBuffer(0, my_list).buffer, my_list)
-
-    def test_create_indexed_buffer_creates_buffer_when_given_a_size(self):
-        buffer_len = 10
-        self.assertEqual(len(IndexedBuffer(0, buffer_len).buffer), buffer_len)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/calibration_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/calibration_test.py
deleted file mode 100755
index 9122d6b..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/calibration_test.py
+++ /dev/null
@@ -1,165 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import statistics
-import unittest
-from collections import deque
-
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationError
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationScalars
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationSnapshot
-from antlion.controllers.monsoon_lib.sampling.engine.calibration import CalibrationWindows
-from antlion.controllers.monsoon_lib.sampling.enums import Channel
-from antlion.controllers.monsoon_lib.sampling.enums import Granularity
-from antlion.controllers.monsoon_lib.sampling.enums import Origin
-
-# These values don't really matter.
-C = Channel.MAIN
-O = Origin.ZERO
-G = Granularity.FINE
-C2 = Channel.USB
-O2 = Origin.REFERENCE
-G2 = Granularity.COARSE
-
-
-class CalibrationWindowsTest(unittest.TestCase):
-    """Unit tests the CalibrationWindows class."""
-
-    def setUp(self):
-        # Here, we set up CalibrationWindows with a single dict entry so we can
-        # add values to the window. Normally, a child class is responsible for
-        # setting the keys of the CalibrationWindows object.
-        self.calibration_windows = CalibrationWindows(
-            calibration_window_size=5)
-        self.calibration_windows._calibrations[(C, O, G)] = deque()
-
-    def test_add_adds_new_value_to_end_of_window(self):
-        """Tests add() appends the new value to the end of the window."""
-        self.calibration_windows.add(C, O, G, 0)
-        self.calibration_windows.add(C, O, G, 1)
-        self.calibration_windows.add(C, O, G, 2)
-
-        expected_value = 3
-
-        self.calibration_windows.add(C, O, G, expected_value)
-
-        self.assertEqual(expected_value,
-                         self.calibration_windows._calibrations[(C, O, G)][-1])
-
-    def test_add_removes_stale_values(self):
-        """Tests add() removes values outside of the calibration window."""
-        value_to_remove = 0
-        new_values = range(1, 6)
-
-        self.calibration_windows.add(C, O, G, value_to_remove)
-        for new_value in new_values:
-            self.calibration_windows.add(C, O, G, new_value)
-
-        self.assertNotIn(value_to_remove,
-                         self.calibration_windows._calibrations[(C, O, G)])
-
-    def test_get_averages_items_within_window(self):
-        """tests get() returns the average of all values within the window."""
-        values = range(5)
-        expected_value = statistics.mean(values)
-
-        for value in values:
-            self.calibration_windows.add(C, O, G, value)
-
-        self.assertEqual(self.calibration_windows.get(C, O, G), expected_value)
-
-    def test_get_raises_error_when_calibration_is_not_complete(self):
-        """Tests get() raises CalibrationError when the window is not full."""
-        values = range(4)
-        for value in values:
-            self.calibration_windows.add(C, O, G, value)
-
-        with self.assertRaises(CalibrationError):
-            self.calibration_windows.get(C, O, G)
-
-
-class CalibrationScalarsTest(unittest.TestCase):
-    """Unit tests the CalibrationScalars class."""
-
-    def setUp(self):
-        # Here, we set up CalibrationScalars with a single dict entry so we can
-        # add values to the window. Normally, a child class is responsible for
-        # setting the keys of the CalibrationScalars object.
-        self.calibration_scalars = CalibrationScalars()
-        # Use a non-integer value so unit tests will fail when a bug occurs.
-        self.calibration_scalars._calibrations[(C, O, G)] = None
-
-    def test_get_returns_last_added_scalar(self):
-        """Tests the value added is the value returned from get()."""
-        ignored_value = 2.71828
-        expected_value = 3.14159
-
-        self.calibration_scalars.add(C, O, G, ignored_value)
-        self.calibration_scalars.add(C, O, G, expected_value)
-
-        self.assertEqual(expected_value, self.calibration_scalars.get(C, O, G))
-
-
-class CalibrationSnapshotTest(unittest.TestCase):
-    """Unit tests the CalibrationSnapshot class."""
-
-    def test_all_keys_are_copied_to_snapshot(self):
-        """Tests that all keys from passed-in collection are copied."""
-        base_calibration = CalibrationScalars()
-        base_calibration._calibrations = {
-            (C, O, G): 2.71828,
-            (C2, O2, G2): 3.14159,
-        }
-
-        calibration_snapshot = CalibrationSnapshot(base_calibration)
-
-        self.assertSetEqual(
-            set(base_calibration.get_keys()),
-            set(calibration_snapshot.get_keys()))
-
-    def test_init_raises_value_error_upon_illegal_arguments(self):
-        """Tests __init__() raises ValueError if the argument is invalid."""
-        with self.assertRaises(ValueError):
-            CalibrationSnapshot({'illegal': 'dictionary'})
-
-    def test_calibration_error_surfaced_on_get(self):
-        """Tests get() raises a CalibrationError if the snapshotted collection
-        had a CalibrationError.
-        """
-        base_calibration = CalibrationScalars()
-        base_calibration._calibrations = {
-            (C, O, G): CalibrationError('raise me')
-        }
-
-        calibration_snapshot = CalibrationSnapshot(base_calibration)
-
-        with self.assertRaises(CalibrationError):
-            calibration_snapshot.get(C, O, G)
-
-    def test_calibration_copied_upon_snapshot_created(self):
-        """Tests the calibration value is snapshotted."""
-        expected_value = 5
-        unexpected_value = 10
-        base_calibration = CalibrationScalars()
-        base_calibration._calibrations = {(C, O, G): expected_value}
-
-        calibration_snapshot = CalibrationSnapshot(base_calibration)
-        base_calibration.add(C, O, G, unexpected_value)
-
-        self.assertEqual(calibration_snapshot.get(C, O, G), expected_value)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformer_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformer_test.py
deleted file mode 100755
index dafa1e1..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformer_test.py
+++ /dev/null
@@ -1,268 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferList
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import BufferStream
-from antlion.controllers.monsoon_lib.sampling.engine.assembly_line import IndexedBuffer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import ParallelTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SequentialTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import SourceTransformer
-from antlion.controllers.monsoon_lib.sampling.engine.transformer import Transformer
-
-# The indexes of the arguments returned in Mock's call lists.
-ARGS = 0
-KWARGS = 1
-
-
-class TransformerImpl(Transformer):
-    """A basic implementation of a Transformer object."""
-
-    def __init__(self):
-        super().__init__()
-        self.actions = []
-
-    def on_begin(self):
-        self.actions.append('begin')
-
-    def on_end(self):
-        self.actions.append('end')
-
-    def _transform(self, _):
-        self.actions.append('transform')
-
-
-def raise_exception(tipe=Exception):
-    def exception_raiser():
-        raise tipe()
-
-    return exception_raiser
-
-
-class TransformerTest(unittest.TestCase):
-    """Tests the Transformer class."""
-
-    def test_transform_calls_functions_in_order(self):
-        """Tests transform() calls functions in the correct arrangement."""
-        my_transformer = TransformerImpl()
-
-        my_transformer.transform(mock.Mock())
-
-        self.assertEqual(['begin', 'transform', 'end'], my_transformer.actions)
-
-    def test_transform_initializes_input_stream(self):
-        """Tests transform() initializes the input_stream before beginning."""
-        input_stream = mock.Mock()
-        transformer = TransformerImpl()
-        # Purposely fail before sending any data
-        transformer.on_begin = raise_exception(Exception)
-
-        with self.assertRaises(Exception):
-            transformer.transform(input_stream)
-
-        # Asserts initialize was called before on_begin.
-        self.assertTrue(input_stream.initialize.called)
-
-    def test_transform_initializes_output_stream(self):
-        """Tests transform() initializes the output_stream before beginning."""
-        output_stream = mock.Mock()
-        transformer = TransformerImpl()
-        transformer.set_output_stream(output_stream)
-        # Purposely fail before sending any data
-        transformer.on_begin = raise_exception(Exception)
-
-        with self.assertRaises(Exception):
-            transformer.transform(mock.Mock())
-
-        # Asserts initialize was called before on_begin.
-        self.assertTrue(output_stream.initialize.called)
-
-
-class SourceTransformerTest(unittest.TestCase):
-    """Tests the SourceTransformer class."""
-
-    def test_transform_ends_on_buffer_stream_end(self):
-        """Tests transformation ends on stream end."""
-        source_transformer = SourceTransformer()
-        source_transformer.set_output_stream(mock.Mock())
-        transform_buffer = mock.Mock(side_effect=[BufferStream.END])
-        source_transformer._transform_buffer = transform_buffer
-
-        output_stream = mock.Mock()
-        source_transformer.transform(output_stream)
-
-        self.assertFalse(output_stream.add_indexed_buffer.called)
-
-    def test_transform_adds_transformed_index_buffer(self):
-        source_transformer = SourceTransformer()
-        output_stream = mock.Mock()
-        source_transformer.set_output_stream(output_stream)
-        expected_buffer = [0, 1, 2]
-        transform_buffer = mock.Mock(
-            side_effect=[expected_buffer, BufferStream.END])
-        source_transformer._transform_buffer = transform_buffer
-
-        source_transformer.transform(mock.Mock())
-
-        self.assertEqual(
-            expected_buffer,
-            output_stream.add_indexed_buffer.call_args[ARGS][0].buffer)
-
-    def test_transform_increases_buffer_index_each_call(self):
-        source_transformer = SourceTransformer()
-        output_stream = mock.Mock()
-        source_transformer.set_output_stream(output_stream)
-        buffer = [0, 1, 2]
-        transform_buffer = mock.Mock(
-            side_effect=[buffer, buffer, buffer, BufferStream.END])
-        source_transformer._transform_buffer = transform_buffer
-
-        source_transformer.transform(mock.Mock())
-
-        self.assertEqual([0, 1, 2], [
-            output_stream.add_indexed_buffer.call_args_list[i][ARGS][0].index
-            for i in range(output_stream.add_indexed_buffer.call_count)
-        ])
-
-    def test_transform_calls_end_stream(self):
-        source_transformer = SourceTransformer()
-        output_stream = mock.Mock()
-        source_transformer.set_output_stream(output_stream)
-        transform_buffer = mock.Mock(side_effect=[BufferStream.END])
-        source_transformer._transform_buffer = transform_buffer
-
-        source_transformer.transform(mock.Mock())
-
-        self.assertTrue(output_stream.end_stream.called)
-
-
-class SequentialTransformerTest(unittest.TestCase):
-    """Unit tests the SequentialTransformer class."""
-
-    def test_send_buffers_updates_next_index_on_buffer_list(self):
-        sequential_transformer = SequentialTransformer()
-        sequential_transformer._next_index = 10
-        expected_next_index = 15
-
-        sequential_transformer._send_buffers(BufferList([[]] * 5))
-
-        self.assertEqual(expected_next_index,
-                         sequential_transformer._next_index)
-
-    def test_send_buffers_updates_next_index_on_single_buffer(self):
-        sequential_transformer = SequentialTransformer()
-        sequential_transformer._next_index = 10
-        expected_next_index = 11
-
-        sequential_transformer._send_buffers([])
-
-        self.assertEqual(expected_next_index,
-                         sequential_transformer._next_index)
-
-    def test_send_buffers_sends_buffer_list_with_correct_indexes(self):
-        buffers_to_send = [
-            [1],
-            [1, 2],
-            [1, 2, 3],
-            [1, 2, 3, 4],
-            [1, 2, 3, 4, 5],
-        ]
-        sequential_transformer = SequentialTransformer()
-        output_stream = mock.Mock()
-        sequential_transformer.set_output_stream(output_stream)
-        sequential_transformer._send_buffers(BufferList(buffers_to_send))
-
-        for expected_index, expected_buffer in enumerate(buffers_to_send):
-            call = output_stream.add_indexed_buffer.call_args_list[
-                expected_index]
-            self.assertEqual(expected_index, call[ARGS][0].index)
-            self.assertEqual(expected_buffer, call[ARGS][0].buffer)
-
-    def test_transform_breaks_upon_buffer_stream_end_received(self):
-        sequential_transformer = SequentialTransformer()
-        output_stream = mock.Mock()
-        input_stream = mock.Mock()
-        sequential_transformer.set_output_stream(output_stream)
-        input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]
-
-        sequential_transformer._transform(input_stream)
-
-        self.assertFalse(output_stream.add_indexed_buffer.called)
-
-    def test_transform_closes_output_stream_when_finished(self):
-        sequential_transformer = SequentialTransformer()
-        output_stream = mock.Mock()
-        input_stream = mock.Mock()
-        sequential_transformer.set_output_stream(output_stream)
-        input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]
-
-        sequential_transformer._transform(input_stream)
-
-        self.assertTrue(output_stream.end_stream.called)
-
-
-class ParallelTransformerTest(unittest.TestCase):
-    """Unit tests the ParallelTransformer class."""
-
-    def test_transform_breaks_upon_buffer_stream_end_received(self):
-        parallel_transformer = ParallelTransformer()
-        output_stream = mock.Mock()
-        input_stream = mock.Mock()
-        parallel_transformer.set_output_stream(output_stream)
-        input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]
-
-        parallel_transformer._transform(input_stream)
-
-        self.assertFalse(output_stream.add_indexed_buffer.called)
-
-    def test_transform_closes_output_stream_when_finished(self):
-        parallel_transformer = ParallelTransformer()
-        output_stream = mock.Mock()
-        input_stream = mock.Mock()
-        parallel_transformer.set_output_stream(output_stream)
-        input_stream.remove_indexed_buffer.side_effect = [BufferStream.END]
-
-        parallel_transformer._transform(input_stream)
-
-        self.assertTrue(output_stream.end_stream.called)
-
-    def test_transform_passes_indexed_buffer_with_updated_buffer(self):
-        expected_buffer = [0, 1, 2, 3, 4]
-        expected_index = 12345
-        parallel_transformer = ParallelTransformer()
-        output_stream = mock.Mock()
-        input_stream = mock.Mock()
-        parallel_transformer.set_output_stream(output_stream)
-        input_stream.remove_indexed_buffer.side_effect = [
-            IndexedBuffer(expected_index, []), BufferStream.END
-        ]
-        parallel_transformer._transform_buffer = lambda _: expected_buffer
-
-        parallel_transformer._transform(input_stream)
-
-        self.assertEqual(
-            expected_buffer,
-            output_stream.add_indexed_buffer.call_args_list[0][ARGS][0].buffer)
-        self.assertEqual(
-            expected_index,
-            output_stream.add_indexed_buffer.call_args_list[0][ARGS][0].index)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformers_test.py b/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformers_test.py
deleted file mode 100755
index 56d34e3..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_lib/sampling/engine/transformers_test.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import statistics
-import unittest
-
-import mock
-
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import DownSampler
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import PerfgateTee
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import SampleAggregator
-from antlion.controllers.monsoon_lib.sampling.engine.transformers import Tee
-from antlion.controllers.monsoon_lib.sampling.hvpm.transformers import HvpmReading
-
-ARGS = 0
-KWARGS = 1
-
-
-class TeeTest(unittest.TestCase):
-    """Unit tests the transformers.Tee class."""
-
-    @mock.patch('builtins.open')
-    def test_begin_opens_file_on_expected_filename(self, open_mock):
-        expected_filename = 'foo'
-
-        Tee(expected_filename).on_begin()
-
-        open_mock.assert_called_with(expected_filename, 'w+')
-
-    @mock.patch('builtins.open')
-    def test_end_closes_file(self, open_mock):
-        tee = Tee('foo')
-        tee.on_begin()
-
-        tee.on_end()
-
-        self.assertTrue(open_mock().close.called)
-
-    @mock.patch('builtins.open')
-    def test_transform_buffer_outputs_correct_format(self, open_mock):
-        tee = Tee('foo')
-        tee.on_begin()
-
-        expected_output = [
-            '0.010000000 1.414213562370\n', '0.020000000 2.718281828460\n',
-            '0.030000000 3.141592653590\n'
-        ]
-
-        tee._transform_buffer([
-            HvpmReading([1.41421356237, 0, 0, 0, 0], 0.01),
-            HvpmReading([2.71828182846, 0, 0, 0, 0], 0.02),
-            HvpmReading([3.14159265359, 0, 0, 0, 0], 0.03),
-        ])
-
-        for call, out in zip(open_mock().write.call_args_list,
-                             expected_output):
-            self.assertEqual(call[ARGS][0], out)
-
-
-class PerfgateTeeTest(unittest.TestCase):
-    """Unit tests the transformers.PerfgateTee class."""
-
-    @mock.patch('builtins.open')
-    def test_begin_opens_file_on_expected_filename(self, open_mock):
-        expected_filename = 'foo'
-
-        PerfgateTee(expected_filename).on_begin()
-
-        open_mock.assert_called_with(expected_filename, 'w+')
-
-    @mock.patch('builtins.open')
-    def test_end_closes_file(self, open_mock):
-        tee = PerfgateTee('foo')
-        tee.on_begin()
-
-        tee.on_end()
-
-        self.assertTrue(open_mock().close.called)
-
-    @mock.patch('builtins.open')
-    def test_transform_buffer_outputs_correct_format(self, open_mock):
-        tee = PerfgateTee('foo')
-        tee.on_begin()
-
-        expected_output = [
-            '1596149635552503296,0.000223,4.193050\n',
-            '1596149635562476032,0.000212,4.193190\n',
-            '1596149635572549376,0.000225,4.193135\n',
-        ]
-
-        tee._transform_buffer([
-            HvpmReading([0.000223, 0, 0, 4.193050, 0], 1596149635.552503296),
-            HvpmReading([0.000212, 0, 0, 4.193190, 0], 1596149635.562476032),
-            HvpmReading([0.000225, 0, 0, 4.193135, 0], 1596149635.572549376),
-        ])
-
-        for call, out in zip(open_mock().write.call_args_list,
-                             expected_output):
-            self.assertEqual(call[ARGS][0], out)
-
-
-class SampleAggregatorTest(unittest.TestCase):
-    """Unit tests the transformers.SampleAggregator class."""
-
-    def test_transform_buffer_respects_start_after_seconds_flag(self):
-        sample_aggregator = SampleAggregator(start_after_seconds=1.0)
-        sample_aggregator._transform_buffer([
-            HvpmReading([1.41421356237, 0, 0, 0, 0], 0.00),
-            HvpmReading([2.71828182846, 0, 0, 0, 0], 0.99),
-            HvpmReading([3.14159265359, 0, 0, 0, 0], 1.00),
-        ])
-
-        self.assertEqual(sample_aggregator.num_samples, 1)
-        self.assertEqual(sample_aggregator.sum_currents, 3.14159265359)
-
-    def test_transform_buffer_sums_currents(self):
-        sample_aggregator = SampleAggregator()
-        sample_aggregator._transform_buffer([
-            HvpmReading([1.41421356237, 0, 0, 0, 0], 0.01),
-            HvpmReading([2.71828182846, 0, 0, 0, 0], 0.99),
-            HvpmReading([3.14159265359, 0, 0, 0, 0], 1.00),
-        ])
-
-        self.assertEqual(sample_aggregator.num_samples, 3)
-        self.assertAlmostEqual(sample_aggregator.sum_currents, 7.27408804442)
-
-
-class DownSamplerTest(unittest.TestCase):
-    """Unit tests the DownSampler class."""
-
-    def test_transform_buffer_downsamples_without_leftovers(self):
-        downsampler = DownSampler(2)
-        buffer = [
-            HvpmReading([2, 0, 0, 0, 0], .01),
-            HvpmReading([4, 0, 0, 0, 0], .03),
-            HvpmReading([6, 0, 0, 0, 0], .05),
-            HvpmReading([8, 0, 0, 0, 0], .07),
-            HvpmReading([10, 0, 0, 0, 0], .09),
-            HvpmReading([12, 0, 0, 0, 0], .011),
-        ]
-
-        values = downsampler._transform_buffer(buffer)
-
-        self.assertEqual(len(values), len(buffer) / 2)
-        for i, down_sample in enumerate(values):
-            self.assertAlmostEqual(
-                down_sample.main_current,
-                ((buffer[2 * i] + buffer[2 * i + 1]) / 2).main_current)
-
-    def test_transform_stores_unused_values_in_leftovers(self):
-        downsampler = DownSampler(3)
-        buffer = [
-            HvpmReading([2, 0, 0, 0, 0], .01),
-            HvpmReading([4, 0, 0, 0, 0], .03),
-            HvpmReading([6, 0, 0, 0, 0], .05),
-            HvpmReading([8, 0, 0, 0, 0], .07),
-            HvpmReading([10, 0, 0, 0, 0], .09),
-        ]
-
-        downsampler._transform_buffer(buffer)
-
-        self.assertEqual(len(downsampler._leftovers), 2)
-        self.assertIn(buffer[-2], downsampler._leftovers)
-        self.assertIn(buffer[-1], downsampler._leftovers)
-
-    def test_transform_uses_leftovers_on_next_calculation(self):
-        downsampler = DownSampler(3)
-        starting_leftovers = [
-            HvpmReading([2, 0, 0, 0, 0], .01),
-            HvpmReading([4, 0, 0, 0, 0], .03),
-        ]
-        downsampler._leftovers = starting_leftovers
-        buffer = [
-            HvpmReading([6, 0, 0, 0, 0], .05),
-            HvpmReading([8, 0, 0, 0, 0], .07),
-            HvpmReading([10, 0, 0, 0, 0], .09),
-            HvpmReading([12, 0, 0, 0, 0], .011)
-        ]
-
-        values = downsampler._transform_buffer(buffer)
-
-        self.assertEqual(len(values), 2)
-        self.assertNotIn(starting_leftovers[0], downsampler._leftovers)
-        self.assertNotIn(starting_leftovers[1], downsampler._leftovers)
-
-        self.assertAlmostEqual(
-            values[0].main_current,
-            statistics.mean([
-                starting_leftovers[0].main_current,
-                starting_leftovers[1].main_current,
-                buffer[0].main_current,
-            ]))
-        self.assertAlmostEqual(
-            values[1].main_current,
-            statistics.mean([
-                buffer[1].main_current,
-                buffer[2].main_current,
-                buffer[3].main_current,
-            ]))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/monsoon_test.py b/src/antlion/unit_tests/controllers/monsoon_test.py
deleted file mode 100755
index 858ac3a..0000000
--- a/src/antlion/unit_tests/controllers/monsoon_test.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import mock
-
-from antlion.controllers import monsoon
-from antlion.controllers.monsoon_lib.api.hvpm.monsoon import Monsoon as HvpmMonsoon
-from antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon import Monsoon as LvpmStockMonsoon
-
-
-@mock.patch('antlion.controllers.monsoon_lib.api.lvpm_stock.monsoon.MonsoonProxy')
-@mock.patch('antlion.controllers.monsoon_lib.api.hvpm.monsoon.HVPM')
-class MonsoonTest(unittest.TestCase):
-    """Tests the antlion.controllers.iperf_client module functions."""
-    def test_create_can_create_lvpm_from_id_only(self, *_):
-        monsoons = monsoon.create([12345])
-        self.assertIsInstance(monsoons[0], LvpmStockMonsoon)
-
-    def test_create_can_create_lvpm_from_dict(self, *_):
-        monsoons = monsoon.create([{'type': 'LvpmStockMonsoon', 'serial': 10}])
-        self.assertIsInstance(monsoons[0], LvpmStockMonsoon)
-        self.assertEqual(monsoons[0].serial, 10)
-
-    def test_create_can_create_hvpm_from_id_only(self, *_):
-        monsoons = monsoon.create([23456])
-        self.assertIsInstance(monsoons[0], HvpmMonsoon)
-
-    def test_create_can_create_hvpm_from_dict(self, *_):
-        monsoons = monsoon.create([{'type': 'HvpmMonsoon', 'serial': 10}])
-        self.assertIsInstance(monsoons[0], HvpmMonsoon)
-        self.assertEqual(monsoons[0].serial, 10)
-
-    def test_raises_error_if_monsoon_type_is_unknown(self, *_):
-        with self.assertRaises(ValueError):
-            monsoon.create([{'type': 'UNKNOWN', 'serial': 10}])
-
-    def test_raises_error_if_monsoon_serial_not_provided(self, *_):
-        with self.assertRaises(ValueError):
-            monsoon.create([{'type': 'LvpmStockMonsoon'}])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py b/src/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
deleted file mode 100644
index 3cd0b85..0000000
--- a/src/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
+++ /dev/null
@@ -1,124 +0,0 @@
-#!/usr/bin python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Python unittest module for pdu_lib.synaccess.np02b"""
-
-import unittest
-from unittest.mock import patch
-
-from antlion.controllers.pdu import PduError
-from antlion.controllers.pdu_lib.synaccess.np02b import _TNHelperNP02B, PduDevice
-
-# Test Constants
-HOST = '192.168.1.2'
-VALID_COMMAND_STR = 'cmd'
-VALID_COMMAND_BYTE_STR = b'cmd\n\r'
-VALID_RESPONSE_STR = ''
-VALID_RESPONSE_BYTE_STR = b'\n\r\r\n\r\n'
-STATUS_COMMAND_STR = 'pshow'
-STATUS_COMMAND_BYTE_STR = b'pshow\n\r'
-STATUS_RESPONSE_STR = (
-    'Port | Name    |Status   1 |    Outlet1 |   OFF|   2 |    Outlet2 |   ON |'
-)
-STATUS_RESPONSE_BYTE_STR = (
-    b'Port | Name    |Status   1 |    Outlet1 |   OFF|   2 |    Outlet2 |   '
-    b'ON |\n\r\r\n\r\n')
-INVALID_COMMAND_OUTPUT_BYTE_STR = b'Invalid Command\n\r\r\n\r\n>'
-VALID_STATUS_DICT = {'1': False, '2': True}
-INVALID_STATUS_DICT = {'1': False, '2': False}
-
-
-class _TNHelperNP02BTest(unittest.TestCase):
-    """Unit tests for _TNHelperNP02B."""
-
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.time.sleep')
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.telnetlib')
-    def test_cmd_is_properly_written(self, telnetlib_mock, sleep_mock):
-        """cmd should strip whitespace and encode in ASCII."""
-        tnhelper = _TNHelperNP02B(HOST)
-        telnetlib_mock.Telnet().read_until.return_value = (
-            VALID_RESPONSE_BYTE_STR)
-        res = tnhelper.cmd(VALID_COMMAND_STR)
-        telnetlib_mock.Telnet().write.assert_called_with(
-            VALID_COMMAND_BYTE_STR)
-
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.time.sleep')
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.telnetlib')
-    def test_cmd_valid_command_output_is_properly_parsed(
-            self, telnetlib_mock, sleep_mock):
-        """cmd should strip the prompt, separators and command from the
-        output."""
-        tnhelper = _TNHelperNP02B(HOST)
-        telnetlib_mock.Telnet().read_until.return_value = (
-            VALID_RESPONSE_BYTE_STR)
-        res = tnhelper.cmd(VALID_COMMAND_STR)
-        self.assertEqual(res, VALID_RESPONSE_STR)
-
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.time.sleep')
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.telnetlib')
-    def test_cmd_status_output_is_properly_parsed(self, telnetlib_mock,
-                                                  sleep_mock):
-        """cmd should strip the prompt, separators and command from the output,
-        returning just the status information."""
-        tnhelper = _TNHelperNP02B(HOST)
-        telnetlib_mock.Telnet().read_until.return_value = (
-            STATUS_RESPONSE_BYTE_STR)
-        res = tnhelper.cmd(STATUS_COMMAND_STR)
-        self.assertEqual(res, STATUS_RESPONSE_STR)
-
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.time.sleep')
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.telnetlib')
-    def test_cmd_invalid_command_raises_error(self, telnetlib_mock,
-                                              sleep_mock):
-        """cmd should raise PduError when an invalid command is given."""
-        tnhelper = _TNHelperNP02B(HOST)
-        telnetlib_mock.Telnet().read_until.return_value = (
-            INVALID_COMMAND_OUTPUT_BYTE_STR)
-        with self.assertRaises(PduError):
-            res = tnhelper.cmd('Some invalid command.')
-
-
-class NP02BPduDeviceTest(unittest.TestCase):
-    """Unit tests for NP02B PduDevice implementation."""
-
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd')
-    def test_status_parses_output_to_valid_dictionary(self, tnhelper_cmd_mock):
-        """status should parse helper response correctly into dict."""
-        np02b = PduDevice(HOST, None, None)
-        tnhelper_cmd_mock.return_value = STATUS_RESPONSE_STR
-        self.assertEqual(np02b.status(), VALID_STATUS_DICT)
-
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd')
-    def test_verify_state_matches_state(self, tnhelper_cmd_mock):
-        """verify_state should return true when expected state is a subset of
-        actual state"""
-        np02b = PduDevice(HOST, None, None)
-        tnhelper_cmd_mock.return_value = STATUS_RESPONSE_STR
-        self.assertTrue(np02b._verify_state(VALID_STATUS_DICT))
-
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b.time')
-    @patch('antlion.controllers.pdu_lib.synaccess.np02b._TNHelperNP02B.cmd')
-    def test_verify_state_throws_error(self, tnhelper_cmd_mock, time_mock):
-        """verify_state should throw error after timeout when actual state never
-        reaches expected state"""
-        time_mock.time.side_effect = [1, 2, 10]
-        np02b = PduDevice(HOST, None, None)
-        tnhelper_cmd_mock.return_value = STATUS_RESPONSE_STR
-        with self.assertRaises(PduError):
-            self.assertTrue(np02b._verify_state(INVALID_STATUS_DICT))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/power_metrics_test.py b/src/antlion/unit_tests/controllers/power_metrics_test.py
deleted file mode 100644
index ba47b2f..0000000
--- a/src/antlion/unit_tests/controllers/power_metrics_test.py
+++ /dev/null
@@ -1,187 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import statistics
-import unittest
-from unittest import mock
-from unittest.mock import patch
-
-from antlion.controllers import power_metrics
-from antlion.controllers.power_metrics import CURRENT
-from antlion.controllers.power_metrics import END_TIMESTAMP
-from antlion.controllers.power_metrics import HOUR
-from antlion.controllers.power_metrics import Metric
-from antlion.controllers.power_metrics import MILLIAMP
-from antlion.controllers.power_metrics import MINUTE
-from antlion.controllers.power_metrics import PowerMetrics
-from antlion.controllers.power_metrics import START_TIMESTAMP
-from antlion.controllers.power_metrics import TIME
-from antlion.controllers.power_metrics import WATT
-
-FAKE_UNIT_TYPE = 'fake_unit'
-FAKE_UNIT = 'F'
-
-
-class MeasurementTest(unittest.TestCase):
-    """Unit tests for the Measurement class."""
-
-    def test_init_with_valid_unit_type(self):
-        """Test that a Measurement is properly initialized given a valid unit
-        type.
-        """
-        measurement = Metric(2, CURRENT, MILLIAMP)
-        self.assertEqual(measurement.value, 2)
-        self.assertEqual(measurement.unit, MILLIAMP)
-
-    def test_init_with_invalid_unit_type(self):
-        """Test that __init__ raises an error if given an invalid unit type."""
-        with self.assertRaisesRegex(TypeError, 'valid unit type'):
-            measurement = Metric(2, FAKE_UNIT_TYPE, FAKE_UNIT)
-
-    def test_unit_conversion(self):
-        """Test that to_unit correctly converts value and unit."""
-        ratio = 1000
-        current_amps = Metric.amps(15)
-        current_milliamps = current_amps.to_unit(MILLIAMP)
-        self.assertEqual(current_milliamps.value / current_amps.value, ratio)
-
-    def test_unit_conversion_with_wrong_type(self):
-        """Test that to_unit raises and error if incompatible unit type is
-        specified.
-        """
-        current_amps = Metric.amps(3.4)
-        with self.assertRaisesRegex(TypeError, 'Incompatible units'):
-            power_watts = current_amps.to_unit(WATT)
-
-    def test_comparison_operators(self):
-        """Test that the comparison operators work as intended."""
-        # time_a == time_b < time_c
-        time_a = Metric.seconds(120)
-        time_b = Metric(2, TIME, MINUTE)
-        time_c = Metric(0.1, TIME, HOUR)
-
-        self.assertEqual(time_a, time_b)
-        self.assertEqual(time_b, time_a)
-        self.assertLessEqual(time_a, time_b)
-        self.assertGreaterEqual(time_a, time_b)
-
-        self.assertNotEqual(time_a, time_c)
-        self.assertNotEqual(time_c, time_a)
-        self.assertLess(time_a, time_c)
-        self.assertLessEqual(time_a, time_c)
-        self.assertGreater(time_c, time_a)
-        self.assertGreaterEqual(time_c, time_a)
-
-    def test_arithmetic_operators(self):
-        """Test that the addition and subtraction operators work as intended"""
-        time_a = Metric(3, TIME, HOUR)
-        time_b = Metric(90, TIME, MINUTE)
-
-        sum_ = time_a + time_b
-        self.assertEqual(sum_.value, 4.5)
-        self.assertEqual(sum_.unit, HOUR)
-
-        sum_reversed = time_b + time_a
-        self.assertEqual(sum_reversed.value, 270)
-        self.assertEqual(sum_reversed.unit, MINUTE)
-
-        diff = time_a - time_b
-        self.assertEqual(diff.value, 1.5)
-        self.assertEqual(diff.unit, HOUR)
-
-        diff_reversed = time_b - time_a
-        self.assertEqual(diff_reversed.value, -90)
-        self.assertEqual(diff_reversed.unit, MINUTE)
-
-
-class PowerMetricsTest(unittest.TestCase):
-    """Unit tests for the PowerMetrics class."""
-
-    SAMPLES = [0.13, 0.95, 0.32, 4.84, 2.48, 4.11, 4.85, 4.88, 4.22, 2.2]
-    RAW_DATA = list(zip(range(10), SAMPLES))
-    VOLTAGE = 4.2
-
-    def setUp(self):
-        self.power_metrics = PowerMetrics(self.VOLTAGE)
-
-    def test_import_raw_data(self):
-        """Test that power metrics can be loaded from file. Simply ensure that
-        the number of samples is correct."""
-
-        imported_data = power_metrics.import_raw_data(
-            os.path.join(os.path.dirname(__file__),
-                         'data/sample_monsoon_data')
-        )
-
-        count = 0
-        for _, __ in imported_data:
-            count = count + 1
-        self.assertEqual(count, 10)
-
-    @patch('antlion.controllers.power_metrics.PowerMetrics')
-    def test_split_by_test_with_timestamps(self, mock_power_metric_type):
-        """Test that given test timestamps, a power metric is generated from
-        a subset of samples corresponding to the test."""
-        timestamps = {'sample_test': {START_TIMESTAMP: 3500,
-                                      END_TIMESTAMP:   8500}}
-
-        mock_power_metric = mock.Mock()
-        mock_power_metric_type.side_effect = lambda v: mock_power_metric
-        power_metrics.generate_test_metrics(self.RAW_DATA,
-                                            timestamps=timestamps,
-                                            voltage=self.VOLTAGE)
-
-        self.assertEqual(mock_power_metric.update_metrics.call_count, 5)
-
-    def test_incomplete_timestamps_are_ignored(self):
-        """Test that given incomplete timestamps, a power metric is generated from
-        a subset of samples corresponding to the test."""
-        sample_test = 'sample_test'
-        test_end = 13500
-        test_timestamps = {sample_test: {
-            END_TIMESTAMP: test_end}}
-        # no error expected
-        metrics = (
-            power_metrics.generate_test_metrics(self.RAW_DATA,
-                                                timestamps=test_timestamps,
-                                                voltage=self.VOLTAGE))
-
-
-    def test_numeric_metrics(self):
-        """Test that the numeric metrics have correct values."""
-        timestamps = {'sample_test': {START_TIMESTAMP: 0,
-                                      END_TIMESTAMP:   10000}}
-        metrics = power_metrics.generate_test_metrics(self.RAW_DATA,
-                                                      timestamps=timestamps,
-                                                      voltage=self.VOLTAGE)
-        metrics_as_dic = {m.name: m for m in metrics['sample_test']}
-        self.assertAlmostEqual(metrics_as_dic['avg_current'].value,
-                               statistics.mean(self.SAMPLES) * 1000)
-        self.assertAlmostEqual(metrics_as_dic['max_current'].value,
-                               max(self.SAMPLES) * 1000)
-        self.assertAlmostEqual(metrics_as_dic['min_current'].value,
-                               min(self.SAMPLES) * 1000)
-        self.assertAlmostEqual(
-            metrics_as_dic['stdev_current'].value,
-            statistics.stdev(self.SAMPLES) * 1000)
-        self.assertAlmostEqual(
-            self.power_metrics.avg_power.value,
-            self.power_metrics.avg_current.value * self.VOLTAGE)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/power_monitor_test.py b/src/antlion/unit_tests/controllers/power_monitor_test.py
deleted file mode 100644
index f62e6fa..0000000
--- a/src/antlion/unit_tests/controllers/power_monitor_test.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-from antlion.controllers import power_monitor
-
-
-class PowerMonitorTest(unittest.TestCase):
-
-    def test_registry_gets_updated(self):
-        power_monitor.update_registry({'a_key': 'a_value'})
-        self.assertTrue('a_key' in power_monitor.get_registry())
-        self.assertEqual('a_value', power_monitor.get_registry()['a_key'])
-
-    def test_registry_disallows_mutation(self):
-        power_monitor.update_registry({'a_key': 'a_value'})
-        with self.assertRaises(power_monitor.ResourcesRegistryError):
-            power_monitor.update_registry({'a_key': 'another_value'})
-
-    def test_registry_allows_updates_with_same_values(self):
-        power_monitor.update_registry({'a_key': 'a_value'})
-        power_monitor.update_registry({'a_key': 'a_value'})
-
-    def tearDown(self):
-        super().tearDown()
-        power_monitor._REGISTRY = {}
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/rohdeschwarz_lib/__init__.py b/src/antlion/unit_tests/controllers/rohdeschwarz_lib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/controllers/rohdeschwarz_lib/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/controllers/rohdeschwarz_lib/contest_test.py b/src/antlion/unit_tests/controllers/rohdeschwarz_lib/contest_test.py
deleted file mode 100644
index 05d57d7..0000000
--- a/src/antlion/unit_tests/controllers/rohdeschwarz_lib/contest_test.py
+++ /dev/null
@@ -1,283 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import logger
-from antlion import asserts
-import unittest
-from unittest import mock
-import socket
-import time
-from contextlib import closing
-
-# TODO(markdr): Remove this hack after adding zeep to setup.py.
-import sys
-sys.modules['zeep'] = mock.Mock()
-
-from antlion.controllers.rohdeschwarz_lib import contest
-
-
-def find_free_port():
-    """ Helper function to find a free port.
-    https://stackoverflow.com/a/45690594
-    """
-    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
-        s.bind(('', 0))
-        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-        return s.getsockname()[1]
-
-
-class ContestTest(unittest.TestCase):
-    """ Unit tests for the contest controller."""
-
-    LOCAL_HOST_IP = '127.0.0.1'
-
-    @classmethod
-    def setUpClass(self):
-        self.log = logger.create_tagged_trace_logger('contest_test')
-
-    def test_automation_server_end_to_end(self):
-        """ End to end test for the Contest object's ability to start an
-        Automation Server and respond to the commands sent through the
-        socket interface. """
-
-        automation_port = find_free_port()
-
-        # Instantiate the mock Contest object. This will start a thread in the
-        # background running the Automation server.
-        with mock.patch('zeep.client.Client') as zeep_client:
-
-            # Create a MagicMock instance
-            zeep_client.return_value = mock.MagicMock()
-
-            controller = contest.Contest(
-                logger=self.log,
-                remote_ip=None,
-                remote_port=None,
-                automation_listen_ip=self.LOCAL_HOST_IP,
-                automation_port=automation_port,
-                dut_on_func=None,
-                dut_off_func=None,
-                ftp_pwd=None,
-                ftp_usr=None)
-
-            # Give some time for the server to initialize as it's running on
-            # a different thread.
-            time.sleep(0.01)
-
-            # Start a socket connection and send a command
-            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
-                s.connect((self.LOCAL_HOST_IP, automation_port))
-                s.sendall(b'AtTestcaseStart')
-                s.settimeout(1.0)
-                data = s.recv(1024)
-                asserts.assert_true(data == b'OK\n', "Received OK response.")
-
-        controller.destroy()
-
-    def test_automation_protocol_calls_dut_off_func_for_on_command(self):
-        """ Tests the AutomationProtocol's ability to turn the DUT off
-        upon receiving the requests."""
-
-        dut_on_func = mock.Mock()
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), dut_on_func, mock.Mock())
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'DUT_SWITCH_ON')
-        asserts.assert_true(dut_on_func.called, 'Function was not called.')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    def test_automation_protocol_calls_dut_on_func_for_off_command(self):
-        """ Tests the Automation server's ability to turn the DUT on
-        upon receiving the requests."""
-
-        dut_off_func = mock.Mock()
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), mock.Mock(), dut_off_func)
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'DUT_SWITCH_OFF')
-        asserts.assert_true(dut_off_func.called, 'Function was not called.')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    def test_automation_protocol_handles_testcase_start_command(self):
-        """ Tests the Automation server's ability to handle a testcase start
-        command."""
-
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), mock.Mock(), None)
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'AtTestcaseStart name_of_the_testcase')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    def test_automation_protocol_handles_testplan_start_command(self):
-        """ Tests the Automation server's ability to handle a testplan start
-        command."""
-
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), mock.Mock(), None)
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'AtTestplanStart')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    def test_automation_protocol_handles_testcase_end_command(self):
-        """ Tests the Automation server's ability to handle a testcase end
-        command."""
-
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), mock.Mock(), None)
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'AfterTestcase')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    def test_automation_protocol_handles_testplan_end_command(self):
-        """ Tests the Automation server's ability to handle a testplan start
-        command."""
-
-        protocol = contest.AutomationServer.AutomationProtocol(
-            mock.Mock(), mock.Mock(), None)
-        protocol.send_ok = mock.Mock()
-        protocol.data_received(b'AfterTestplan')
-        asserts.assert_true(protocol.send_ok.called, 'OK response not sent.')
-
-    # Makes all time.sleep commands call a mock function that returns
-    # immediately, rather than sleeping.
-    @mock.patch('time.sleep')
-    # Prevents the controller to try to download the results from the FTP server
-    @mock.patch('antlion.controllers.rohdeschwarz_lib.contest'
-                '.Contest.pull_test_results')
-    def test_execute_testplan_stops_reading_output_on_exit_line(
-            self, time_mock, results_func_mock):
-        """ Makes sure that execute_test plan returns after receiving an
-        exit code.
-
-        Args:
-            time_mock: time.sleep mock object.
-            results_func_mock: Contest.pull_test_results mock object.
-        """
-
-        service_output = mock.Mock()
-        # An array of what return values. If a value is an Exception, the
-        # Exception is raised instead.
-        service_output.side_effect = [
-            'Output line 1\n', 'Output line 2\n',
-            'Testplan Directory: \\\\a\\b\\c\n'
-            'Exit code: 0\n',
-            AssertionError('Tried to read output after exit code was sent.')
-        ]
-
-        with mock.patch('zeep.client.Client') as zeep_client:
-            zeep_client.return_value.service.DoGetOutput = service_output
-            controller = contest.Contest(logger=self.log,
-                                         remote_ip=None,
-                                         remote_port=None,
-                                         automation_listen_ip=None,
-                                         automation_port=None,
-                                         dut_on_func=None,
-                                         dut_off_func=None,
-                                         ftp_usr=None,
-                                         ftp_pwd=None)
-
-        controller.execute_testplan('TestPlan')
-        controller.destroy()
-
-    # Makes all time.sleep commands call a mock function that returns
-    # immediately, rather than sleeping.
-    @mock.patch('time.sleep')
-    # Prevents the controller to try to download the results from the FTP server
-    @mock.patch.object(contest.Contest, 'pull_test_results')
-    def test_execute_testplan_detects_results_directory(
-            self, time_mock, results_func_mock):
-        """ Makes sure that execute_test is able to detect the testplan
-        directory from the test output.
-
-        Args:
-            time_mock: time.sleep mock object.
-            results_func_mock: Contest.pull_test_results mock object.
-        """
-
-        results_directory = 'results\directory\\name'
-
-        service_output = mock.Mock()
-        # An array of what return values. If a value is an Exception, the
-        # Exception is raised instead.
-        service_output.side_effect = [
-            'Testplan Directory: {}{}\\ \n'.format(contest.Contest.FTP_ROOT,
-                                                   results_directory),
-            'Exit code: 0\n'
-        ]
-
-        with mock.patch('zeep.client.Client') as zeep_client:
-            zeep_client.return_value.service.DoGetOutput = service_output
-            controller = contest.Contest(logger=self.log,
-                                         remote_ip=None,
-                                         remote_port=None,
-                                         automation_listen_ip=None,
-                                         automation_port=None,
-                                         dut_on_func=None,
-                                         dut_off_func=None,
-                                         ftp_usr=None,
-                                         ftp_pwd=None)
-
-        controller.execute_testplan('TestPlan')
-
-        controller.pull_test_results.assert_called_with(results_directory)
-        controller.destroy()
-
-    # Makes all time.sleep commands call a mock function that returns
-    # immediately, rather than sleeping.
-    @mock.patch('time.sleep')
-    # Prevents the controller to try to download the results from the FTP server
-    @mock.patch.object(contest.Contest, 'pull_test_results')
-    def test_execute_testplan_fails_when_contest_is_unresponsive(
-            self, time_mock, results_func_mock):
-        """ Makes sure that execute_test plan returns after receiving an
-        exit code.
-
-        Args:
-            time_mock: time.sleep mock object.
-            results_func_mock: Contest.pull_test_results mock object.
-        """
-
-        service_output = mock.Mock()
-        # An array of what return values. If a value is an Exception, the
-        # Exception is raised instead.
-        mock_output = [None] * contest.Contest.MAXIMUM_OUTPUT_READ_RETRIES
-        mock_output.append(
-            AssertionError('Test did not failed after too many '
-                           'unsuccessful retries.'))
-        service_output.side_effect = mock_output
-
-        with mock.patch('zeep.client.Client') as zeep_client:
-            zeep_client.return_value.service.DoGetOutput = service_output
-            controller = contest.Contest(logger=self.log,
-                                         remote_ip=None,
-                                         remote_port=None,
-                                         automation_listen_ip=None,
-                                         automation_port=None,
-                                         dut_on_func=None,
-                                         dut_off_func=None,
-                                         ftp_usr=None,
-                                         ftp_pwd=None)
-
-        try:
-            controller.execute_testplan('TestPlan')
-        except RuntimeError:
-            pass
-
-        controller.destroy()
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py b/src/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
deleted file mode 100755
index 1714233..0000000
--- a/src/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-
-import mock
-
-from antlion.controllers.sl4a_lib import rpc_client
-
-
-class BreakoutError(Exception):
-    """Thrown to prove program execution."""
-
-
-class RpcClientTest(unittest.TestCase):
-    """Tests the rpc_client.RpcClient class."""
-
-    def test_terminate_warn_on_working_connections(self):
-        """Tests rpc_client.RpcClient.terminate().
-
-        Tests that if some connections are still working, we log this before
-        closing the connections.
-        """
-        session = mock.Mock()
-
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
-        client._log = mock.Mock()
-        client._working_connections = [mock.Mock()]
-
-        client.terminate()
-
-        self.assertTrue(client._log.warning.called)
-
-    def test_terminate_closes_all_connections(self):
-        """Tests rpc_client.RpcClient.terminate().
-
-        Tests that all free and working connections have been closed.
-        """
-        session = mock.Mock()
-
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
-        client._log = mock.Mock()
-        working_connections = [mock.Mock() for _ in range(3)]
-        free_connections = [mock.Mock() for _ in range(3)]
-        client._free_connections = free_connections
-        client._working_connections = working_connections
-
-        client.terminate()
-
-        for connection in working_connections + free_connections:
-            self.assertTrue(connection.close.called)
-
-    def test_get_free_connection_get_available_client(self):
-        """Tests rpc_client.RpcClient._get_free_connection().
-
-        Tests that an available client is returned if one exists.
-        """
-
-        def fail_on_wrong_execution():
-            self.fail('The program is not executing the expected path. '
-                      'Tried to return an available free client, ended up '
-                      'sleeping to wait for client instead.')
-
-        session = mock.Mock()
-
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
-        expected_connection = mock.Mock()
-        client._free_connections = [expected_connection]
-        client._lock = mock.MagicMock()
-
-        with mock.patch('time.sleep') as sleep_mock:
-            sleep_mock.side_effect = fail_on_wrong_execution
-
-            connection = client._get_free_connection()
-
-        self.assertEqual(connection, expected_connection)
-        self.assertTrue(expected_connection in client._working_connections)
-        self.assertEqual(len(client._free_connections), 0)
-
-    def test_get_free_connection_continues_upon_connection_taken(self):
-        """Tests rpc_client.RpcClient._get_free_connection().
-
-        Tests that if the free connection is taken while trying to acquire the
-        lock to reserve it, the thread gives up the lock and tries again.
-        """
-
-        def empty_list():
-            client._free_connections.clear()
-
-        def fail_on_wrong_execution():
-            self.fail('The program is not executing the expected path. '
-                      'Tried to return an available free client, ended up '
-                      'sleeping to wait for client instead.')
-
-        session = mock.Mock()
-
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
-        client._free_connections = mock.Mock()
-        client._lock = mock.MagicMock()
-        client._lock.acquire.side_effect = empty_list
-        client._free_connections = [mock.Mock()]
-
-        with mock.patch('time.sleep') as sleep_mock:
-            sleep_mock.side_effect = fail_on_wrong_execution
-
-            try:
-                client._get_free_connection()
-            except IndexError:
-                self.fail('Tried to pop free connection when another thread'
-                          'has taken it.')
-        # Assert that the lock has been freed.
-        self.assertEqual(client._lock.acquire.call_count,
-                         client._lock.release.call_count)
-
-    def test_get_free_connection_sleep(self):
-        """Tests rpc_client.RpcClient._get_free_connection().
-
-        Tests that if the free connection is taken, it will wait for a new one.
-        """
-
-        session = mock.Mock()
-
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
-        client._free_connections = []
-        client.max_connections = 0
-        client._lock = mock.MagicMock()
-        client._free_connections = []
-
-        with mock.patch('time.sleep') as sleep_mock:
-            sleep_mock.side_effect = BreakoutError()
-            try:
-                client._get_free_connection()
-            except BreakoutError:
-                # Assert that the lock has been freed.
-                self.assertEqual(client._lock.acquire.call_count,
-                                 client._lock.release.call_count)
-                # Asserts that the sleep has been called.
-                self.assertTrue(sleep_mock.called)
-                # Asserts that no changes to connections happened
-                self.assertEqual(len(client._free_connections), 0)
-                self.assertEqual(len(client._working_connections), 0)
-                return True
-        self.fail('Failed to hit sleep case')
-
-    def test_release_working_connection(self):
-        """Tests rpc_client.RpcClient._release_working_connection.
-
-        Tests that the working connection is moved into the free connections.
-        """
-        session = mock.Mock()
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
-
-        connection = mock.Mock()
-        client._working_connections = [connection]
-        client._free_connections = []
-        client._release_working_connection(connection)
-
-        self.assertTrue(connection in client._free_connections)
-        self.assertFalse(connection in client._working_connections)
-
-    def test_future(self):
-        """Tests rpc_client.RpcClient.future.
-
-        """
-        session = mock.Mock()
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
-
-        self.assertEqual(client.future, client._async_client)
-
-    def test_getattr(self):
-        """Tests rpc_client.RpcClient.__getattr__.
-
-        Tests that the name, args, and kwargs are correctly passed to self.rpc.
-        """
-        session = mock.Mock()
-        client = rpc_client.RpcClient(session.uid, session.adb.serial,
-                                      lambda _: mock.Mock(),
-                                      lambda _: mock.Mock())
-        client.rpc = mock.MagicMock()
-        fn = client.fake_function_please_do_not_be_implemented
-
-        fn('arg1', 'arg2', kwarg1=1, kwarg2=2)
-        client.rpc.assert_called_with(
-            'fake_function_please_do_not_be_implemented',
-            'arg1',
-            'arg2',
-            kwarg1=1,
-            kwarg2=2)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py b/src/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
deleted file mode 100755
index 35a3425..0000000
--- a/src/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
+++ /dev/null
@@ -1,149 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import mock
-import unittest
-
-from antlion.controllers.sl4a_lib import rpc_client, rpc_connection
-
-MOCK_RESP = b'{"id": 0, "result": 123, "error": null, "status": 1, "uid": 1}'
-MOCK_RESP_UNKNOWN_UID = b'{"id": 0, "result": 123, "error": null, "status": 0}'
-MOCK_RESP_WITH_ERROR = b'{"id": 0, "error": 1, "status": 1, "uid": 1}'
-
-
-class MockSocketFile(object):
-    def __init__(self, resp):
-        self.resp = resp
-        self.last_write = None
-
-    def write(self, msg):
-        self.last_write = msg
-
-    def readline(self):
-        return self.resp
-
-    def flush(self):
-        pass
-
-
-class RpcConnectionTest(unittest.TestCase):
-    """This test class has unit tests for the implementation of everything
-    under antlion.controllers.android, which is the RPC client module for sl4a.
-    """
-
-    @staticmethod
-    def mock_rpc_connection(response=MOCK_RESP,
-                            uid=rpc_connection.UNKNOWN_UID):
-        """Sets up a faked socket file from the mock connection."""
-        fake_file = MockSocketFile(response)
-        fake_conn = mock.MagicMock()
-        fake_conn.makefile.return_value = fake_file
-        adb = mock.Mock()
-        ports = mock.Mock()
-
-        return rpc_connection.RpcConnection(
-            adb, ports, fake_conn, fake_file, uid=uid)
-
-    def test_open_chooses_init_on_unknown_uid(self):
-        """Tests rpc_connection.RpcConnection.open().
-
-        Tests that open uses the init start command when the uid is unknown.
-        """
-
-        def pass_on_init(start_command):
-            if not start_command == rpc_connection.Sl4aConnectionCommand.INIT:
-                self.fail(
-                    'Must call "init". Called "%s" instead.' % start_command)
-
-        connection = self.mock_rpc_connection()
-        connection._initiate_handshake = pass_on_init
-        connection.open()
-
-    def test_open_chooses_continue_on_known_uid(self):
-        """Tests rpc_connection.RpcConnection.open().
-
-        Tests that open uses the continue start command when the uid is known.
-        """
-
-        def pass_on_continue(start_command):
-            if start_command != rpc_connection.Sl4aConnectionCommand.CONTINUE:
-                self.fail('Must call "continue". Called "%s" instead.' %
-                          start_command)
-
-        connection = self.mock_rpc_connection(uid=1)
-        connection._initiate_handshake = pass_on_continue
-        connection.open()
-
-    def test_initiate_handshake_returns_uid(self):
-        """Tests rpc_connection.RpcConnection._initiate_handshake().
-
-        Test that at the end of a handshake with no errors the client object
-        has the correct parameters.
-        """
-        connection = self.mock_rpc_connection()
-        connection._initiate_handshake(
-            rpc_connection.Sl4aConnectionCommand.INIT)
-
-        self.assertEqual(connection.uid, 1)
-
-    def test_initiate_handshake_returns_unknown_status(self):
-        """Tests rpc_connection.RpcConnection._initiate_handshake().
-
-        Test that when the handshake is given an unknown uid then the client
-        will not be given a uid.
-        """
-        connection = self.mock_rpc_connection(MOCK_RESP_UNKNOWN_UID)
-        connection._initiate_handshake(
-            rpc_connection.Sl4aConnectionCommand.INIT)
-
-        self.assertEqual(connection.uid, rpc_client.UNKNOWN_UID)
-
-    def test_initiate_handshake_no_response(self):
-        """Tests rpc_connection.RpcConnection._initiate_handshake().
-
-        Test that if a handshake receives no response then it will give a
-        protocol error.
-        """
-        connection = self.mock_rpc_connection(b'')
-
-        with self.assertRaises(
-                rpc_client.Sl4aProtocolError,
-                msg=rpc_client.Sl4aProtocolError.NO_RESPONSE_FROM_HANDSHAKE):
-            connection._initiate_handshake(
-                rpc_connection.Sl4aConnectionCommand.INIT)
-
-    def test_cmd_properly_formatted(self):
-        """Tests rpc_connection.RpcConnection._cmd().
-
-        Tests that the command sent is properly formatted.
-        """
-        connection = self.mock_rpc_connection(MOCK_RESP)
-        connection._cmd('test')
-        self.assertIn(
-            connection._socket_file.last_write,
-            [b'{"cmd": "test", "uid": -1}\n', b'{"uid": -1, "cmd": "test"}\n'])
-
-    def test_get_new_ticket(self):
-        """Tests rpc_connection.RpcConnection.get_new_ticket().
-
-        Tests that a new number is always given for get_new_ticket().
-        """
-        connection = self.mock_rpc_connection(MOCK_RESP)
-        self.assertEqual(connection.get_new_ticket() + 1,
-                         connection.get_new_ticket())
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py b/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
deleted file mode 100755
index 459f9a7..0000000
--- a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
+++ /dev/null
@@ -1,472 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import mock
-import unittest
-
-from antlion.controllers.sl4a_lib import sl4a_manager
-from antlion.controllers.sl4a_lib import rpc_client
-
-
-class Sl4aManagerFactoryTest(unittest.TestCase):
-    """Tests the sl4a_manager module-level functions."""
-
-    def setUp(self):
-        """Clears the Sl4aManager cache."""
-        sl4a_manager._all_sl4a_managers = {}
-
-    def test_create_manager(self):
-        """Tests sl4a_manager.create_sl4a_manager().
-
-        Tests that a new Sl4aManager is returned without an error.
-        """
-        adb = mock.Mock()
-        adb.serial = 'SERIAL'
-        sl4a_man = sl4a_manager.create_sl4a_manager(adb)
-        self.assertEqual(sl4a_man.adb, adb)
-
-    def test_create_sl4a_manager_return_already_created_manager(self):
-        """Tests sl4a_manager.create_sl4a_manager().
-
-        Tests that a second call to create_sl4a_manager() does not create a
-        new Sl4aManager, and returns the first created Sl4aManager instead.
-        """
-        adb = mock.Mock()
-        adb.serial = 'SERIAL'
-        first_manager = sl4a_manager.create_sl4a_manager(adb)
-
-        adb_same_serial = mock.Mock()
-        adb_same_serial.serial = 'SERIAL'
-        second_manager = sl4a_manager.create_sl4a_manager(adb)
-
-        self.assertEqual(first_manager, second_manager)
-
-    def test_create_sl4a_manager_multiple_devices_with_one_manager_each(self):
-        """Tests sl4a_manager.create_sl4a_manager().
-
-        Tests that when create_s4l4a_manager() is called for different devices,
-        each device gets its own Sl4aManager object.
-        """
-        adb_1 = mock.Mock()
-        adb_1.serial = 'SERIAL'
-        first_manager = sl4a_manager.create_sl4a_manager(adb_1)
-
-        adb_2 = mock.Mock()
-        adb_2.serial = 'DIFFERENT_SERIAL_NUMBER'
-        second_manager = sl4a_manager.create_sl4a_manager(adb_2)
-
-        self.assertNotEqual(first_manager, second_manager)
-
-
-class Sl4aManagerTest(unittest.TestCase):
-    """Tests the sl4a_manager.Sl4aManager class."""
-    ATTEMPT_INTERVAL = .25
-    MAX_WAIT_ON_SERVER_SECONDS = 1
-    _SL4A_LAUNCH_SERVER_CMD = ''
-    _SL4A_CLOSE_SERVER_CMD = ''
-    _SL4A_ROOT_FIND_PORT_CMD = ''
-    _SL4A_USER_FIND_PORT_CMD = ''
-    _SL4A_START_SERVICE_CMD = ''
-
-    @classmethod
-    def setUpClass(cls):
-        # Copy all module constants before testing begins.
-        Sl4aManagerTest.ATTEMPT_INTERVAL = \
-            sl4a_manager.ATTEMPT_INTERVAL
-        Sl4aManagerTest.MAX_WAIT_ON_SERVER_SECONDS = \
-            sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS
-        Sl4aManagerTest._SL4A_LAUNCH_SERVER_CMD = \
-            sl4a_manager._SL4A_LAUNCH_SERVER_CMD
-        Sl4aManagerTest._SL4A_CLOSE_SERVER_CMD = \
-            sl4a_manager._SL4A_CLOSE_SERVER_CMD
-        Sl4aManagerTest._SL4A_ROOT_FIND_PORT_CMD = \
-            sl4a_manager._SL4A_ROOT_FIND_PORT_CMD
-        Sl4aManagerTest._SL4A_USER_FIND_PORT_CMD = \
-            sl4a_manager._SL4A_USER_FIND_PORT_CMD
-        Sl4aManagerTest._SL4A_START_SERVICE_CMD = \
-            sl4a_manager._SL4A_START_SERVICE_CMD
-
-    def setUp(self):
-        # Restore all module constants at the beginning of each test case.
-        sl4a_manager.ATTEMPT_INTERVAL = \
-            Sl4aManagerTest.ATTEMPT_INTERVAL
-        sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS = \
-            Sl4aManagerTest.MAX_WAIT_ON_SERVER_SECONDS
-        sl4a_manager._SL4A_LAUNCH_SERVER_CMD = \
-            Sl4aManagerTest._SL4A_LAUNCH_SERVER_CMD
-        sl4a_manager._SL4A_CLOSE_SERVER_CMD = \
-            Sl4aManagerTest._SL4A_CLOSE_SERVER_CMD
-        sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = \
-            Sl4aManagerTest._SL4A_ROOT_FIND_PORT_CMD
-        sl4a_manager._SL4A_USER_FIND_PORT_CMD = \
-            Sl4aManagerTest._SL4A_USER_FIND_PORT_CMD
-        sl4a_manager._SL4A_START_SERVICE_CMD = \
-            Sl4aManagerTest._SL4A_START_SERVICE_CMD
-
-        # Reset module data at the beginning of each test.
-        sl4a_manager._all_sl4a_managers = {}
-
-    def test_sl4a_ports_in_use(self):
-        """Tests sl4a_manager.Sl4aManager.sl4a_ports_in_use
-
-        Tests to make sure all server ports are returned with no duplicates.
-        """
-        adb = mock.Mock()
-        manager = sl4a_manager.Sl4aManager(adb)
-        session_1 = mock.Mock()
-        session_1.server_port = 12345
-        manager.sessions[1] = session_1
-        session_2 = mock.Mock()
-        session_2.server_port = 15973
-        manager.sessions[2] = session_2
-        session_3 = mock.Mock()
-        session_3.server_port = 12345
-        manager.sessions[3] = session_3
-        session_4 = mock.Mock()
-        session_4.server_port = 67890
-        manager.sessions[4] = session_4
-        session_5 = mock.Mock()
-        session_5.server_port = 75638
-        manager.sessions[5] = session_5
-
-        returned_ports = manager.sl4a_ports_in_use
-
-        # No duplicated ports.
-        self.assertEqual(len(returned_ports), len(set(returned_ports)))
-        # One call for each session
-        self.assertSetEqual(set(returned_ports), {12345, 15973, 67890, 75638})
-
-    @mock.patch('time.sleep', return_value=None)
-    def test_start_sl4a_server_uses_all_retries(self, _):
-        """Tests sl4a_manager.Sl4aManager.start_sl4a_server().
-
-        Tests to ensure that _start_sl4a_server retries and successfully returns
-        a port.
-        """
-        adb = mock.Mock()
-        adb.shell = lambda _, **kwargs: ''
-
-        side_effects = []
-        expected_port = 12345
-        for _ in range(int(sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS /
-                           sl4a_manager.ATTEMPT_INTERVAL) - 1):
-            side_effects.append(None)
-        side_effects.append(expected_port)
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        manager._get_open_listening_port = mock.Mock(side_effect=side_effects)
-        try:
-            found_port = manager.start_sl4a_server(0)
-            self.assertTrue(found_port)
-        except rpc_client.Sl4aConnectionError:
-            self.fail('start_sl4a_server failed to respect FIND_PORT_RETRIES.')
-
-    @mock.patch('time.sleep', return_value=None)
-    def test_start_sl4a_server_fails_all_retries(self, _):
-        """Tests sl4a_manager.Sl4aManager.start_sl4a_server().
-
-        Tests to ensure that start_sl4a_server throws an error if all retries
-        fail.
-        """
-        adb = mock.Mock()
-        adb.shell = lambda _, **kwargs: ''
-
-        side_effects = []
-        for _ in range(int(sl4a_manager.MAX_WAIT_ON_SERVER_SECONDS /
-                           sl4a_manager.ATTEMPT_INTERVAL)):
-            side_effects.append(None)
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        manager._get_open_listening_port = mock.Mock(side_effect=side_effects)
-        try:
-            manager.start_sl4a_server(0)
-            self.fail('Sl4aConnectionError was not thrown.')
-        except rpc_client.Sl4aConnectionError:
-            pass
-
-    def test_get_all_ports_command_uses_root_cmd(self):
-        """Tests sl4a_manager.Sl4aManager._get_all_ports_command().
-
-        Tests that _get_all_ports_command calls the root command when root is
-        available.
-        """
-        adb = mock.Mock()
-        adb.is_root = lambda: True
-        command = 'ngo45hke3b4vie3mv5ni93,vfu3j'
-        sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = command
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        self.assertEqual(manager._get_all_ports_command(), command)
-
-    def test_get_all_ports_command_escalates_to_root(self):
-        """Tests sl4a_manager.Sl4aManager._call_get_ports_command().
-
-        Tests that _call_get_ports_command calls the root command when adb is
-        user but can escalate to root.
-        """
-        adb = mock.Mock()
-        adb.is_root = lambda: False
-        adb.ensure_root = lambda: True
-        command = 'ngo45hke3b4vie3mv5ni93,vfu3j'
-        sl4a_manager._SL4A_ROOT_FIND_PORT_CMD = command
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        self.assertEqual(manager._get_all_ports_command(), command)
-
-    def test_get_all_ports_command_uses_user_cmd(self):
-        """Tests sl4a_manager.Sl4aManager._call_get_ports_command().
-
-        Tests that _call_get_ports_command calls the user command when root is
-        unavailable.
-        """
-        adb = mock.Mock()
-        adb.is_root = lambda: False
-        adb.ensure_root = lambda: False
-        command = 'ngo45hke3b4vie3mv5ni93,vfu3j'
-        sl4a_manager._SL4A_USER_FIND_PORT_CMD = command
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        self.assertEqual(manager._get_all_ports_command(), command)
-
-    def test_get_open_listening_port_no_port_found(self):
-        """Tests sl4a_manager.Sl4aManager._get_open_listening_port().
-
-        Tests to ensure None is returned if no open port is found.
-        """
-        adb = mock.Mock()
-        adb.shell = lambda _: ''
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        self.assertIsNone(manager._get_open_listening_port())
-
-    def test_get_open_listening_port_no_new_port_found(self):
-        """Tests sl4a_manager.Sl4aManager._get_open_listening_port().
-
-        Tests to ensure None is returned if the ports returned have all been
-        marked as in used.
-        """
-        adb = mock.Mock()
-        adb.shell = lambda _: '12345 67890'
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        manager._sl4a_ports = {'12345', '67890'}
-        self.assertIsNone(manager._get_open_listening_port())
-
-    def test_get_open_listening_port_port_is_avaiable(self):
-        """Tests sl4a_manager.Sl4aManager._get_open_listening_port().
-
-        Tests to ensure a port is returned if a port is found and has not been
-        marked as used.
-        """
-        adb = mock.Mock()
-        adb.shell = lambda _: '12345 67890'
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        manager._sl4a_ports = {'12345'}
-        self.assertEqual(manager._get_open_listening_port(), 67890)
-
-    def test_is_sl4a_installed_is_true(self):
-        """Tests sl4a_manager.Sl4aManager.is_sl4a_installed().
-
-        Tests is_sl4a_installed() returns true when pm returns data
-        """
-        adb = mock.Mock()
-        adb.shell = lambda _, **kwargs: 'asdf'
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        self.assertTrue(manager.is_sl4a_installed())
-
-    def test_is_sl4a_installed_is_false(self):
-        """Tests sl4a_manager.Sl4aManager.is_sl4a_installed().
-
-        Tests is_sl4a_installed() returns true when pm returns data
-        """
-        adb = mock.Mock()
-        adb.shell = lambda _, **kwargs: ''
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        self.assertFalse(manager.is_sl4a_installed())
-
-    def test_start_sl4a_throws_error_on_sl4a_not_installed(self):
-        """Tests sl4a_manager.Sl4aManager.start_sl4a_service().
-
-        Tests that a MissingSl4aError is thrown when SL4A is not installed.
-        """
-        adb = mock.Mock()
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        manager.is_sl4a_installed = lambda: False
-        try:
-            manager.start_sl4a_service()
-            self.fail('An error should have been thrown.')
-        except rpc_client.Sl4aNotInstalledError:
-            pass
-
-    def test_start_sl4a_starts_sl4a_if_not_running(self):
-        """Tests sl4a_manager.Sl4aManager.start_sl4a_service().
-
-        Tests that SL4A is started if it was not already running.
-        """
-        adb = mock.Mock()
-        adb.shell = mock.Mock(side_effect=['', '', ''])
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        manager.is_sl4a_installed = lambda: True
-        try:
-            manager.start_sl4a_service()
-        except rpc_client.Sl4aNotInstalledError:
-            self.fail('An error should not have been thrown.')
-        adb.shell.assert_called_with(sl4a_manager._SL4A_START_SERVICE_CMD)
-
-    def test_create_session_uses_oldest_server_port(self):
-        """Tests sl4a_manager.Sl4aManager.create_session().
-
-        Tests that when no port is given, the oldest server port opened is used
-        as the server port for a new session. The oldest server port can be
-        found by getting the oldest session's server port.
-        """
-        adb = mock.Mock()
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        # Ignore starting SL4A.
-        manager.start_sl4a_service = lambda: None
-
-        session_1 = mock.Mock()
-        session_1.server_port = 12345
-        session_2 = mock.Mock()
-        session_2.server_port = 67890
-        session_3 = mock.Mock()
-        session_3.server_port = 67890
-
-        manager.sessions[3] = session_3
-        manager.sessions[1] = session_1
-        manager.sessions[2] = session_2
-
-        with mock.patch.object(
-                rpc_client.RpcClient, '__init__', return_value=None):
-            created_session = manager.create_session()
-
-        self.assertEqual(created_session.server_port, session_1.server_port)
-
-    def test_create_session_uses_random_port_when_no_session_exists(self):
-        """Tests sl4a_manager.Sl4aManager.create_session().
-
-        Tests that when no port is given, and no SL4A server exists, the server
-        port for the session is set to 0.
-        """
-        adb = mock.Mock()
-
-        manager = sl4a_manager.create_sl4a_manager(adb)
-        # Ignore starting SL4A.
-        manager.start_sl4a_service = lambda: None
-
-        with mock.patch.object(
-                rpc_client.RpcClient, '__init__', return_value=None):
-            created_session = manager.create_session()
-
-        self.assertEqual(created_session.server_port, 0)
-
-    def test_terminate_all_session_call_terminate_on_all_sessions(self):
-        """Tests sl4a_manager.Sl4aManager.terminate_all_sessions().
-
-        Tests to see that the manager has called terminate on all sessions.
-        """
-        called_terminate_on = list()
-
-        def called_on(session):
-            called_terminate_on.append(session)
-
-        adb = mock.Mock()
-        manager = sl4a_manager.Sl4aManager(adb)
-
-        session_1 = mock.Mock()
-        session_1.terminate = lambda *args, **kwargs: called_on(session_1)
-        manager.sessions[1] = session_1
-        session_4 = mock.Mock()
-        session_4.terminate = lambda *args, **kwargs: called_on(session_4)
-        manager.sessions[4] = session_4
-        session_5 = mock.Mock()
-        session_5.terminate = lambda *args, **kwargs: called_on(session_5)
-        manager.sessions[5] = session_5
-
-        manager._get_all_ports = lambda: []
-        manager.terminate_all_sessions()
-        # No duplicates calls to terminate.
-        self.assertEqual(
-            len(called_terminate_on), len(set(called_terminate_on)))
-        # One call for each session
-        self.assertSetEqual(
-            set(called_terminate_on), {session_1, session_4, session_5})
-
-    def test_terminate_all_session_close_each_server(self):
-        """Tests sl4a_manager.Sl4aManager.terminate_all_sessions().
-
-        Tests to see that the manager has called terminate on all sessions.
-        """
-        closed_ports = list()
-
-        def close(command):
-            if str.isdigit(command):
-                closed_ports.append(command)
-            return ''
-
-        adb = mock.Mock()
-        adb.shell = close
-        sl4a_manager._SL4A_CLOSE_SERVER_CMD = '%s'
-        ports_to_close = {'12345', '67890', '24680', '13579'}
-
-        manager = sl4a_manager.Sl4aManager(adb)
-        manager._sl4a_ports = set(ports_to_close)
-        manager._get_all_ports = lambda: []
-        manager.terminate_all_sessions()
-
-        # No duplicate calls to close port
-        self.assertEqual(len(closed_ports), len(set(closed_ports)))
-        # One call for each port
-        self.assertSetEqual(ports_to_close, set(closed_ports))
-
-    def test_obtain_sl4a_server_starts_new_server(self):
-        """Tests sl4a_manager.Sl4aManager.obtain_sl4a_server().
-
-        Tests that a new server can be returned if the server does not exist.
-        """
-        adb = mock.Mock()
-        manager = sl4a_manager.Sl4aManager(adb)
-        manager.start_sl4a_server = mock.Mock()
-
-        manager.obtain_sl4a_server(0)
-
-        self.assertTrue(manager.start_sl4a_server.called)
-
-    @mock.patch(
-        'antlion.controllers.sl4a_lib.sl4a_manager.Sl4aManager.sl4a_ports_in_use',
-        new_callable=mock.PropertyMock)
-    def test_obtain_sl4a_server_returns_existing_server(
-            self, sl4a_ports_in_use):
-        """Tests sl4a_manager.Sl4aManager.obtain_sl4a_server().
-
-        Tests that an existing server is returned if it is already opened.
-        """
-        adb = mock.Mock()
-        manager = sl4a_manager.Sl4aManager(adb)
-        manager.start_sl4a_server = mock.Mock()
-        sl4a_ports_in_use.return_value = [12345]
-
-        ret = manager.obtain_sl4a_server(12345)
-
-        self.assertFalse(manager.start_sl4a_server.called)
-        self.assertEqual(12345, ret)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py b/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
deleted file mode 100755
index efa7073..0000000
--- a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import errno
-import mock
-from socket import timeout
-from socket import error as socket_error
-import unittest
-from mock import patch
-
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.sl4a_lib import sl4a_ports
-from antlion.controllers.sl4a_lib import rpc_client
-from antlion.controllers.sl4a_lib.rpc_client import Sl4aStartError
-from antlion.controllers.sl4a_lib.sl4a_session import Sl4aSession
-
-
-class Sl4aSessionTest(unittest.TestCase):
-    """Tests the Sl4aSession class."""
-
-    def test_is_alive_true_on_not_terminated(self):
-        """Tests Sl4aSession.is_alive.
-
-        Tests that the session is_alive when it has not been terminated.
-        """
-        session = mock.Mock()
-        session._terminated = False
-        session.is_alive = Sl4aSession.is_alive
-        self.assertNotEqual(session._terminated, session.is_alive)
-
-    def test_is_alive_false_on_terminated(self):
-        """Tests Sl4aSession.is_alive.
-
-        Tests that the session is_alive when it has not been terminated.
-        """
-        session = mock.Mock()
-        session._terminated = True
-        session.is_alive = Sl4aSession.is_alive
-        self.assertNotEqual(session._terminated, session.is_alive)
-
-    @patch('antlion.controllers.sl4a_lib.event_dispatcher.EventDispatcher')
-    def test_get_event_dispatcher_create_on_none(self, _):
-        """Tests Sl4aSession.get_event_dispatcher.
-
-        Tests that a new event_dispatcher is created if one does not exist.
-        """
-        session = mock.Mock()
-        session._event_dispatcher = None
-        ed = Sl4aSession.get_event_dispatcher(session)
-        self.assertTrue(session._event_dispatcher is not None)
-        self.assertEqual(session._event_dispatcher, ed)
-
-    def test_get_event_dispatcher_returns_existing_event_dispatcher(self):
-        """Tests Sl4aSession.get_event_dispatcher.
-
-        Tests that the existing event_dispatcher is returned.
-        """
-        session = mock.Mock()
-        session._event_dispatcher = 'Something that is not None'
-        ed = Sl4aSession.get_event_dispatcher(session)
-        self.assertEqual(session._event_dispatcher, ed)
-
-    def test_create_client_side_connection_hint_already_in_use(self):
-        """Tests Sl4aSession._create_client_side_connection().
-
-        Tests that if the hinted port is already in use, the function will
-        call itself with a hinted port of 0 (random).
-        """
-        session = mock.Mock()
-        session._create_client_side_connection = mock.Mock()
-        with mock.patch('socket.socket') as socket:
-            # Throw an error when trying to bind to the hinted port.
-            error = OSError()
-            error.errno = errno.EADDRINUSE
-            socket_instance = mock.Mock()
-            socket_instance.bind = mock.Mock()
-            socket_instance.bind.side_effect = error
-            socket.return_value = socket_instance
-
-            Sl4aSession._create_client_side_connection(
-                session, sl4a_ports.Sl4aPorts(1, 2, 3))
-
-        fn = session._create_client_side_connection
-        self.assertEqual(fn.call_count, 1)
-        # Asserts that the 1st argument (Sl4aPorts) sent to the function
-        # has a client port of 0.
-        self.assertEqual(fn.call_args_list[0][0][0].client_port, 0)
-
-    def test_create_client_side_connection_catches_timeout(self):
-        """Tests Sl4aSession._create_client_side_connection().
-
-        Tests that the function will raise an Sl4aConnectionError upon timeout.
-        """
-        session = mock.Mock()
-        session._create_client_side_connection = mock.Mock()
-        error = timeout()
-        with mock.patch('socket.socket') as socket:
-            # Throw an error when trying to bind to the hinted port.
-            socket_instance = mock.Mock()
-            socket_instance.connect = mock.Mock()
-            socket_instance.connect.side_effect = error
-            socket.return_value = socket_instance
-
-            with self.assertRaises(rpc_client.Sl4aConnectionError):
-                Sl4aSession._create_client_side_connection(
-                    session, sl4a_ports.Sl4aPorts(0, 2, 3))
-
-    def test_create_client_side_connection_hint_taken_during_fn(self):
-        """Tests Sl4aSession._create_client_side_connection().
-
-        Tests that the function will call catch an EADDRNOTAVAIL OSError and
-        call itself again, this time with a hinted port of 0 (random).
-        """
-        session = mock.Mock()
-        session._create_client_side_connection = mock.Mock()
-        error = socket_error()
-        error.errno = errno.EADDRNOTAVAIL
-        with mock.patch('socket.socket') as socket:
-            # Throw an error when trying to bind to the hinted port.
-            socket_instance = mock.Mock()
-            socket_instance.connect = mock.Mock()
-            socket_instance.connect.side_effect = error
-            socket.return_value = socket_instance
-
-            Sl4aSession._create_client_side_connection(
-                session, sl4a_ports.Sl4aPorts(0, 2, 3))
-
-        fn = session._create_client_side_connection
-        self.assertEqual(fn.call_count, 1)
-        # Asserts that the 1st argument (Sl4aPorts) sent to the function
-        # has a client port of 0.
-        self.assertEqual(fn.call_args_list[0][0][0].client_port, 0)
-
-    def test_create_client_side_connection_re_raises_uncaught_errors(self):
-        """Tests Sl4aSession._create_client_side_connection().
-
-        Tests that the function will re-raise any socket error that does not
-        have errno.EADDRNOTAVAIL.
-        """
-        session = mock.Mock()
-        session._create_client_side_connection = mock.Mock()
-        error = socket_error()
-        # Some error that isn't EADDRNOTAVAIL
-        error.errno = errno.ESOCKTNOSUPPORT
-        with mock.patch('socket.socket') as socket:
-            # Throw an error when trying to bind to the hinted port.
-            socket_instance = mock.Mock()
-            socket_instance.connect = mock.Mock()
-            socket_instance.connect.side_effect = error
-            socket.return_value = socket_instance
-
-            with self.assertRaises(socket_error):
-                Sl4aSession._create_client_side_connection(
-                    session, sl4a_ports.Sl4aPorts(0, 2, 3))
-
-    def test_terminate_only_closes_if_not_terminated(self):
-        """Tests Sl4aSession.terminate()
-
-        Tests that terminate only runs termination steps if the session has not
-        already been terminated.
-        """
-        session = mock.Mock()
-        session._terminate_lock = mock.MagicMock()
-        session._terminated = True
-        Sl4aSession.terminate(session)
-
-        self.assertFalse(session._event_dispatcher.close.called)
-        self.assertFalse(session.rpc_client.terminate.called)
-
-    def test_terminate_closes_session_first(self):
-        """Tests Sl4aSession.terminate()
-
-        Tests that terminate only runs termination steps if the session has not
-        already been terminated.
-        """
-        session = mock.Mock()
-        session._terminate_lock = mock.MagicMock()
-        session._terminated = True
-        Sl4aSession.terminate(session)
-
-        self.assertFalse(session._event_dispatcher.close.called)
-        self.assertFalse(session.rpc_client.terminate.called)
-
-    def test_create_forwarded_port(self):
-        """Tests Sl4aSession._create_forwarded_port returns the hinted port."""
-        mock_adb = mock.Mock()
-        mock_adb.get_version_number = lambda: 37
-        mock_adb.tcp_forward = lambda hinted_port, device_port: hinted_port
-        mock_session = mock.Mock()
-        mock_session.adb = mock_adb
-        mock_session.log = mock.Mock()
-
-        self.assertEqual(8080,
-                         Sl4aSession._create_forwarded_port(
-                             mock_session, 9999, 8080))
-
-    def test_create_forwarded_port_fail_once(self):
-        """Tests that _create_forwarded_port can return a non-hinted port.
-
-        This will only happen if the hinted port is already taken.
-        """
-        mock_adb = mock.Mock()
-        mock_adb.get_version_number = lambda: 37
-
-        mock_adb.tcp_forward = mock.Mock(
-            side_effect=AdbError('cmd', 'stdout', stderr='cannot bind listener',
-                                 ret_code=1))
-        mock_session = mock.MagicMock()
-        mock_session.adb = mock_adb
-        mock_session.log = mock.Mock()
-        mock_session._create_forwarded_port = lambda *args, **kwargs: 12345
-
-        self.assertEqual(12345,
-                         Sl4aSession._create_forwarded_port(mock_session, 9999,
-                                                            8080))
-
-    def test_create_forwarded_port_raises_if_adb_version_is_old(self):
-        """Tests that _create_forwarded_port raises if adb version < 37."""
-        mock_adb = mock.Mock()
-        mock_adb.get_version_number = lambda: 31
-        mock_adb.tcp_forward = lambda _, __: self.fail(
-            'Calling adb.tcp_forward despite ADB version being too old.')
-        mock_session = mock.Mock()
-        mock_session.adb = mock_adb
-        mock_session.log = mock.Mock()
-        with self.assertRaises(Sl4aStartError):
-            Sl4aSession._create_forwarded_port(mock_session, 9999, 0)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/event/decorators_test.py b/src/antlion/unit_tests/event/decorators_test.py
deleted file mode 100755
index 2cb1dfc..0000000
--- a/src/antlion/unit_tests/event/decorators_test.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-from unittest import TestCase
-
-from antlion.event import event_bus
-from antlion.event.decorators import register_instance_subscriptions
-from antlion.event.decorators import register_static_subscriptions
-from antlion.event.decorators import subscribe
-from antlion.event.decorators import subscribe_static
-from antlion.event.event import Event
-from antlion.event.subscription_handle import SubscriptionHandle
-from mock import Mock
-
-
-class DecoratorsTest(TestCase):
-    """Tests the decorators found in antlion.event.decorators."""
-
-    def test_subscribe_static_return_type(self):
-        """Tests that the subscribe_static is the correct type."""
-        mock = Mock()
-
-        @subscribe_static(type)
-        def test(_):
-            return mock
-
-        self.assertTrue(isinstance(test, SubscriptionHandle))
-
-    def test_subscribe_static_calling_the_function_returns_normally(self):
-        """Tests that functions decorated by subscribe_static can be called."""
-        static_mock = Mock()
-
-        @subscribe_static(type)
-        def test(_):
-            return static_mock
-
-        self.assertEqual(test(Mock()), static_mock)
-
-    class DummyClass(object):
-        def __init__(self):
-            self.mock = Mock()
-
-        @subscribe(type)
-        def test(self, _):
-            return self.mock
-
-    def test_subscribe_return_type(self):
-        """Tests that subscribe returns the correct type."""
-        dummy_class = DecoratorsTest.DummyClass()
-        self.assertTrue(isinstance(dummy_class.test, SubscriptionHandle))
-
-    def test_subscribe_calling_the_function_returns_normally(self):
-        """tests that functions decorated by subscribe can be called."""
-        dummy_class = DecoratorsTest.DummyClass()
-        self.assertEqual(dummy_class.test(''), dummy_class.mock)
-
-
-class DummyEvent(Event):
-    """A dummy event used for testing registered functions."""
-
-
-class RegisterStaticSubscriptionsTest(TestCase):
-
-    def test_register_static_subscriptions_returns_passed_in_object(self):
-        obj = Mock()
-        returned_value = register_static_subscriptions(obj)
-        self.assertEqual(obj, returned_value,
-                         'register_static_subscriptions returned a value other'
-                         'than the object passed in.')
-
-    def test_register_static_subscriptions_registers_properly(self):
-        @register_static_subscriptions
-        class RegisterStaticSubscriptionsClass(object):
-            captured_event = None
-
-            @staticmethod
-            @subscribe_static(DummyEvent)
-            def on_static_event(evt):
-                RegisterStaticSubscriptionsClass.captured_event = evt
-
-        event = DummyEvent()
-        event_bus.post(event)
-
-        self.assertEqual(event, RegisterStaticSubscriptionsClass.captured_event,
-                         'register_static_subscriptions did not subscribe '
-                         'RegisterStaticSubscriptionsClass.on_static_event.')
-
-
-class RegisterInstanceSubscriptionsTest(TestCase):
-
-    def test_register_instance_subscriptions_returns_passed_in_object(self):
-        class SomeClass(object):
-            pass
-
-        returned_value = register_instance_subscriptions(SomeClass)
-        self.assertEqual(SomeClass, returned_value,
-                         'register_instance_subscriptions returned a value '
-                         'other than the object passed in.')
-
-    def test_register_instance_subscriptions_registers_properly(self):
-        @register_instance_subscriptions
-        class RegisterInstanceSubscriptionsClass(object):
-            def __init__(self):
-                self.captured_event = None
-
-            @subscribe(DummyEvent)
-            def on_instance_event(self, evt):
-                self.captured_event = evt
-
-        instance = RegisterInstanceSubscriptionsClass()
-        event = DummyEvent()
-        event_bus.post(event)
-
-        self.assertEqual(
-            event, instance.captured_event,
-            'register_instance_subscriptions did not subscribe the instance '
-            'function RegisterInstanceSubscriptionsClass.on_instance_event.')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/event/event_bus_integration_test.py b/src/antlion/unit_tests/event/event_bus_integration_test.py
deleted file mode 100755
index 04f5e20..0000000
--- a/src/antlion/unit_tests/event/event_bus_integration_test.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import tempfile
-import unittest
-from unittest import TestCase
-
-import mobly.config_parser as mobly_config_parser
-
-from antlion.base_test import BaseTestClass
-from antlion.event import event_bus, subscription_bundle
-from antlion.event.decorators import subscribe, subscribe_static
-from antlion.event.event import Event
-from antlion.test_runner import TestRunner
-
-
-class TestClass(BaseTestClass):
-    instance_event_received = []
-    static_event_received = []
-
-    def __init__(self, configs):
-        import mock
-        self.log = mock.Mock()
-        with mock.patch('mobly.utils.create_dir'):
-            super().__init__(configs)
-
-    @subscribe(Event)
-    def subscribed_instance_member(self, event):
-        TestClass.instance_event_received.append(event)
-
-    @staticmethod
-    @subscribe_static(Event)
-    def subscribed_static_member(event):
-        TestClass.static_event_received.append(event)
-
-    def test_post_event(self):
-        event_bus.post(Event())
-
-
-class EventBusIntegrationTest(TestCase):
-    """Tests the EventBus E2E."""
-    def setUp(self):
-        """Clears the event bus of all state."""
-        self.called_event = False
-        event_bus._event_bus = event_bus._EventBus()
-        TestClass.instance_event_received = []
-        TestClass.static_event_received = []
-
-    def test_test_class_subscribed_fn_receives_event(self):
-        """Tests that TestClasses have their subscribed functions called."""
-        with tempfile.TemporaryDirectory() as tmp_dir:
-            test_run_config = mobly_config_parser.TestRunConfig()
-            test_run_config.testbed_name = 'SampleTestBed'
-            test_run_config.log_path = tmp_dir
-
-            TestRunner(test_run_config, [('TestClass', [])]).run(TestClass)
-
-        self.assertGreaterEqual(len(TestClass.instance_event_received), 1)
-        self.assertEqual(len(TestClass.static_event_received), 0)
-
-    def test_subscribe_static_bundles(self):
-        """Tests that @subscribe_static bundles register their listeners."""
-        bundle = subscription_bundle.create_from_static(TestClass)
-        bundle.register()
-
-        event_bus.post(Event())
-
-        self.assertEqual(len(TestClass.instance_event_received), 0)
-        self.assertEqual(len(TestClass.static_event_received), 1)
-
-    def test_subscribe_instance_bundles(self):
-        """Tests that @subscribe bundles register only instance listeners."""
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = ''
-        test_run_config.log_path = ''
-        test_object = TestClass(test_run_config)
-        bundle = subscription_bundle.create_from_instance(test_object)
-        bundle.register()
-
-        event_bus.post(Event())
-
-        self.assertEqual(len(TestClass.instance_event_received), 1)
-        self.assertEqual(len(TestClass.static_event_received), 0)
-
-    def test_event_register(self):
-        """Tests that event.register()'d functions can receive posted Events."""
-        def event_listener(_):
-            self.called_event = True
-
-        event_bus.register(Event, event_listener)
-        event_bus.post(Event())
-
-        self.assertTrue(self.called_event)
-
-    def test_event_unregister(self):
-        """Tests that an event can be registered, and then unregistered."""
-        def event_listener(_):
-            self.called_event = False
-
-        registration_id = event_bus.register(Event, event_listener)
-        event_bus.unregister(registration_id)
-        event_bus.post(Event())
-
-        self.assertFalse(self.called_event)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/event/event_bus_test.py b/src/antlion/unit_tests/event/event_bus_test.py
deleted file mode 100755
index 2352ea7..0000000
--- a/src/antlion/unit_tests/event/event_bus_test.py
+++ /dev/null
@@ -1,269 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-from unittest import TestCase
-
-from mock import Mock
-from mock import patch
-
-from antlion.event import event_bus
-from antlion.event.event import Event
-from antlion.event.event_subscription import EventSubscription
-
-
-class EventBusTest(TestCase):
-    """Tests the event_bus functions."""
-
-    def setUp(self):
-        """Clears all state from the event_bus between test cases."""
-        event_bus._event_bus = event_bus._EventBus()
-
-    def get_subscription_argument(self, register_subscription_call):
-        """Gets the subscription argument from a register_subscription call."""
-        return register_subscription_call[0][0]
-
-    @patch('antlion.event.event_bus._event_bus.register_subscription')
-    def test_register_registers_a_subscription(self, register_subscription):
-        """Tests that register creates and registers a subscription."""
-        mock_event = Mock()
-        mock_func = Mock()
-        order = 43
-        event_bus.register(mock_event, mock_func, order=order)
-
-        args, _ = register_subscription.call_args
-        subscription = args[0]
-
-        # Instead of writing an equality operator for only testing,
-        # check the internals to make sure they are expected values.
-        self.assertEqual(subscription._event_type, mock_event)
-        self.assertEqual(subscription._func, mock_func)
-        self.assertEqual(subscription.order, order)
-
-    @patch('antlion.event.event_bus._event_bus.register_subscription')
-    def test_register_subscriptions_for_list(self, register_subscription):
-        """Tests that register_subscription is called for each subscription."""
-        mocks = [Mock(), Mock(), Mock()]
-        subscriptions = [
-            EventSubscription(mocks[0], lambda _: None),
-            EventSubscription(mocks[1], lambda _: None),
-            EventSubscription(mocks[2], lambda _: None),
-        ]
-
-        event_bus.register_subscriptions(subscriptions)
-        received_subscriptions = set()
-        for index, call in enumerate(register_subscription.call_args_list):
-            received_subscriptions.add(self.get_subscription_argument(call))
-
-        self.assertEqual(register_subscription.call_count, len(subscriptions))
-        self.assertSetEqual(received_subscriptions, set(subscriptions))
-
-    def test_register_subscription_new_event_type(self):
-        """Tests that the event_bus can register a new event type."""
-        mock_type = Mock()
-        bus = event_bus._event_bus
-        subscription = EventSubscription(mock_type, lambda _: None)
-
-        reg_id = event_bus.register_subscription(subscription)
-
-        self.assertTrue(mock_type in bus._subscriptions.keys())
-        self.assertTrue(subscription in bus._subscriptions[mock_type])
-        self.assertTrue(reg_id in bus._registration_id_map.keys())
-
-    def test_register_subscription_existing_type(self):
-        """Tests that the event_bus can register an existing event type."""
-        mock_type = Mock()
-        bus = event_bus._event_bus
-        bus._subscriptions[mock_type] = [
-            EventSubscription(mock_type, lambda _: None)
-        ]
-        new_subscription = EventSubscription(mock_type, lambda _: True)
-
-        reg_id = event_bus.register_subscription(new_subscription)
-
-        self.assertTrue(new_subscription in bus._subscriptions[mock_type])
-        self.assertTrue(reg_id in bus._registration_id_map.keys())
-
-    def test_post_to_unregistered_event_does_not_call_other_funcs(self):
-        """Tests posting an unregistered event will not call other funcs."""
-        mock_subscription = Mock()
-        bus = event_bus._event_bus
-        mock_type = Mock()
-        mock_subscription.event_type = mock_type
-        bus._subscriptions[mock_type] = [mock_subscription]
-
-        event_bus.post(Mock())
-
-        self.assertEqual(mock_subscription.deliver.call_count, 0)
-
-    def test_post_to_registered_event_calls_all_registered_funcs(self):
-        """Tests posting to a registered event calls all registered funcs."""
-        mock_subscriptions = [Mock(), Mock(), Mock()]
-        bus = event_bus._event_bus
-        for subscription in mock_subscriptions:
-            subscription.order = 0
-        mock_event = Mock()
-        bus._subscriptions[type(mock_event)] = mock_subscriptions
-
-        event_bus.post(mock_event)
-
-        for subscription in mock_subscriptions:
-            subscription.deliver.assert_called_once_with(mock_event)
-
-    def test_post_with_ignore_errors_calls_all_registered_funcs(self):
-        """Tests posting with ignore_errors=True calls all registered funcs,
-        even if they raise errors.
-        """
-
-        def _raise(_):
-            raise Exception
-
-        mock_event = Mock()
-        mock_subscriptions = [Mock(), Mock(), Mock()]
-        mock_subscriptions[0].deliver.side_effect = _raise
-        bus = event_bus._event_bus
-        for i, subscription in enumerate(mock_subscriptions):
-            subscription.order = i
-        bus._subscriptions[type(mock_event)] = mock_subscriptions
-
-        event_bus.post(mock_event, ignore_errors=True)
-
-        for subscription in mock_subscriptions:
-            subscription.deliver.assert_called_once_with(mock_event)
-
-    @patch('antlion.event.event_bus._event_bus.unregister')
-    def test_unregister_all_from_list(self, unregister):
-        """Tests unregistering from a list unregisters the specified list."""
-        unregister_list = [Mock(), Mock()]
-
-        event_bus.unregister_all(from_list=unregister_list)
-
-        self.assertEqual(unregister.call_count, len(unregister_list))
-        for args, _ in unregister.call_args_list:
-            subscription = args[0]
-            self.assertTrue(subscription in unregister_list)
-
-    @patch('antlion.event.event_bus._event_bus.unregister')
-    def test_unregister_all_from_event(self, unregister):
-        """Tests that all subscriptions under the event are unregistered."""
-        mock_event = Mock()
-        mock_event_2 = Mock()
-        bus = event_bus._event_bus
-        unregister_list = [Mock(), Mock()]
-        bus._subscriptions[type(mock_event_2)] = [Mock(), Mock(), Mock()]
-        bus._subscriptions[type(mock_event)] = unregister_list
-        for sub_type in bus._subscriptions.keys():
-            for subscription in bus._subscriptions[sub_type]:
-                subscription.event_type = sub_type
-                bus._registration_id_map[id(subscription)] = subscription
-
-        event_bus.unregister_all(from_event=type(mock_event))
-
-        self.assertEqual(unregister.call_count, len(unregister_list))
-        for args, _ in unregister.call_args_list:
-            subscription = args[0]
-            self.assertTrue(subscription in unregister_list)
-
-    @patch('antlion.event.event_bus._event_bus.unregister')
-    def test_unregister_all_no_args_unregisters_everything(self, unregister):
-        """Tests unregister_all without arguments will unregister everything."""
-        mock_event_1 = Mock()
-        mock_event_2 = Mock()
-        bus = event_bus._event_bus
-        unregister_list_1 = [Mock(), Mock()]
-        unregister_list_2 = [Mock(), Mock(), Mock()]
-        bus._subscriptions[type(mock_event_1)] = unregister_list_1
-        bus._subscriptions[type(mock_event_2)] = unregister_list_2
-        for sub_type in bus._subscriptions.keys():
-            for subscription in bus._subscriptions[sub_type]:
-                subscription.event_type = sub_type
-                bus._registration_id_map[id(subscription)] = subscription
-
-        event_bus.unregister_all()
-
-        self.assertEqual(unregister.call_count,
-                         len(unregister_list_1) + len(unregister_list_2))
-        for args, _ in unregister.call_args_list:
-            subscription = args[0]
-            self.assertTrue(subscription in unregister_list_1
-                            or subscription in unregister_list_2)
-
-    def test_unregister_given_an_event_subscription(self):
-        """Tests that unregister can unregister a given EventSubscription."""
-        mock_event = Mock()
-        bus = event_bus._event_bus
-        subscription = EventSubscription(type(mock_event), lambda _: None)
-        bus._registration_id_map[id(subscription)] = subscription
-        bus._subscriptions[type(mock_event)] = [subscription]
-
-        val = event_bus.unregister(subscription)
-
-        self.assertTrue(val)
-        self.assertTrue(subscription not in bus._registration_id_map)
-        self.assertTrue(
-            subscription not in bus._subscriptions[type(mock_event)])
-
-    def test_unregister_given_a_registration_id(self):
-        """Tests that unregister can unregister a given EventSubscription."""
-        mock_event = Mock()
-        bus = event_bus._event_bus
-        subscription = EventSubscription(type(mock_event), lambda _: None)
-        registration_id = id(subscription)
-        bus._registration_id_map[id(subscription)] = subscription
-        bus._subscriptions[type(mock_event)] = [subscription]
-
-        val = event_bus.unregister(registration_id)
-
-        self.assertTrue(val)
-        self.assertTrue(subscription not in bus._registration_id_map)
-        self.assertTrue(
-            subscription not in bus._subscriptions[type(mock_event)])
-
-    def test_unregister_given_object_that_is_not_a_subscription(self):
-        """Asserts that a ValueError is raised upon invalid arguments."""
-        with self.assertRaises(ValueError):
-            event_bus.unregister(Mock())
-
-    def test_unregister_given_invalid_registration_id(self):
-        """Asserts that a false is returned upon invalid registration_id."""
-        val = event_bus.unregister(9)
-        self.assertFalse(val)
-
-    def test_listen_for_registers_listener(self):
-        """Tests listen_for registers the listener within the with statement."""
-        bus = event_bus._event_bus
-
-        def event_listener(_):
-            pass
-
-        with event_bus.listen_for(Event, event_listener):
-            self.assertEqual(len(bus._registration_id_map), 1)
-
-    def test_listen_for_unregisters_listener(self):
-        """Tests listen_for unregisters the listener after the with statement.
-        """
-        bus = event_bus._event_bus
-
-        def event_listener(_):
-            pass
-
-        with event_bus.listen_for(Event, event_listener):
-            pass
-
-        self.assertEqual(len(bus._registration_id_map), 0)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/event/event_subscription_test.py b/src/antlion/unit_tests/event/event_subscription_test.py
deleted file mode 100755
index 1bdeb8f..0000000
--- a/src/antlion/unit_tests/event/event_subscription_test.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-from unittest import TestCase
-
-from mock import Mock
-
-from antlion.event.event_subscription import EventSubscription
-
-
-class EventSubscriptionTest(TestCase):
-    """Tests the EventSubscription class."""
-
-    @staticmethod
-    def filter_out_event(_):
-        return False
-
-    @staticmethod
-    def pass_filter(_):
-        return True
-
-    def test_event_type_returns_correct_value(self):
-        """Tests that event_type returns the correct event type."""
-        expected_event_type = Mock()
-        subscription = EventSubscription(expected_event_type, lambda _: None)
-        self.assertEqual(expected_event_type, subscription.event_type)
-
-    def test_deliver_dont_deliver_if_event_is_filtered(self):
-        """Tests deliver does not call func if the event is filtered out."""
-        func = Mock()
-        subscription = EventSubscription(Mock(), func,
-                                         event_filter=self.filter_out_event)
-
-        subscription.deliver(Mock())
-
-        self.assertFalse(func.called)
-
-    def test_deliver_deliver_accepted_event(self):
-        """Tests deliver does call func when the event is accepted."""
-        func = Mock()
-        subscription = EventSubscription(Mock(), func,
-                                         event_filter=self.pass_filter)
-
-        subscription.deliver(Mock())
-        self.assertTrue(func.called)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/event/subscription_bundle_test.py b/src/antlion/unit_tests/event/subscription_bundle_test.py
deleted file mode 100755
index 654c26e..0000000
--- a/src/antlion/unit_tests/event/subscription_bundle_test.py
+++ /dev/null
@@ -1,192 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-from unittest import TestCase
-
-import sys
-from antlion.event import subscription_bundle
-from antlion.event.decorators import subscribe
-from antlion.event.decorators import subscribe_static
-from antlion.event.event import Event
-from antlion.event.subscription_bundle import SubscriptionBundle
-from mock import Mock
-from mock import patch
-
-
-class SubscriptionBundleTest(TestCase):
-    """Tests the SubscriptionBundle class."""
-
-    def test_add_calls_add_subscription_properly(self):
-        """Tests that the convenience function add() calls add_subscription."""
-        event = object()
-        func = object()
-        event_filter = object()
-        order = object()
-        package = SubscriptionBundle()
-        package.add_subscription = Mock()
-
-        package.add(event, func, event_filter=event_filter, order=order)
-
-        self.assertEqual(package.add_subscription.call_count, 1)
-        subscription = package.add_subscription.call_args[0][0]
-        self.assertEqual(subscription._event_type, event)
-        self.assertEqual(subscription._func, func)
-        self.assertEqual(subscription._event_filter, event_filter)
-        self.assertEqual(subscription.order, order)
-
-    @patch('antlion.event.event_bus.register_subscription')
-    def test_add_subscription_registers_sub_if_package_is_registered(
-            self, register_subscription):
-        """Tests that add_subscription registers the subscription if the
-        SubscriptionBundle is already registered."""
-        package = SubscriptionBundle()
-        package._registered = True
-        mock_subscription = Mock()
-
-        package.add_subscription(mock_subscription)
-
-        self.assertEqual(register_subscription.call_count, 1)
-        register_subscription.assert_called_with(mock_subscription)
-
-    def test_add_subscription_adds_to_subscriptions(self):
-        """Tests add_subscription adds the subscription to subscriptions."""
-        mock_subscription = Mock()
-        package = SubscriptionBundle()
-
-        package.add_subscription(mock_subscription)
-
-        self.assertTrue(mock_subscription in package.subscriptions.keys())
-
-    def test_remove_subscription_removes_subscription_from_subscriptions(self):
-        """Tests remove_subscription removes the given subscription from the
-        subscriptions dictionary."""
-        mock_subscription = Mock()
-        package = SubscriptionBundle()
-        package.subscriptions[mock_subscription] = id(mock_subscription)
-
-        package.remove_subscription(mock_subscription)
-
-        self.assertTrue(mock_subscription not in package.subscriptions.keys())
-
-    @patch('antlion.event.event_bus.unregister')
-    def test_remove_subscription_unregisters_subscription(self, unregister):
-        """Tests that removing a subscription will also unregister it if the
-        SubscriptionBundle is registered."""
-        mock_subscription = Mock()
-        package = SubscriptionBundle()
-        package._registered = True
-        package.subscriptions[mock_subscription] = id(mock_subscription)
-
-        package.remove_subscription(mock_subscription)
-
-        self.assertEqual(unregister.call_count, 1)
-        unregistered_obj = unregister.call_args[0][0]
-        self.assertTrue(unregistered_obj == id(mock_subscription) or
-                        unregistered_obj == mock_subscription)
-
-    @patch('antlion.event.event_bus.register_subscription')
-    def test_register_registers_all_subscriptions(self, register_subscription):
-        """Tests register() registers all subscriptions within the bundle."""
-        mock_subscription_list = [Mock(), Mock(), Mock()]
-        package = SubscriptionBundle()
-        package._registered = False
-        for subscription in mock_subscription_list:
-            package.subscriptions[subscription] = None
-
-        package.register()
-
-        self.assertEqual(register_subscription.call_count,
-                         len(mock_subscription_list))
-        args = {args[0] for args, _ in register_subscription.call_args_list}
-        for subscription in mock_subscription_list:
-            self.assertTrue(subscription in args or id(subscription) in args)
-
-    @patch('antlion.event.event_bus.unregister')
-    def test_register_registers_all_subscriptions(self, unregister):
-        """Tests register() registers all subscriptions within the bundle."""
-        mock_subscription_list = [Mock(), Mock(), Mock()]
-        package = SubscriptionBundle()
-        package._registered = True
-        for subscription in mock_subscription_list:
-            package.subscriptions[subscription] = id(subscription)
-
-        package.unregister()
-
-        self.assertEqual(unregister.call_count, len(mock_subscription_list))
-        args = {args[0] for args, _ in unregister.call_args_list}
-        for subscription in mock_subscription_list:
-            self.assertTrue(subscription in args or id(subscription) in args)
-
-
-class SubscriptionBundleStaticFunctions(TestCase):
-    """Tests the static functions found in subscription_bundle.py"""
-
-    @staticmethod
-    @subscribe_static(Event)
-    def static_listener_1():
-        pass
-
-    @staticmethod
-    @subscribe_static(Event)
-    def static_listener_2():
-        pass
-
-    @subscribe(Event)
-    def instance_listener_1(self):
-        pass
-
-    @subscribe(Event)
-    def instance_listener_2(self):
-        pass
-
-    def test_create_from_static(self):
-        """Tests create_from_static gets all StaticSubscriptionHandles."""
-        cls = self.__class__
-        bundle = subscription_bundle.create_from_static(cls)
-
-        self.assertEqual(len(bundle.subscriptions), 2)
-        keys = bundle.subscriptions.keys()
-        self.assertIn(cls.static_listener_1.subscription, keys)
-        self.assertIn(cls.static_listener_2.subscription, keys)
-
-    def test_create_from_instance(self):
-        """Tests create_from_instance gets all InstanceSubscriptionHandles."""
-        bundle = subscription_bundle.create_from_instance(self)
-
-        self.assertEqual(len(bundle.subscriptions), 2)
-        keys = bundle.subscriptions.keys()
-        self.assertIn(self.instance_listener_1.subscription, keys)
-        self.assertIn(self.instance_listener_2.subscription, keys)
-
-
-@subscribe_static(Event)
-def static_listener_1():
-    pass
-
-
-class SubscribeStaticModuleLevelTest(TestCase):
-    def test_create_from_static(self):
-        """Tests create_from_static gets all StaticSubscriptionHandles."""
-        bundle = subscription_bundle.create_from_static(
-            sys.modules[self.__module__])
-
-        self.assertEqual(len(bundle.subscriptions), 1)
-        keys = bundle.subscriptions.keys()
-        self.assertIn(static_listener_1.subscription, keys)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/logging/log_stream_test.py b/src/antlion/unit_tests/libs/logging/log_stream_test.py
deleted file mode 100755
index 8e1fc78..0000000
--- a/src/antlion/unit_tests/libs/logging/log_stream_test.py
+++ /dev/null
@@ -1,430 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import os
-import unittest
-
-import mock
-
-from antlion import context
-from antlion.libs.logging import log_stream
-from antlion.libs.logging.log_stream import AlsoToLogHandler
-from antlion.libs.logging.log_stream import InvalidStyleSetError
-from antlion.libs.logging.log_stream import LogStyles
-from antlion.libs.logging.log_stream import _LogStream
-
-
-class TestClass(object):
-    """Dummy class for TestEvents"""
-
-    def __init__(self):
-        self.test_name = self.test_case.__name__
-
-    def test_case(self):
-        """Dummy test case for test events."""
-
-
-class LogStreamTest(unittest.TestCase):
-    """Tests the _LogStream class in antlion.libs.logging.log_stream."""
-
-    @staticmethod
-    def patch(imported_name, *args, **kwargs):
-        return mock.patch('antlion.libs.logging.log_stream.%s' % imported_name,
-                          *args, **kwargs)
-
-    @classmethod
-    def setUpClass(cls):
-        # logging.log_path only exists if logger._setup_test_logger is called.
-        # Here we set it to a value that is likely to not exist so file IO is
-        # not executed (an error is raised instead of creating the file).
-        logging.log_path = '/f/a/i/l/p/a/t/h'
-
-    def setUp(self):
-        log_stream._log_streams = dict()
-
-    # __init__
-
-    @mock.patch('os.makedirs')
-    def test_init_adds_null_handler(self, *_):
-        """Tests that a NullHandler is added to the logger upon initialization.
-        This ensures that no log output is generated when a test class is not
-        running.
-        """
-        debug_monolith_log = LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG
-        with self.patch('MovableFileHandler'):
-            log = log_stream.create_logger(self._testMethodName,
-                                           log_styles=debug_monolith_log)
-
-        self.assertTrue(isinstance(log.handlers[0], logging.NullHandler))
-
-    # __validate_style
-
-    @mock.patch('os.makedirs')
-    def test_validate_styles_raises_when_same_location_set_multiple_times(
-            self, *_):
-        """Tests that a style is invalid if it sets the same handler twice.
-
-        If the error is NOT raised, then a LogStream can create a Logger that
-        has multiple LogHandlers trying to write to the same file.
-        """
-        with self.assertRaises(InvalidStyleSetError) as catch:
-            log_stream.create_logger(
-                self._testMethodName,
-                log_styles=[LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG,
-                            LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG])
-        self.assertTrue(
-            'has been set multiple' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
-
-    @mock.patch('os.makedirs')
-    def test_validate_styles_raises_when_multiple_file_outputs_set(self, *_):
-        """Tests that a style is invalid if more than one of MONOLITH_LOG,
-        TESTCLASS_LOG, and TESTCASE_LOG is set for the same log level.
-
-        If the error is NOT raised, then a LogStream can create a Logger that
-        has multiple LogHandlers trying to write to the same file.
-        """
-        with self.assertRaises(InvalidStyleSetError) as catch:
-            log_stream.create_logger(
-                self._testMethodName,
-                log_styles=[LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
-                            LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG])
-        self.assertTrue(
-            'More than one of' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
-
-        with self.assertRaises(InvalidStyleSetError) as catch:
-            log_stream.create_logger(
-                self._testMethodName,
-                log_styles=[LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
-                            LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG])
-        self.assertTrue(
-            'More than one of' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
-
-        with self.assertRaises(InvalidStyleSetError) as catch:
-            log_stream.create_logger(
-                self._testMethodName,
-                log_styles=[LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG,
-                            LogStyles.LOG_DEBUG | LogStyles.TESTCLASS_LOG,
-                            LogStyles.LOG_DEBUG | LogStyles.MONOLITH_LOG])
-        self.assertTrue(
-            'More than one of' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
-
-    @mock.patch('os.makedirs')
-    def test_validate_styles_raises_when_no_level_exists(self, *_):
-        """Tests that a style is invalid if it does not contain a log level.
-
-        If the style does not contain a log level, then there is no way to
-        pass the information coming from the logger to the correct file.
-        """
-        with self.assertRaises(InvalidStyleSetError) as catch:
-            log_stream.create_logger(self._testMethodName,
-                                     log_styles=[LogStyles.MONOLITH_LOG])
-
-        self.assertTrue(
-            'log level' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
-
-    @mock.patch('os.makedirs')
-    def test_validate_styles_raises_when_no_location_exists(self, *_):
-        """Tests that a style is invalid if it does not contain a log level.
-
-        If the style does not contain a log level, then there is no way to
-        pass the information coming from the logger to the correct file.
-        """
-        with self.assertRaises(InvalidStyleSetError) as catch:
-            log_stream.create_logger(self._testMethodName,
-                                     log_styles=[LogStyles.LOG_INFO])
-
-        self.assertTrue(
-            'log location' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
-
-    @mock.patch('os.makedirs')
-    def test_validate_styles_raises_when_rotate_logs_no_file_handler(self, *_):
-        """Tests that a LogStyle cannot set ROTATE_LOGS without *_LOG flag.
-
-        If the LogStyle contains ROTATE_LOGS, it must be associated with a log
-        that is rotatable. TO_ACTS_LOG and TO_STDOUT are not rotatable logs,
-        since those are both controlled by another object/process. The user
-        must specify MONOLITHIC_LOG or TESTCASE_LOG.
-        """
-        with self.assertRaises(InvalidStyleSetError) as catch:
-            log_stream.create_logger(
-                self._testMethodName,
-                # Added LOG_DEBUG here to prevent the no_level_exists raise from
-                # occurring.
-                log_styles=[LogStyles.LOG_DEBUG + LogStyles.ROTATE_LOGS])
-
-        self.assertTrue(
-            'log type' in catch.exception.args[0],
-            msg='__validate_styles did not raise the expected error message')
-
-    # __handle_style
-
-    @mock.patch('os.makedirs')
-    def test_handle_style_to_acts_log_creates_handler(self, *_):
-        """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler."""
-        info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_ACTS_LOG
-
-        log = log_stream.create_logger(self._testMethodName,
-                                       log_styles=info_acts_log)
-
-        self.assertTrue(isinstance(log.handlers[1], AlsoToLogHandler))
-
-    @mock.patch('os.makedirs')
-    def test_handle_style_to_acts_log_creates_handler_is_lowest_level(self, *_):
-        """Tests that using the flag TO_ACTS_LOG creates an AlsoToLogHandler
-        that is set to the lowest LogStyles level."""
-        info_acts_log = (LogStyles.LOG_DEBUG + LogStyles.LOG_INFO +
-                         LogStyles.TO_ACTS_LOG)
-
-        log = log_stream.create_logger(self._testMethodName,
-                                       log_styles=info_acts_log)
-
-        self.assertTrue(isinstance(log.handlers[1], AlsoToLogHandler))
-        self.assertEqual(log.handlers[1].level, logging.DEBUG)
-
-    @mock.patch('os.makedirs')
-    def test_handle_style_to_stdout_creates_stream_handler(self, *_):
-        """Tests that using the flag TO_STDOUT creates a StreamHandler."""
-        info_acts_log = LogStyles.LOG_INFO + LogStyles.TO_STDOUT
-
-        log = log_stream.create_logger(self._testMethodName,
-                                       log_styles=info_acts_log)
-
-        self.assertTrue(isinstance(log.handlers[1], logging.StreamHandler))
-
-    @mock.patch('os.makedirs')
-    def test_handle_style_creates_file_handler(self, *_):
-        """Tests handle_style creates a MovableFileHandler for the MONOLITH_LOG."""
-        info_acts_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
-
-        expected = mock.MagicMock()
-        with self.patch('MovableFileHandler', return_value=expected):
-            log = log_stream.create_logger(self._testMethodName,
-                                           log_styles=info_acts_log)
-
-        self.assertEqual(log.handlers[1], expected)
-
-    @mock.patch('os.makedirs')
-    def test_handle_style_creates_rotating_file_handler(self, *_):
-        """Tests handle_style creates a MovableFileHandler for the ROTATE_LOGS."""
-        info_acts_log = (LogStyles.LOG_INFO + LogStyles.ROTATE_LOGS +
-                         LogStyles.MONOLITH_LOG)
-
-        expected = mock.MagicMock()
-        with self.patch('MovableRotatingFileHandler', return_value=expected):
-            log = log_stream.create_logger(self._testMethodName,
-                                           log_styles=info_acts_log)
-
-        self.assertEqual(log.handlers[1], expected)
-
-    # __create_rotating_file_handler
-
-    def test_create_rotating_file_handler_does_what_it_says_it_does(self):
-        """Tests that __create_rotating_file_handler does exactly that."""
-        expected = mock.MagicMock()
-
-        with self.patch('MovableRotatingFileHandler', return_value=expected):
-            # Through name-mangling, this function is automatically renamed. See
-            # https://docs.python.org/3/tutorial/classes.html#private-variables
-            fh = _LogStream._LogStream__create_rotating_file_handler('')
-
-        self.assertEqual(expected, fh,
-                         'The function did not return a MovableRotatingFileHandler.')
-
-    # __get_file_handler_creator
-
-    def test_get_file_handler_creator_returns_rotating_file_handler(self):
-        """Tests the function returns a MovableRotatingFileHandler when the log_style
-        has LogStyle.ROTATE_LOGS."""
-        expected = mock.MagicMock()
-
-        with self.patch('_LogStream._LogStream__create_rotating_file_handler',
-                        return_value=expected):
-            # Through name-mangling, this function is automatically renamed. See
-            # https://docs.python.org/3/tutorial/classes.html#private-variables
-            fh_creator = _LogStream._LogStream__get_file_handler_creator(
-                LogStyles.ROTATE_LOGS)
-
-        self.assertEqual(expected, fh_creator('/d/u/m/m/y/p/a/t/h'),
-                         'The function did not return a MovableRotatingFileHandler.')
-
-    def test_get_file_handler_creator_returns_file_handler(self):
-        """Tests the function returns a MovableFileHandler when the log_style does NOT
-        have LogStyle.ROTATE_LOGS."""
-        expected = mock.MagicMock()
-
-        with self.patch('MovableFileHandler', return_value=expected):
-            # Through name-mangling, this function is automatically renamed. See
-            # https://docs.python.org/3/tutorial/classes.html#private-variables
-            handler = _LogStream._LogStream__get_file_handler_creator(
-                LogStyles.NONE)()
-
-        self.assertTrue(isinstance(handler, mock.Mock))
-
-    # __get_lowest_log_level
-
-    def test_get_lowest_level_gets_lowest_level(self):
-        """Tests __get_lowest_level returns the lowest LogStyle level given."""
-        level = _LogStream._LogStream__get_lowest_log_level(
-            LogStyles.ALL_LEVELS)
-        self.assertEqual(level, LogStyles.LOG_DEBUG)
-
-    # __get_current_output_dir
-
-    @mock.patch('os.makedirs')
-    def test_get_current_output_dir_gets_correct_path(self, *_):
-        """Tests __get_current_output_dir gets the correct path from the context
-        """
-        info_monolith_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
-
-        base_path = "BASEPATH"
-        subcontext = "SUBCONTEXT"
-        with self.patch('MovableFileHandler'):
-            logstream = log_stream._LogStream(
-                self._testMethodName, log_styles=info_monolith_log,
-                base_path=base_path, subcontext=subcontext)
-
-        expected = os.path.join(base_path, subcontext)
-        self.assertEqual(
-            logstream._LogStream__get_current_output_dir(), expected)
-
-    # __create_handler
-
-    @mock.patch('os.makedirs')
-    def test_create_handler_creates_handler_at_correct_path(self, *_):
-        """Tests that __create_handler calls the handler creator with the
-        correct absolute path to the log file.
-        """
-        info_monolith_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
-        base_path = 'BASEPATH'
-        with self.patch('MovableFileHandler') as file_handler:
-            log_stream.create_logger(
-                self._testMethodName, log_styles=info_monolith_log,
-                base_path=base_path)
-            expected = os.path.join(
-                base_path, '%s_%s.txt' % (self._testMethodName, 'info'))
-            file_handler.assert_called_with(expected)
-
-    # __remove_handler
-
-    @mock.patch('os.makedirs')
-    def test_remove_handler_removes_a_handler(self, *_):
-        """Tests that __remove_handler removes the handler from the logger and
-        closes the handler.
-        """
-        dummy_obj = mock.Mock()
-        dummy_obj.logger = mock.Mock()
-        handler = mock.Mock()
-        _LogStream._LogStream__remove_handler(dummy_obj, handler)
-
-        self.assertTrue(dummy_obj.logger.removeHandler.called)
-        self.assertTrue(handler.close.called)
-
-    # update_handlers
-
-    @mock.patch('os.makedirs')
-    def test_update_handlers_updates_filehandler_target(self, _):
-        """Tests that update_handlers invokes the underlying
-        MovableFileHandler.set_file method on the correct path.
-        """
-        info_testclass_log = LogStyles.LOG_INFO + LogStyles.TESTCLASS_LOG
-        file_name = 'FILENAME'
-        with self.patch('MovableFileHandler'):
-            log = log_stream.create_logger(
-                self._testMethodName, log_styles=info_testclass_log)
-            handler = log.handlers[-1]
-            handler.baseFilename = file_name
-            stream = log_stream._log_streams[log.name]
-            stream._LogStream__get_current_output_dir = (
-                lambda: 'BASEPATH/TestClass'
-            )
-
-            stream.update_handlers(context.NewTestClassContextEvent())
-
-            handler.set_file.assert_called_with('BASEPATH/TestClass/FILENAME')
-
-    # cleanup
-
-    @mock.patch('os.makedirs')
-    def test_cleanup_removes_all_handlers(self, *_):
-        """ Tests that cleanup removes all handlers in the logger, except
-        the NullHandler.
-        """
-        info_testcase_log = LogStyles.LOG_INFO + LogStyles.MONOLITH_LOG
-        with self.patch('MovableFileHandler'):
-            log_stream.create_logger(self._testMethodName,
-                                     log_styles=info_testcase_log)
-
-        created_log_stream = log_stream._log_streams[self._testMethodName]
-        created_log_stream.cleanup()
-
-        self.assertEqual(len(created_log_stream.logger.handlers), 1)
-
-
-class LogStreamModuleTests(unittest.TestCase):
-    @classmethod
-    def setUpClass(cls):
-        # logging.log_path only exists if logger._setup_test_logger is called.
-        # Here we set it to a value that is likely to not exist so file IO is
-        # not executed (an error is raised instead of creating the file).
-        logging.log_path = '/f/a/i/l/p/a/t/h'
-
-    def setUp(self):
-        log_stream._log_streams = {}
-
-    # _update_handlers
-
-    @staticmethod
-    def create_new_context_event():
-        return context.NewContextEvent()
-
-    def test_update_handlers_delegates_calls_to_log_streams(self):
-        """Tests _update_handlers calls update_handlers on each log_stream.
-        """
-        log_stream._log_streams = {
-            'a': mock.Mock(),
-            'b': mock.Mock()
-        }
-
-        log_stream._update_handlers(self.create_new_context_event())
-
-        self.assertTrue(log_stream._log_streams['a'].update_handlers.called)
-        self.assertTrue(log_stream._log_streams['b'].update_handlers.called)
-
-    # _set_logger
-
-    def test_set_logger_overwrites_previous_logger(self):
-        """Tests that calling set_logger overwrites the previous logger within
-        log_stream._log_streams.
-        """
-        previous = mock.Mock()
-        log_stream._log_streams = {
-            'a': previous
-        }
-        expected = mock.Mock()
-        expected.name = 'a'
-        log_stream._set_logger(expected)
-
-        self.assertEqual(log_stream._log_streams['a'], expected)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py b/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
deleted file mode 100644
index 3b82d23..0000000
--- a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
+++ /dev/null
@@ -1,143 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-import logging
-import mock
-
-from antlion.controllers import android_device
-from antlion.libs.ota.ota_runners import ota_runner
-from antlion.libs.ota.ota_runners import ota_runner_factory
-from antlion import config_parser
-
-
-class OtaRunnerFactoryTests(unittest.TestCase):
-    """Tests all of the functions in the ota_runner_factory module."""
-
-    def setUp(self):
-        self.device = mock.MagicMock()
-        self.device.serial = 'fake_serial'
-
-    def test_get_ota_value_from_config_no_map_key_missing(self):
-        acts_config = {}
-        with self.assertRaises(config_parser.ActsConfigError):
-            ota_runner_factory.get_ota_value_from_config(
-                acts_config, 'ota_tool', self.device)
-
-    def test_get_ota_value_from_config_with_map_key_missing(self):
-        acts_config = {'ota_map': {'fake_serial': 'MockOtaTool'}}
-        with self.assertRaises(config_parser.ActsConfigError):
-            ota_runner_factory.get_ota_value_from_config(
-                acts_config, 'ota_tool', self.device)
-
-    def test_get_ota_value_from_config_with_map_key_found(self):
-        expected_value = '/path/to/tool'
-        acts_config = {
-            'ota_map': {
-                'fake_serial': 'MockOtaTool'
-            },
-            'ota_tool_MockOtaTool': expected_value
-        }
-        ret = ota_runner_factory.get_ota_value_from_config(
-            acts_config, 'ota_tool', self.device)
-        self.assertEqual(expected_value, ret)
-
-    def test_create_from_configs_raise_when_non_default_tool_path_missing(
-            self):
-        acts_config = {
-            'ota_tool': 'FakeTool',
-        }
-        try:
-            ota_runner_factory.create_from_configs(acts_config, self.device)
-        except config_parser.ActsConfigError:
-            return
-        self.fail('create_from_configs did not throw an error when a tool was'
-                  'specified without a tool path.')
-
-    def test_create_from_configs_without_map_makes_proper_calls(self):
-        acts_config = {
-            'ota_package': 'jkl;',
-            'ota_sl4a': 'qaz',
-            'ota_tool': 'FakeTool',
-            'FakeTool': 'qwerty'
-        }
-        function_path = 'antlion.libs.ota.ota_runners.ota_runner_factory.create'
-        with mock.patch(function_path) as mocked_function:
-            ota_runner_factory.create_from_configs(acts_config, self.device)
-            mocked_function.assert_called_with('jkl;', 'qaz', self.device,
-                                               'FakeTool', 'qwerty')
-
-    def test_create_from_configs_with_map_makes_proper_calls(self):
-        acts_config = {
-            'ota_map': {
-                'fake_serial': "hardwareA"
-            },
-            'ota_package_hardwareA': 'jkl;',
-            'ota_sl4a_hardwareA': 'qaz',
-            'ota_tool_hardwareA': 'FakeTool',
-            'FakeTool': 'qwerty'
-        }
-        function_path = 'antlion.libs.ota.ota_runners.ota_runner_factory.create'
-        with mock.patch(function_path) as mocked_function:
-            ota_runner_factory.create_from_configs(acts_config, self.device)
-            mocked_function.assert_called_with('jkl;', 'qaz', self.device,
-                                               'FakeTool', 'qwerty')
-
-    def test_create_raise_on_ota_pkg_and_sl4a_fields_have_different_types(
-            self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
-            with self.assertRaises(TypeError):
-                ota_runner_factory.create('ota_package', ['ota_sl4a'],
-                                          self.device)
-
-    def test_create_raise_on_ota_package_not_a_list_or_string(self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
-            with self.assertRaises(TypeError):
-                ota_runner_factory.create({'ota': 'pkg'}, {'ota': 'sl4a'},
-                                          self.device)
-
-    def test_create_returns_single_ota_runner_on_ota_package_being_a_str(self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
-            ret = ota_runner_factory.create('', '', self.device)
-            self.assertEqual(type(ret), ota_runner.SingleUseOtaRunner)
-
-    def test_create_returns_multi_ota_runner_on_ota_package_being_a_list(self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
-            ret = ota_runner_factory.create([], [], self.device)
-            self.assertEqual(type(ret), ota_runner.MultiUseOtaRunner)
-
-    def test_create_returns_bound_ota_runner_on_second_request(self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
-            first_return = ota_runner_factory.create([], [], self.device)
-            logging.disable(logging.WARNING)
-            second_return = ota_runner_factory.create([], [], self.device)
-            logging.disable(logging.NOTSET)
-            self.assertEqual(first_return, second_return)
-
-    def test_create_returns_different_ota_runner_on_second_request(self):
-        with mock.patch('antlion.libs.ota.ota_tools.ota_tool_factory.create'):
-            first_return = ota_runner_factory.create([], [],
-                                                     self.device,
-                                                     use_cached_runners=False)
-            second_return = ota_runner_factory.create([], [],
-                                                      self.device,
-                                                      use_cached_runners=False)
-            self.assertNotEqual(first_return, second_return)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py b/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
deleted file mode 100644
index 9f51368..0000000
--- a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
+++ /dev/null
@@ -1,256 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-import mock
-import os
-
-from antlion.libs.ota.ota_tools import ota_tool
-from antlion.libs.ota.ota_runners import ota_runner
-from antlion.controllers import android_device
-
-
-class MockOtaTool(ota_tool.OtaTool):
-    def __init__(self, command):
-        super(MockOtaTool, self).__init__(command)
-        self.update_call_count = 0
-        self.cleanup_call_count = 0
-
-    def update(self, unused):
-        self.update_call_count += 1
-
-    def cleanup(self, unused):
-        self.cleanup_call_count += 1
-
-    def reset_count(self):
-        self.update_call_count = 0
-        self.cleanup_call_count = 0
-
-    def assert_calls_equal(self, test, number_of_expected_calls):
-        test.assertEqual(number_of_expected_calls, self.update_call_count)
-        test.assertEqual(number_of_expected_calls, self.cleanup_call_count)
-
-
-class OtaRunnerImpl(ota_runner.OtaRunner):
-    """Sets properties to return an empty string to allow OtaRunner tests."""
-
-    def get_sl4a_apk(self):
-        return ''
-
-    def get_ota_package(self):
-        return ''
-
-    def validate_update(self):
-        pass
-
-
-class OtaRunnerTest(unittest.TestCase):
-    """Tests the OtaRunner class."""
-
-    def setUp(self):
-        self.prev_sl4a_service_setup_time = ota_runner.SL4A_SERVICE_SETUP_TIME
-        ota_runner.SL4A_SERVICE_SETUP_TIME = 0
-
-    def tearDown(self):
-        ota_runner.SL4A_SERVICE_SETUP_TIME = self.prev_sl4a_service_setup_time
-
-    def test_update(self):
-        device = mock.MagicMock()
-        device.skip_sl4a = False
-        tool = MockOtaTool('mock_command')
-        runner = OtaRunnerImpl(tool, device)
-        runner.android_device.adb.getprop = mock.Mock(side_effect=['a', 'b'])
-        runner.get_post_build_id = lambda: 'abc'
-
-        runner._update()
-
-        self.assertTrue(device.stop_services.called)
-        self.assertTrue(device.wait_for_boot_completion.called)
-        self.assertTrue(device.start_services.called)
-        self.assertTrue(device.adb.install.called)
-        tool.assert_calls_equal(self, 1)
-
-    def test_update_fail_on_no_change_to_build(self):
-        device = mock.MagicMock()
-        tool = MockOtaTool('mock_command')
-        runner = OtaRunnerImpl(tool, device)
-        runner.android_device.adb.getprop = mock.Mock(side_effect=['a', 'a'])
-        runner.get_post_build_id = lambda: 'abc'
-        try:
-            runner._update()
-            self.fail('Matching build fingerprints did not throw an error!')
-        except ota_runner.OtaError:
-            pass
-
-    def test_init(self):
-        device = mock.MagicMock()
-        tool = MockOtaTool('mock_command')
-        runner = ota_runner.OtaRunner(tool, device)
-
-        self.assertEqual(runner.ota_tool, tool)
-        self.assertEqual(runner.android_device, device)
-        self.assertEqual(runner.serial, device.serial)
-
-    def test_get_post_build_id_grabs_valid_data(self):
-        device = mock.MagicMock()
-        tool = MockOtaTool('mock_command')
-        runner = OtaRunnerImpl(tool, device)
-        ota_package_path = os.path.join(
-            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-            'dummy_ota_package.zip')
-        runner.get_ota_package = lambda: ota_package_path
-        self.assertEqual(runner.get_post_build_id(), 'post-build_information')
-
-    def test_get_ota_package_metadata_value_does_not_exist(self):
-        device = mock.MagicMock()
-        tool = MockOtaTool('mock_command')
-        runner = OtaRunnerImpl(tool, device)
-        ota_package_path = os.path.join(
-            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-            'dummy_ota_package.zip')
-        runner.get_ota_package = lambda: ota_package_path
-        self.assertEqual(runner.get_ota_package_metadata('garbage-data'), None)
-
-
-class SingleUseOtaRunnerTest(unittest.TestCase):
-    """Tests the SingleUseOtaRunner class."""
-
-    def setUp(self):
-        self.device = mock.MagicMock()
-        self.tool = MockOtaTool('mock_command')
-
-    def test_update_first_update_runs(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, '', '')
-        try:
-            with mock.patch.object(ota_runner.OtaRunner, '_update'):
-                runner.update()
-        except ota_runner.OtaError:
-            self.fail('SingleUseOtaRunner threw an exception on the first '
-                      'update call.')
-
-    def test_update_second_update_raises_error(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, '', '')
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
-            runner.update()
-            try:
-                runner.update()
-            except ota_runner.OtaError:
-                return
-        self.fail('SingleUseOtaRunner did not throw an exception on the second'
-                  'update call.')
-
-    def test_can_update_no_updates_called(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, '', '')
-        self.assertEqual(True, runner.can_update())
-
-    def test_can_update_has_updated_already(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, '', '')
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
-            runner.update()
-        self.assertEqual(False, runner.can_update())
-
-    def test_get_ota_package(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, 'a',
-                                               'b')
-        self.assertEqual(runner.get_ota_package(), 'a')
-
-    def test_get_sl4a_apk(self):
-        runner = ota_runner.SingleUseOtaRunner(self.tool, self.device, 'a',
-                                               'b')
-        self.assertEqual(runner.get_sl4a_apk(), 'b')
-
-
-class MultiUseOtaRunnerTest(unittest.TestCase):
-    """Tests the MultiUseOtaRunner class."""
-
-    def setUp(self):
-        self.device = mock.MagicMock()
-        self.tool = MockOtaTool('mock_command')
-
-    def test_update_first_update_runs(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device, [''],
-                                              [''])
-        try:
-            with mock.patch.object(ota_runner.OtaRunner, '_update'):
-                runner.update()
-        except ota_runner.OtaError:
-            self.fail('MultiUseOtaRunner threw an exception on the first '
-                      'update call.')
-
-    def test_update_multiple_updates_run(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
-            runner.update()
-            try:
-                runner.update()
-            except ota_runner.OtaError:
-                self.fail('MultiUseOtaRunner threw an exception before '
-                          'running out of update packages.')
-
-    def test_update_too_many_update_calls_raises_error(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
-            runner.update()
-            runner.update()
-            try:
-                runner.update()
-            except ota_runner.OtaError:
-                return
-        self.fail('MultiUseOtaRunner did not throw an exception after running '
-                  'out of update packages.')
-
-    def test_can_update_no_updates_called(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        self.assertEqual(True, runner.can_update())
-
-    def test_can_update_has_more_updates_left(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
-            runner.update()
-        self.assertEqual(True, runner.can_update())
-
-    def test_can_update_ran_out_of_updates(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        with mock.patch.object(ota_runner.OtaRunner, '_update'):
-            runner.update()
-            runner.update()
-        self.assertEqual(False, runner.can_update())
-
-    def test_get_ota_package(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        self.assertEqual(runner.get_ota_package(), 'first_pkg')
-
-    def test_get_sl4a_apk(self):
-        runner = ota_runner.MultiUseOtaRunner(self.tool, self.device,
-                                              ['first_pkg', 'second_pkg'],
-                                              ['first_apk', 'second_apk'])
-        self.assertEqual(runner.get_sl4a_apk(), 'first_apk')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py b/src/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
deleted file mode 100644
index 3816157..0000000
--- a/src/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import unittest
-
-import os
-
-from antlion.controllers import android_device
-from antlion.libs.ota.ota_runners import ota_runner
-from antlion.libs.ota.ota_tools import ota_tool
-from antlion.libs.ota.ota_tools import adb_sideload_ota_tool
-
-
-def get_mock_android_device(serial='', ssh_connection=None):
-    """Returns a mocked AndroidDevice with a mocked adb/fastboot."""
-    with mock.patch('antlion.controllers.adb.AdbProxy') as adb_proxy, (
-            mock.patch('antlion.controllers.fastboot.FastbootProxy')) as fb_proxy:
-        fb_proxy.return_value.devices.return_value = ""
-        ret = mock.Mock(
-            android_device.AndroidDevice(serial=serial,
-                                         ssh_connection=ssh_connection))
-        fb_proxy.reset_mock()
-        return ret
-
-
-class AdbSideloadOtaToolTest(unittest.TestCase):
-    """Tests the OtaTool class."""
-
-    def test_init(self):
-        expected_value = 'commmand string'
-        self.assertEqual(
-            ota_tool.OtaTool(expected_value).command, expected_value)
-
-    def setUp(self):
-        self.sl4a_service_setup_time = ota_runner.SL4A_SERVICE_SETUP_TIME
-        ota_runner.SL4A_SERVICE_SETUP_TIME = 0
-
-    def tearDown(self):
-        ota_runner.SL4A_SERVICE_SETUP_TIME = self.sl4a_service_setup_time
-
-    @staticmethod
-    def test_start():
-        # This test could have a bunch of verify statements,
-        # but its probably not worth it.
-        device = get_mock_android_device()
-        ota_package_path = os.path.join(
-            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-            'dummy_ota_package.zip')
-        tool = adb_sideload_ota_tool.AdbSideloadOtaTool(ota_package_path)
-        runner = ota_runner.SingleUseOtaRunner(tool, device, ota_package_path,
-                                               '')
-        runner.android_device.adb.getprop = mock.Mock(side_effect=['a', 'b'])
-        runner.update()
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py b/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
deleted file mode 100644
index 700e6b2..0000000
--- a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from antlion.libs.ota.ota_tools import ota_tool_factory
-
-
-class MockOtaTool(object):
-    def __init__(self, command):
-        self.command = command
-
-
-class OtaToolFactoryTests(unittest.TestCase):
-    def setUp(self):
-        ota_tool_factory._constructed_tools = {}
-
-    def test_create_constructor_exists(self):
-        ota_tool_factory._CONSTRUCTORS = {
-            MockOtaTool.__name__: lambda command: MockOtaTool(command),
-        }
-        ret = ota_tool_factory.create(MockOtaTool.__name__, 'command')
-        self.assertEqual(type(ret), MockOtaTool)
-        self.assertTrue(ret in ota_tool_factory._constructed_tools.values())
-
-    def test_create_not_in_constructors(self):
-        ota_tool_factory._CONSTRUCTORS = {}
-        with self.assertRaises(KeyError):
-            ota_tool_factory.create(MockOtaTool.__name__, 'command')
-
-    def test_create_returns_cached_tool(self):
-        ota_tool_factory._CONSTRUCTORS = {
-            MockOtaTool.__name__: lambda command: MockOtaTool(command),
-        }
-        ret_a = ota_tool_factory.create(MockOtaTool.__name__, 'command')
-        ret_b = ota_tool_factory.create(MockOtaTool.__name__, 'command')
-        self.assertEqual(ret_a, ret_b)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py b/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
deleted file mode 100644
index 9fc8784..0000000
--- a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from antlion.libs.ota.ota_tools import ota_tool
-
-
-class OtaToolTests(unittest.TestCase):
-    """Tests the OtaTool class."""
-
-    def test_init(self):
-        expected_value = 'commmand string'
-        self.assertEqual(
-            ota_tool.OtaTool(expected_value).command, expected_value)
-
-    def test_start_throws_error_on_unimplemented(self):
-        obj = 'some object'
-        with self.assertRaises(NotImplementedError):
-            ota_tool.OtaTool('').update(obj)
-
-    def test_end_is_not_abstract(self):
-        obj = 'some object'
-        try:
-            ota_tool.OtaTool('').cleanup(obj)
-        except:
-            self.fail('End is not required and should be a virtual function.')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py b/src/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
deleted file mode 100644
index eddf02d..0000000
--- a/src/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import unittest
-
-import os
-
-from antlion.controllers import android_device
-from antlion.libs.ota.ota_runners import ota_runner
-from antlion.libs.ota.ota_tools import update_device_ota_tool
-
-
-def get_mock_android_device(serial='', ssh_connection=None):
-    """Returns a mocked AndroidDevice with a mocked adb/fastboot."""
-    with mock.patch('antlion.controllers.adb.AdbProxy') as adb_proxy, (
-            mock.patch('antlion.controllers.fastboot.FastbootProxy')) as fb_proxy:
-        fb_proxy.return_value.devices.return_value = ""
-        ret = mock.Mock(
-            android_device.AndroidDevice(serial=serial,
-                                         ssh_connection=ssh_connection))
-        fb_proxy.reset_mock()
-        return ret
-
-
-class UpdateDeviceOtaToolTest(unittest.TestCase):
-    """Tests for UpdateDeviceOtaTool."""
-
-    def setUp(self):
-        self.sl4a_service_setup_time = ota_runner.SL4A_SERVICE_SETUP_TIME
-        ota_runner.SL4A_SERVICE_SETUP_TIME = 0
-
-    def tearDown(self):
-        ota_runner.SL4A_SERVICE_SETUP_TIME = self.sl4a_service_setup_time
-
-    def test_update(self):
-        ota_package_path = os.path.join(
-            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-            'dummy_ota_package.zip')
-        with mock.patch('tempfile.mkdtemp') as mkdtemp, (
-                mock.patch('shutil.rmtree')) as rmtree, (
-                    mock.patch('antlion.utils.unzip_maintain_permissions')):
-            mkdtemp.return_value = ''
-            rmtree.return_value = ''
-            device = get_mock_android_device()
-            tool = update_device_ota_tool.UpdateDeviceOtaTool(ota_package_path)
-            runner = mock.Mock(
-                ota_runner.SingleUseOtaRunner(tool, device, '', ''))
-            runner.return_value.android_device = device
-            with mock.patch('antlion.libs.proc.job.run'):
-                tool.update(runner)
-            del tool
-
-    def test_del(self):
-        ota_package_path = os.path.join(
-            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-            'dummy_ota_package.zip')
-        with mock.patch('tempfile.mkdtemp') as mkdtemp, (
-                mock.patch('shutil.rmtree')) as rmtree, (
-                    mock.patch('antlion.utils.unzip_maintain_permissions')):
-            mkdtemp.return_value = ''
-            rmtree.return_value = ''
-            tool = update_device_ota_tool.UpdateDeviceOtaTool(ota_package_path)
-            del tool
-            self.assertTrue(mkdtemp.called)
-            self.assertTrue(rmtree.called)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/ota/ota_updater_test.py b/src/antlion/unit_tests/libs/ota/ota_updater_test.py
deleted file mode 100644
index b6cad1a..0000000
--- a/src/antlion/unit_tests/libs/ota/ota_updater_test.py
+++ /dev/null
@@ -1,109 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import unittest
-from antlion.libs.ota import ota_updater
-from antlion.libs.ota.ota_runners import ota_runner
-
-
-class MockAndroidDevice(object):
-    def __init__(self, serial):
-        self.serial = serial
-        self.log = mock.Mock()
-        self.take_bug_report = mock.MagicMock()
-
-
-class MockOtaRunner(object):
-    def __init__(self):
-        self.call_count = 0
-        self.should_fail = False
-        self.can_update_value = 'CAN_UPDATE_CALLED'
-
-    def set_failure(self, should_fail=True):
-        self.should_fail = should_fail
-
-    def update(self):
-        self.call_count += 1
-        if self.should_fail:
-            raise ota_runner.OtaError
-
-    def can_update(self):
-        return self.can_update_value
-
-    def validate_update(self):
-        pass
-
-
-class OtaUpdaterTests(unittest.TestCase):
-    """Tests the methods in the ota_updater module."""
-
-    def test_initialize(self):
-        user_params = {'a': 1, 'b': 2, 'c': 3}
-        android_devices = ['x', 'y', 'z']
-        with mock.patch('antlion.libs.ota.ota_runners.ota_runner_factory.'
-                        'create_from_configs') as fn:
-            ota_updater.initialize(user_params, android_devices)
-            for i in range(len(android_devices)):
-                fn.assert_any_call(user_params, android_devices[i])
-            self.assertSetEqual(
-                set(android_devices), set(ota_updater.ota_runners.keys()))
-
-    def test_check_initialization_is_initialized(self):
-        device = MockAndroidDevice('serial')
-        ota_updater.ota_runners = {
-            device: ota_runner.OtaRunner('tool', device)
-        }
-        try:
-            ota_updater._check_initialization(device)
-        except ota_runner.OtaError:
-            self.fail(
-                '_check_initialization raised for initialized runner!')
-
-    def test_check_initialization_is_not_initialized(self):
-        device = MockAndroidDevice('serial')
-        ota_updater.ota_runners = {}
-        with self.assertRaises(KeyError):
-            ota_updater._check_initialization(device)
-
-    def test_update_do_not_ignore_failures_and_failures_occur(self):
-        device = MockAndroidDevice('serial')
-        runner = MockOtaRunner()
-        runner.set_failure(True)
-        ota_updater.ota_runners = {device: runner}
-        with self.assertRaises(ota_runner.OtaError):
-            ota_updater.update(device)
-
-    def test_update_ignore_failures_and_failures_occur(self):
-        device = MockAndroidDevice('serial')
-        runner = MockOtaRunner()
-        runner.set_failure(True)
-        ota_updater.ota_runners = {device: runner}
-        try:
-            ota_updater.update(device, ignore_update_errors=True)
-        except ota_runner.OtaError:
-            self.fail('OtaError was raised when errors are to be ignored!')
-
-    def test_can_update(self):
-        device = MockAndroidDevice('serial')
-        runner = MockOtaRunner()
-        ota_updater.ota_runners = {device: runner}
-        self.assertEqual(ota_updater.can_update(device),
-                         'CAN_UPDATE_CALLED')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/proc/process_test.py b/src/antlion/unit_tests/libs/proc/process_test.py
deleted file mode 100644
index 0fd6985..0000000
--- a/src/antlion/unit_tests/libs/proc/process_test.py
+++ /dev/null
@@ -1,368 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import subprocess
-import unittest
-
-from antlion.libs.proc.process import Process
-from antlion.libs.proc.process import ProcessError
-import mock
-
-
-class FakeThread(object):
-    def __init__(self, target=None):
-        self.target = target
-        self.alive = False
-
-    def _on_start(self):
-        pass
-
-    def start(self):
-        self.alive = True
-        if self._on_start:
-            self._on_start()
-
-    def stop(self):
-        self.alive = False
-
-    def join(self):
-        pass
-
-
-class ProcessTest(unittest.TestCase):
-    """Tests the antlion.libs.proc.process.Process class."""
-
-    def setUp(self):
-        self._Process__start_process = Process._Process__start_process
-
-    def tearDown(self):
-        Process._Process__start_process = self._Process__start_process
-
-    @staticmethod
-    def patch(imported_name, *args, **kwargs):
-        return mock.patch('antlion.libs.proc.process.%s' % imported_name,
-                          *args, **kwargs)
-
-    # set_on_output_callback
-
-    def test_set_on_output_callback(self):
-        """Tests that set_on_output_callback sets on_output_callback."""
-        callback = mock.Mock()
-
-        process = Process('cmd').set_on_output_callback(callback)
-        process._on_output_callback()
-
-        self.assertTrue(callback.called)
-
-    # set_on_terminate_callback
-
-    def test_set_on_terminate_callback(self):
-        """Tests that set_on_terminate_callback sets _on_terminate_callback."""
-        callback = mock.Mock()
-
-        process = Process('cmd').set_on_terminate_callback(callback)
-        process._on_terminate_callback()
-
-        self.assertTrue(callback.called)
-
-    # start
-
-    def test_start_raises_if_called_back_to_back(self):
-        """Tests that start raises an exception if it has already been called
-        prior.
-
-        This is required to prevent references to processes and threads from
-        being overwritten, potentially causing ACTS to hang."""
-        process = Process('cmd')
-
-        # Here we need the thread to start the process object.
-        class FakeThreadImpl(FakeThread):
-            def _on_start(self):
-                process._process = mock.Mock()
-
-        with self.patch('Thread', FakeThreadImpl):
-            process.start()
-            expected_msg = 'Process has already started.'
-            with self.assertRaisesRegex(ProcessError, expected_msg):
-                process.start()
-
-    def test_start_starts_listening_thread(self):
-        """Tests that start starts the _exec_popen_loop function."""
-        process = Process('cmd')
-
-        # Here we need the thread to start the process object.
-        class FakeThreadImpl(FakeThread):
-            def _on_start(self):
-                process._process = mock.Mock()
-
-        with self.patch('Thread', FakeThreadImpl):
-            process.start()
-
-        self.assertTrue(process._listening_thread.alive)
-        self.assertEqual(process._listening_thread.target, process._exec_loop)
-
-    # wait
-
-    def test_wait_raises_if_called_back_to_back(self):
-        """Tests that wait raises an exception if it has already been called
-        prior."""
-        process = Process('cmd')
-        process._process = mock.Mock()
-
-        process.wait(0)
-        expected_msg = 'Process is already being stopped.'
-        with self.assertRaisesRegex(ProcessError, expected_msg):
-            process.wait(0)
-
-    @mock.patch.object(Process, '_kill_process')
-    def test_wait_kills_after_timeout(self, *_):
-        """Tests that if a TimeoutExpired error is thrown during wait, the
-        process is killed."""
-        process = Process('cmd')
-        process._process = mock.Mock()
-        process._process.wait.side_effect = subprocess.TimeoutExpired('', '')
-
-        process.wait(0)
-
-        self.assertEqual(process._kill_process.called, True)
-
-    @mock.patch('os.getpgid', side_effect=lambda id: id)
-    @mock.patch('os.killpg')
-    def test_sends_signal(self, mock_os, *_):
-        """Tests that signal is sent to process.."""
-        process = Process('cmd')
-        mock_process = mock.Mock()
-        mock_process.pid = -1
-        process._process = mock_process
-
-        process.signal(51641)
-
-        mock_os.assert_called_with(-1, 51641)
-
-    def test_signal_raises_error_on_windows(self, *_):
-        """Tests that signaling is unsupported in windows with appropriate
-        error msg."""
-        process = Process('cmd')
-        mock_inner_process = mock.Mock()
-        mock_inner_process.pid = -1
-        process._process = mock_inner_process
-
-        with mock.patch('antlion.libs.proc.process._on_windows', True):
-            with self.assertRaises(ProcessError):
-                process.signal(51641)
-
-    @mock.patch.object(Process, '_kill_process')
-    def test_wait_sets_stopped_to_true_before_process_kill(self, *_):
-        """Tests that stop() sets the _stopped attribute to True.
-
-        This order is required to prevent the _exec_loop from calling
-        _on_terminate_callback when the user has killed the process.
-        """
-        verifier = mock.Mock()
-        verifier.passed = False
-
-        def test_call_order():
-            self.assertTrue(process._stopped)
-            verifier.passed = True
-
-        process = Process('cmd')
-        process._process = mock.Mock()
-        process._process.poll.return_value = None
-        process._process.wait.side_effect = subprocess.TimeoutExpired('', '')
-        process._kill_process = test_call_order
-
-        process.wait()
-
-        self.assertEqual(verifier.passed, True)
-
-    def test_wait_joins_listening_thread_if_it_exists(self):
-        """Tests wait() joins _listening_thread if it exists."""
-        process = Process('cmd')
-        process._process = mock.Mock()
-        mocked_thread = mock.Mock()
-        process._listening_thread = mocked_thread
-
-        process.wait(0)
-
-        self.assertEqual(mocked_thread.join.called, True)
-
-    def test_wait_clears_listening_thread_if_it_exists(self):
-        """Tests wait() joins _listening_thread if it exists.
-
-        Threads can only be started once, so after wait has been called, we
-        want to make sure we clear the listening thread.
-        """
-        process = Process('cmd')
-        process._process = mock.Mock()
-        process._listening_thread = mock.Mock()
-
-        process.wait(0)
-
-        self.assertEqual(process._listening_thread, None)
-
-    def test_wait_joins_redirection_thread_if_it_exists(self):
-        """Tests wait() joins _listening_thread if it exists."""
-        process = Process('cmd')
-        process._process = mock.Mock()
-        mocked_thread = mock.Mock()
-        process._redirection_thread = mocked_thread
-
-        process.wait(0)
-
-        self.assertEqual(mocked_thread.join.called, True)
-
-    def test_wait_clears_redirection_thread_if_it_exists(self):
-        """Tests wait() joins _listening_thread if it exists.
-
-        Threads can only be started once, so after wait has been called, we
-        want to make sure we clear the listening thread.
-        """
-        process = Process('cmd')
-        process._process = mock.Mock()
-        process._redirection_thread = mock.Mock()
-
-        process.wait(0)
-
-        self.assertEqual(process._redirection_thread, None)
-
-    # stop
-
-    def test_stop_sets_stopped_to_true(self):
-        """Tests that stop() sets the _stopped attribute to True."""
-        process = Process('cmd')
-        process._process = mock.Mock()
-
-        process.stop()
-
-        self.assertTrue(process._stopped)
-
-    def test_stop_sets_stopped_to_true_before_process_kill(self):
-        """Tests that stop() sets the _stopped attribute to True.
-
-        This order is required to prevent the _exec_loop from calling
-        _on_terminate_callback when the user has killed the process.
-        """
-        verifier = mock.Mock()
-        verifier.passed = False
-
-        def test_call_order():
-            self.assertTrue(process._stopped)
-            verifier.passed = True
-
-        process = Process('cmd')
-        process._process = mock.Mock()
-        process._process.poll.return_value = None
-        process._kill_process = test_call_order
-        process._process.wait.side_effect = subprocess.TimeoutExpired('', '')
-
-        process.stop()
-
-        self.assertEqual(verifier.passed, True)
-
-    def test_stop_calls_wait(self):
-        """Tests that stop() also has the functionality of wait()."""
-        process = Process('cmd')
-        process._process = mock.Mock()
-        process.wait = mock.Mock()
-
-        process.stop()
-
-        self.assertEqual(process.wait.called, True)
-
-    # _redirect_output
-
-    def test_redirect_output_feeds_all_lines_to_on_output_callback(self):
-        """Tests that _redirect_output loops until all lines are parsed."""
-        received_list = []
-
-        def appender(line):
-            received_list.append(line)
-
-        process = Process('cmd')
-        process.set_on_output_callback(appender)
-        process._process = mock.Mock()
-        process._process.stdout.readline.side_effect = [b'a\n', b'b\n', b'']
-
-        process._redirect_output()
-
-        self.assertEqual(received_list[0], 'a')
-        self.assertEqual(received_list[1], 'b')
-        self.assertEqual(len(received_list), 2)
-
-    # __start_process
-
-    def test_start_process_returns_a_popen_object(self):
-        """Tests that a Popen object is returned by __start_process."""
-        with self.patch('subprocess.Popen', return_value='verification'):
-            self.assertEqual(Process._Process__start_process('cmd'),
-                             'verification')
-
-    # _exec_loop
-
-    def test_exec_loop_redirections_output(self):
-        """Tests that the _exec_loop function calls to redirect the output."""
-        process = Process('cmd')
-        Process._Process__start_process = mock.Mock()
-
-        with self.patch('Thread', FakeThread):
-            process._exec_loop()
-
-        self.assertEqual(process._redirection_thread.target,
-                         process._redirect_output)
-        self.assertEqual(process._redirection_thread.alive, True)
-
-    def test_exec_loop_waits_for_process(self):
-        """Tests that the _exec_loop waits for the process to complete before
-        returning."""
-        process = Process('cmd')
-        Process._Process__start_process = mock.Mock()
-
-        with self.patch('Thread', FakeThread):
-            process._exec_loop()
-
-        self.assertEqual(process._process.wait.called, True)
-
-    def test_exec_loop_loops_if_not_stopped(self):
-        process = Process('1st')
-        Process._Process__start_process = mock.Mock()
-        process._on_terminate_callback = mock.Mock(side_effect=[['2nd'], None])
-
-        with self.patch('Thread', FakeThread):
-            process._exec_loop()
-
-        self.assertEqual(Process._Process__start_process.call_count, 2)
-        self.assertEqual(Process._Process__start_process.call_args_list[0][0],
-                         (['1st'],))
-        self.assertEqual(Process._Process__start_process.call_args_list[1][0],
-                         (['2nd'],))
-
-    def test_exec_loop_does_not_loop_if_stopped(self):
-        process = Process('1st')
-        Process._Process__start_process = mock.Mock()
-        process._on_terminate_callback = mock.Mock(
-            side_effect=['2nd', None])
-        process._stopped = True
-
-        with self.patch('Thread', FakeThread):
-            process._exec_loop()
-
-        self.assertEqual(Process._Process__start_process.call_count, 1)
-        self.assertEqual(
-            Process._Process__start_process.call_args_list[0][0],
-            (['1st'],))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/test_bindings/__init__.py b/src/antlion/unit_tests/libs/test_bindings/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/libs/test_bindings/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/libs/test_bindings/all_tests_decorator_test.py b/src/antlion/unit_tests/libs/test_bindings/all_tests_decorator_test.py
deleted file mode 100644
index 87fb3e5..0000000
--- a/src/antlion/unit_tests/libs/test_bindings/all_tests_decorator_test.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-from antlion.libs.test_binding.all_tests_decorator import for_all_tests
-
-
-class AllTestDecoratorTest(unittest.TestCase):
-
-    def test_add_to_all_tests(self):
-
-        def decorator(decorated):
-            def inner(*args, **kwargs):
-                return 3
-
-            return inner
-
-        @for_all_tests(decorator)
-        class TestTest(object):
-            def test_a_thing(self):
-                return 4
-
-            def not_a_test(self):
-                return 4
-
-        test = TestTest()
-        self.assertEqual(test.test_a_thing(), 3)
-        self.assertEqual(test.not_a_test(), 4)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/test_bindings/binding_test.py b/src/antlion/unit_tests/libs/test_bindings/binding_test.py
deleted file mode 100644
index e76969b..0000000
--- a/src/antlion/unit_tests/libs/test_bindings/binding_test.py
+++ /dev/null
@@ -1,191 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-import mock
-
-from antlion import signals
-
-from antlion.libs.test_binding.binding import Binding
-
-
-class BindingTest(unittest.TestCase):
-
-    def test_instance_binding(self):
-        instance = object()
-        binding = Binding(object)
-
-        instance_binding = binding.__get__(instance, None)
-
-        self.assertEqual(instance_binding.instance_args, [instance])
-
-    def test_call_inner(self):
-        self.inner_args = []
-        self.inner_kwargs = {}
-
-        def inner(*args, **kwargs):
-            self.inner_args = args
-            self.inner_kwargs = kwargs
-
-        binding = Binding(inner, instance_args=['test'])
-
-        try:
-            binding('arg', v=2)
-        except signals.TestSignal:
-            pass
-
-        self.assertEqual(self.inner_args, ('test', 'arg'))
-        self.assertEqual(self.inner_kwargs, {'v': 2})
-
-    def test_call_inner_pass_on_none(self):
-
-        def inner(*args, **kwargs):
-            pass
-
-        binding = Binding(inner)
-
-        try:
-            binding()
-        except signals.TestPass:
-            pass
-
-    def test_call_inner_pass_on_true(self):
-
-        def inner(*args, **kwargs):
-            return True
-
-        binding = Binding(inner, instance_args=['test'])
-
-        try:
-            binding()
-        except signals.TestPass:
-            pass
-
-    def test_call_inner_fail_on_false(self):
-
-        def inner(*_, **__):
-            return False
-
-        binding = Binding(inner, instance_args=['test'])
-
-        try:
-            binding()
-        except signals.TestFailure:
-            pass
-
-    def test_call_inner_pass_through_signal(self):
-
-        def inner(*_, **__):
-            raise signals.TestPass('DETAILS')
-
-        binding = Binding(inner, instance_args=['test'])
-
-        try:
-            binding()
-        except signals.TestPass as signal:
-            self.assertEqual(signal.details, 'DETAILS')
-
-    def test_arg_modifier(self):
-        self.inner_args = []
-        self.inner_kwargs = {}
-
-        def arg_modifier(_, *args, **kwargs):
-            new_args = list(args) + ['new arg']
-            new_kwargs = dict(kwargs, kw='value')
-
-            return new_args, new_kwargs
-
-        def inner(*args, **kwargs):
-            self.inner_args = args
-            self.inner_kwargs = kwargs
-
-        binding = Binding(inner, arg_modifier=arg_modifier)
-
-        try:
-            binding('arg', v=2)
-        except signals.TestSignal:
-            pass
-
-        self.assertEqual(self.inner_args, ('arg', 'new arg'))
-        self.assertEqual(self.inner_kwargs, {'v': 2, 'kw': 'value'})
-
-    def test_call_before(self):
-
-        self.has_called_before = False
-
-        def before(*_, **__):
-            self.has_called_before = True
-
-        def inner(*_, **__):
-            self.assertTrue(self.has_called_before)
-
-        binding = Binding(inner, before=before)
-
-        try:
-            binding()
-        except signals.TestSignal:
-            pass
-
-        self.assertTrue(self.has_called_before)
-
-    def test_call_after(self):
-
-        self.has_called_after = False
-
-        def after(*_, **__):
-            self.has_called_after = True
-
-        def inner(*_, **__):
-            self.assertFalse(self.has_called_after)
-
-        binding = Binding(inner, after=after)
-
-        try:
-            binding()
-        except signals.TestSignal:
-            pass
-
-        self.assertTrue(self.has_called_after)
-
-    def test_signal_modify(self):
-
-        def inner(*_, **__):
-            raise signals.TestPass('DETAILS')
-
-        def signal_modifier(_, signal, *__, **___):
-            raise signals.TestFailure(signal.details)
-
-        binding = Binding(inner, signal_modifier=signal_modifier)
-
-        try:
-            binding()
-        except signals.TestFailure as signal:
-            self.assertEqual(signal.details, 'DETAILS')
-
-    def test_inner_attr_proxy_test(self):
-        def some_func():
-            pass
-
-        inner = some_func
-        inner.x = 10
-
-        binding = Binding(inner)
-
-        self.assertEqual(binding.x, inner.x)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/uicd/__init__.py b/src/antlion/unit_tests/libs/uicd/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/unit_tests/libs/uicd/__init__.py
+++ /dev/null
diff --git a/src/antlion/unit_tests/libs/uicd/uicd_cli_test.py b/src/antlion/unit_tests/libs/uicd/uicd_cli_test.py
deleted file mode 100644
index b2ac4c3..0000000
--- a/src/antlion/unit_tests/libs/uicd/uicd_cli_test.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import mock
-import unittest
-
-from antlion.libs.uicd.uicd_cli import UicdCli
-from antlion.libs.uicd.uicd_cli import UicdError
-
-_MOCK_WALK = {'/dir1': [('/dir1', (), ('file1', 'file2'))],
-              '/dir2': [('/dir2', ('dir3',), ('file3',)),
-                        ('/dir2/dir3', (), ())],
-              '/dir3': [('/dir3', (), ('file1',))]}
-
-
-def _mock_walk(path, **_):
-    return _MOCK_WALK.get(path, [])
-
-
-class UicdCliTest(unittest.TestCase):
-    """Tests the antlion.libs.uicd.uicd_cli.UicdCli class."""
-
-    # _set_workflows
-
-    @mock.patch('os.walk', _mock_walk)
-    @mock.patch('os.makedirs')
-    @mock.patch('tempfile.mkdtemp')
-    @mock.patch('shutil.rmtree')
-    @mock.patch.object(UicdCli, '_setup_cli')
-    def test_set_workflows_sets_correct_file_path(self, *_):
-        """Tests that the workflow name is mapped correctly to its path."""
-        nc = UicdCli('', '/dir1')
-        self.assertIn('file1', nc._workflows,
-                      'Workflow file not added to dictionary.')
-        self.assertEqual(nc._workflows['file1'], '/dir1/file1',
-                         'Workflow name does not point to the correct path.')
-
-    @mock.patch('os.walk', _mock_walk)
-    @mock.patch('os.makedirs')
-    @mock.patch('tempfile.mkdtemp')
-    @mock.patch('shutil.rmtree')
-    @mock.patch.object(UicdCli, '_setup_cli')
-    def test_set_workflows_adds_workflows_from_directories(self, *_):
-        """Tests that providing a directory name adds all files from that
-        directory. Also tests that no directories are added to the dictionary.
-        """
-        nc = UicdCli('', ['/dir1', '/dir2'])
-        for file_name in ['file1', 'file2', 'file3']:
-            self.assertIn(file_name, nc._workflows,
-                          'Workflow file not added to dictionary.')
-        for dir_name in ['dir1', 'dir2', 'dir3']:
-            self.assertNotIn(dir_name, nc._workflows,
-                             'Directories should not be added to dictionary.')
-
-    @mock.patch('os.walk', _mock_walk)
-    @mock.patch('os.makedirs')
-    @mock.patch('tempfile.mkdtemp')
-    @mock.patch('shutil.rmtree')
-    @mock.patch.object(UicdCli, '_setup_cli')
-    def test_set_workflows_rejects_duplicate_workflow_names(self, *_):
-        """Tests that _set_workflows raises an exception if two or more
-        workflows of the same name are provided.
-        """
-        expected_msg = 'Uicd workflows may not share the same name.'
-        with self.assertRaisesRegex(UicdError, expected_msg):
-            nc = UicdCli('', ['/dir1', '/dir3'])
-
-    # run
-
-    @mock.patch('os.makedirs')
-    @mock.patch('tempfile.mkdtemp', return_value='/base')
-    @mock.patch('shutil.rmtree')
-    @mock.patch.object(UicdCli, '_setup_cli')
-    @mock.patch.object(UicdCli, '_set_workflows')
-    def test_run_generates_correct_uicd_cmds(self, *_):
-        """Tests that the correct cmds are generated upon calling run()."""
-        nc = UicdCli('', [])
-        nc._workflows = {'test': '/workflows/test'}
-        # No log path set
-        with mock.patch('antlion.libs.proc.job.run') as job_run:
-            nc.run('SERIAL', 'test')
-            expected_cmd = 'java -jar /base/uicd-commandline.jar ' \
-                           '-d SERIAL -i /workflows/test'
-            job_run.assert_called_with(expected_cmd.split(), timeout=120)
-        # Log path set
-        nc._log_path = '/logs'
-        with mock.patch('antlion.libs.proc.job.run') as job_run:
-            nc.run('SERIAL', 'test')
-            expected_cmd = 'java -jar /base/uicd-commandline.jar ' \
-                           '-d SERIAL -i /workflows/test -o /logs'
-            job_run.assert_called_with(expected_cmd.split(), timeout=120)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/libs/version_selector_test.py b/src/antlion/unit_tests/libs/version_selector_test.py
deleted file mode 100755
index 7abfbe7..0000000
--- a/src/antlion/unit_tests/libs/version_selector_test.py
+++ /dev/null
@@ -1,300 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import os
-import sys
-
-# A temporary hack to prevent tests/libs/logging from being selected as the
-# python default logging module.
-sys.path[0] = os.path.join(sys.path[0], '../')
-import mock
-import shutil
-import tempfile
-import unittest
-
-from antlion import base_test
-from antlion.libs import version_selector
-
-from mobly.config_parser import TestRunConfig
-
-
-def versioning_decorator(min_sdk, max_sdk):
-    return version_selector.set_version(lambda ret, *_, **__: ret, min_sdk,
-                                        max_sdk)
-
-
-def versioning_decorator2(min_sdk, max_sdk):
-    return version_selector.set_version(lambda ret, *_, **__: ret, min_sdk,
-                                        max_sdk)
-
-
-def test_versioning(min_sdk, max_sdk):
-    return version_selector.set_version(lambda *_, **__: 1, min_sdk, max_sdk)
-
-
-@versioning_decorator(1, 10)
-def versioned_func(arg1, arg2):
-    return 'function 1', arg1, arg2
-
-
-@versioning_decorator(11, 11)
-def versioned_func(arg1, arg2):
-    return 'function 2', arg1, arg2
-
-
-@versioning_decorator(12, 20)
-def versioned_func(arg1, arg2):
-    return 'function 3', arg1, arg2
-
-
-@versioning_decorator(1, 20)
-def versioned_func_with_kwargs(_, asdf='jkl'):
-    return asdf
-
-
-def class_versioning_decorator(min_sdk, max_sdk):
-    return version_selector.set_version(lambda _, ret, *__, **___: ret,
-                                        min_sdk, max_sdk)
-
-
-class VersionedClass(object):
-    @classmethod
-    @class_versioning_decorator(1, 99999999)
-    def class_func(cls, arg1):
-        return cls, arg1
-
-    @staticmethod
-    @versioning_decorator(1, 99999999)
-    def static_func(arg1):
-        return arg1
-
-    @class_versioning_decorator(1, 99999999)
-    def instance_func(self, arg1):
-        return self, arg1
-
-
-class VersionedTestClass(base_test.BaseTestClass):
-    @mock.patch('mobly.utils.create_dir')
-    def __init__(self, configs, _):
-        super().__init__(configs)
-
-    @test_versioning(1, 1)
-    def test_1(self):
-        pass
-
-    @test_versioning(1, 1)
-    def test_2(self):
-        pass
-
-
-class VersionSelectorIntegrationTest(unittest.TestCase):
-    """Tests the antlion.libs.version_selector module."""
-
-    @classmethod
-    def setUpClass(cls):
-        cls.tmp_dir = tempfile.mkdtemp()
-
-    @classmethod
-    def tearDownClass(cls):
-        shutil.rmtree(cls.tmp_dir)
-
-    def test_raises_syntax_error_if_decorated_with_staticmethod_first(self):
-        try:
-
-            class SomeClass(object):
-                @versioning_decorator(1, 1)
-                @staticmethod
-                def test_1():
-                    pass
-        except SyntaxError:
-            pass
-        else:
-            self.fail('Placing the @staticmethod decorator after the '
-                      'versioning decorator should cause a SyntaxError.')
-
-    def test_raises_syntax_error_if_decorated_with_classmethod_first(self):
-        try:
-
-            class SomeClass(object):
-                @versioning_decorator(1, 1)
-                @classmethod
-                def test_1(cls):
-                    pass
-        except SyntaxError:
-            pass
-        else:
-            self.fail('Placing the @classmethod decorator after the '
-                      'versioning decorator should cause a SyntaxError.')
-
-    def test_overriding_an_undecorated_func_raises_a_syntax_error(self):
-        try:
-
-            class SomeClass(object):
-                def test_1(self):
-                    pass
-
-                @versioning_decorator(1, 1)
-                def test_1(self):
-                    pass
-        except SyntaxError:
-            pass
-        else:
-            self.fail('Overwriting a function that already exists without a '
-                      'versioning decorator should raise a SyntaxError.')
-
-    def test_func_decorated_with_2_different_versioning_decorators_causes_error(
-            self):
-        try:
-
-            class SomeClass(object):
-                @versioning_decorator(1, 1)
-                def test_1(self):
-                    pass
-
-                @versioning_decorator2(2, 2)
-                def test_1(self):
-                    pass
-        except SyntaxError:
-            pass
-        else:
-            self.fail('Using two different versioning decorators to version a '
-                      'single function should raise a SyntaxError.')
-
-    def test_func_decorated_with_overlapping_ranges_causes_value_error(self):
-        try:
-
-            class SomeClass(object):
-                @versioning_decorator(1, 2)
-                def test_1(self):
-                    pass
-
-                @versioning_decorator(2, 2)
-                def test_1(self):
-                    pass
-        except ValueError:
-            pass
-        else:
-            self.fail('Decorated functions with overlapping version ranges '
-                      'should raise a ValueError.')
-
-    def test_func_decorated_with_min_gt_max_causes_value_error(self):
-        try:
-
-            class SomeClass(object):
-                @versioning_decorator(2, 1)
-                def test_1(self):
-                    pass
-        except ValueError:
-            pass
-        else:
-            self.fail(
-                'If the min_version level is higher than the max_version '
-                'level, a ValueError should be raised.')
-
-    def test_calling_versioned_func_on_min_version_level_is_inclusive(self):
-        """Tests that calling some versioned function with the minimum version
-        level of the decorated function will call that function."""
-        ret = versioned_func(1, 'some_value')
-        self.assertEqual(
-            ret, ('function 1', 1, 'some_value'),
-            'Calling versioned_func(1, ...) did not return the '
-            'versioned function for the correct range.')
-
-    def test_calling_versioned_func_on_middle_level_works(self):
-        """Tests that calling some versioned function a version value within the
-        range of the decorated function will call that function."""
-        ret = versioned_func(16, 'some_value')
-        self.assertEqual(
-            ret, ('function 3', 16, 'some_value'),
-            'Calling versioned_func(16, ...) did not return the '
-            'versioned function for the correct range.')
-
-    def test_calling_versioned_func_on_max_version_level_is_inclusive(self):
-        """Tests that calling some versioned function with the maximum version
-        level of the decorated function will call that function."""
-        ret = versioned_func(10, 'some_value')
-        self.assertEqual(
-            ret, ('function 1', 10, 'some_value'),
-            'Calling versioned_func(10, ...) did not return the '
-            'versioned function for the correct range.')
-
-    def test_calling_versioned_func_on_min_equals_max_level_works(self):
-        """Tests that calling some versioned function with the maximum version
-        level of the decorated function will call that function."""
-        ret = versioned_func(11, 'some_value')
-        self.assertEqual(
-            ret, ('function 2', 11, 'some_value'),
-            'Calling versioned_func(10, ...) did not return the '
-            'versioned function for the correct range.')
-
-    def test_sending_kwargs_through_decorated_functions_works(self):
-        """Tests that calling some versioned function with the maximum version
-        level of the decorated function will call that function."""
-        ret = versioned_func_with_kwargs(1, asdf='some_value')
-        self.assertEqual(
-            ret, 'some_value',
-            'Calling versioned_func_with_kwargs(1, ...) did not'
-            'return the kwarg value properly.')
-
-    def test_kwargs_can_default_through_decorated_functions(self):
-        """Tests that calling some versioned function with the maximum version
-        level of the decorated function will call that function."""
-        ret = versioned_func_with_kwargs(1)
-        self.assertEqual(
-            ret, 'jkl', 'Calling versioned_func_with_kwargs(1) did not'
-            'return the default kwarg value properly.')
-
-    def test_staticmethod_can_be_called_properly(self):
-        """Tests that decorating a staticmethod will properly send the arguments
-        in the correct order.
-
-        i.e., we want to make sure self or cls do not get sent as the first
-        argument to the decorated staticmethod.
-        """
-        versioned_class = VersionedClass()
-        ret = versioned_class.static_func(123456)
-        self.assertEqual(
-            ret, 123456, 'The first argument was not set properly for calling '
-            'a staticmethod.')
-
-    def test_instance_method_can_be_called_properly(self):
-        """Tests that decorating a method will properly send the arguments
-        in the correct order.
-
-        i.e., we want to make sure self is the first argument returned.
-        """
-        versioned_class = VersionedClass()
-        ret = versioned_class.instance_func(123456)
-        self.assertEqual(
-            ret, (versioned_class, 123456),
-            'The arguments were not set properly for an instance '
-            'method.')
-
-    def test_classmethod_can_be_called_properly(self):
-        """Tests that decorating a classmethod will properly send the arguments
-        in the correct order.
-
-        i.e., we want to make sure cls is the first argument returned.
-        """
-        versioned_class = VersionedClass()
-        ret = versioned_class.class_func(123456)
-        self.assertEqual(
-            ret, (VersionedClass, 123456),
-            'The arguments were not set properly for a '
-            'classmethod.')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/test_runner_test.py b/src/antlion/unit_tests/test_runner_test.py
deleted file mode 100755
index 032b36a..0000000
--- a/src/antlion/unit_tests/test_runner_test.py
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import shutil
-import tempfile
-import unittest
-
-from mobly.config_parser import TestRunConfig
-from mock import Mock
-from mock import patch
-
-from antlion import test_runner
-
-
-class TestRunnerTest(unittest.TestCase):
-    def setUp(self):
-        self.tmp_dir = tempfile.mkdtemp()
-        self.base_mock_test_config = TestRunConfig()
-        self.base_mock_test_config.testbed_name = 'SampleTestBed'
-        self.base_mock_test_config.log_path = self.tmp_dir
-        self.base_mock_test_config.controller_configs = {'testpaths': ['./']}
-        self.base_mock_test_config.user_params = {
-            'icecream': 42,
-            'extra_param': 'haha'
-        }
-
-    def tearDown(self):
-        shutil.rmtree(self.tmp_dir)
-
-    @staticmethod
-    def create_test_classes(class_names):
-        return {class_name: Mock() for class_name in class_names}
-
-    @patch('antlion.records.TestResult')
-    @patch.object(test_runner.TestRunner, '_write_results_to_file')
-    def test_class_name_pattern_single(self, *_):
-        class_names = ['test_class_1', 'test_class_2']
-        pattern = 'test*1'
-        tr = test_runner.TestRunner(self.base_mock_test_config,
-                                    [(pattern, None)])
-
-        test_classes = self.create_test_classes(class_names)
-        tr.import_test_modules = Mock(return_value=test_classes)
-        tr.run()
-        self.assertTrue(test_classes[class_names[0]].called)
-        self.assertFalse(test_classes[class_names[1]].called)
-
-    @patch('antlion.records.TestResult')
-    @patch.object(test_runner.TestRunner, '_write_results_to_file')
-    def test_class_name_pattern_multi(self, *_):
-        class_names = ['test_class_1', 'test_class_2', 'other_name']
-        pattern = 'test_class*'
-        tr = test_runner.TestRunner(self.base_mock_test_config,
-                                    [(pattern, None)])
-
-        test_classes = self.create_test_classes(class_names)
-        tr.import_test_modules = Mock(return_value=test_classes)
-        tr.run()
-        self.assertTrue(test_classes[class_names[0]].called)
-        self.assertTrue(test_classes[class_names[1]].called)
-        self.assertFalse(test_classes[class_names[2]].called)
-
-    @patch('antlion.records.TestResult')
-    @patch.object(test_runner.TestRunner, '_write_results_to_file')
-    def test_class_name_pattern_question_mark(self, *_):
-        class_names = ['test_class_1', 'test_class_12']
-        pattern = 'test_class_?'
-        tr = test_runner.TestRunner(self.base_mock_test_config,
-                                    [(pattern, None)])
-
-        test_classes = self.create_test_classes(class_names)
-        tr.import_test_modules = Mock(return_value=test_classes)
-        tr.run()
-        self.assertTrue(test_classes[class_names[0]].called)
-        self.assertFalse(test_classes[class_names[1]].called)
-
-    @patch('antlion.records.TestResult')
-    @patch.object(test_runner.TestRunner, '_write_results_to_file')
-    def test_class_name_pattern_char_seq(self, *_):
-        class_names = ['test_class_1', 'test_class_2', 'test_class_3']
-        pattern = 'test_class_[1357]'
-        tr = test_runner.TestRunner(self.base_mock_test_config,
-                                    [(pattern, None)])
-
-        test_classes = self.create_test_classes(class_names)
-        tr.import_test_modules = Mock(return_value=test_classes)
-        tr.run()
-        self.assertTrue(test_classes[class_names[0]].called)
-        self.assertFalse(test_classes[class_names[1]].called)
-        self.assertTrue(test_classes[class_names[2]].called)
-
-    @patch('antlion.records.TestResult')
-    @patch.object(test_runner.TestRunner, 'dump_config')
-    @patch.object(test_runner.TestRunner, '_write_results_to_file')
-    @patch('antlion.test_runner.logger')
-    def test_class_logpath_contains_proper_directory(self, logger_mock, *_):
-        expected_timestamp = '1970-01-01_00-00-00-00-000000'
-        logger_mock.get_log_file_timestamp.return_value = expected_timestamp
-        tr = test_runner.TestRunner(self.base_mock_test_config,
-                                    [('MockTest', None)])
-        mock_class = Mock()
-        tr.import_test_modules = Mock(return_value={'MockTest': mock_class})
-        tr.run()
-
-        self.assertEqual(
-            mock_class.call_args_list[0][0][0].log_path,
-            os.path.join(self.tmp_dir, self.base_mock_test_config.testbed_name,
-                         expected_timestamp))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/src/antlion/unit_tests/test_suite.py b/src/antlion/unit_tests/test_suite.py
deleted file mode 100755
index 5d2d1ca..0000000
--- a/src/antlion/unit_tests/test_suite.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import subprocess
-import sys
-import tempfile
-import unittest
-
-
-class TestResult(object):
-    """
-    Attributes:
-        failures_future: The list of failed test cases during this test.
-        output_file: The file containing the stderr/stdout for this test.
-        test_suite: The unittest.TestSuite used. Useful for debugging.
-        test_filename: The *_test.py file that ran in this test.
-    """
-    def __init__(self, test_result, output_file, test_suite, test_filename):
-        self.test_result = test_result
-        self.output_file = output_file
-        self.test_suite = test_suite
-        self.test_filename = test_filename
-
-
-def run_all_unit_tests():
-    suite = unittest.TestSuite()
-    test_files = []
-    loader = unittest.TestLoader()
-    for root, _, files in os.walk(os.path.dirname(__file__)):
-        for filename in files:
-            if filename.endswith('_test.py'):
-                test_files.append(os.path.join(root, filename))
-                try:
-                    suite.addTest(loader.discover(root, filename))
-                except ImportError as e:
-                    if 'Start directory is not importable' not in e.args[0]:
-                        raise
-                    message = '. Did you forget to add an __init__.py file?'
-                    raise ImportError(e.args[0] + message)
-
-    output_dir = tempfile.mkdtemp()
-
-    results = []
-
-    for index, test in enumerate(suite._tests):
-        output_file = os.path.join(output_dir, 'test_%s.output' % index)
-
-        test_result = subprocess.Popen([sys.executable, test_files[index]],
-                                       stdout=open(output_file, 'w+'),
-                                       stderr=subprocess.STDOUT)
-        results.append(
-            TestResult(test_result, output_file, test, test_files[index]))
-
-    all_failures = []
-    for index, result in enumerate(results):
-        try:
-            failures = result.test_result.wait(timeout=60)
-            if failures:
-                print('Failure logs for %s:' % result.test_filename,
-                      file=sys.stderr)
-                with open(result.output_file, 'r') as out_file:
-                    print(out_file.read(), file=sys.stderr)
-                all_failures.append(result.test_filename + ' (failed)')
-        except subprocess.TimeoutExpired:
-            all_failures.append(result.test_filename + ' (timed out)')
-            print('The following test timed out: %r' % result.test_filename,
-                  file=sys.stderr)
-            with open(result.output_file, 'r') as out_file:
-                print(out_file.read(), file=sys.stderr)
-
-    # Prints a summary over all unit tests failed.
-    if all_failures:
-        print('The following tests failed:', file=sys.stderr)
-        for failure in all_failures:
-            print('    ', failure, file=sys.stderr)
-
-    exit(bool(all_failures))
-
-
-if __name__ == '__main__':
-    run_all_unit_tests()
diff --git a/src/antlion/utils.py b/src/antlion/utils.py
deleted file mode 100755
index 5782c84..0000000
--- a/src/antlion/utils.py
+++ /dev/null
@@ -1,1928 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import base64
-import concurrent.futures
-import copy
-import datetime
-import functools
-import ipaddress
-import json
-import logging
-import os
-import platform
-import random
-import re
-import signal
-import string
-import socket
-import subprocess
-import time
-import threading
-import traceback
-import zipfile
-from concurrent.futures import ThreadPoolExecutor
-
-from antlion import signals
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.libs.proc import job
-
-# File name length is limited to 255 chars on some OS, so we need to make sure
-# the file names we output fits within the limit.
-MAX_FILENAME_LEN = 255
-
-# All Fuchsia devices use this suffix for link-local mDNS host names.
-FUCHSIA_MDNS_TYPE = '_fuchsia._udp.local.'
-
-# Default max seconds it takes to Duplicate Address Detection to finish before
-# assigning an IPv6 address.
-DAD_TIMEOUT_SEC = 30
-
-
-class ActsUtilsError(Exception):
-    """Generic error raised for exceptions in ACTS utils."""
-
-
-class NexusModelNames:
-    # TODO(angli): This will be fixed later by angli.
-    ONE = 'sprout'
-    N5 = 'hammerhead'
-    N5v2 = 'bullhead'
-    N6 = 'shamu'
-    N6v2 = 'angler'
-    N6v3 = 'marlin'
-    N5v3 = 'sailfish'
-
-
-class DozeModeStatus:
-    ACTIVE = "ACTIVE"
-    IDLE = "IDLE"
-
-
-ascii_letters_and_digits = string.ascii_letters + string.digits
-valid_filename_chars = "-_." + ascii_letters_and_digits
-
-models = ("sprout", "occam", "hammerhead", "bullhead", "razor", "razorg",
-          "shamu", "angler", "volantis", "volantisg", "mantaray", "fugu",
-          "ryu", "marlin", "sailfish")
-
-manufacture_name_to_model = {
-    "flo": "razor",
-    "flo_lte": "razorg",
-    "flounder": "volantis",
-    "flounder_lte": "volantisg",
-    "dragon": "ryu"
-}
-
-GMT_to_olson = {
-    "GMT-9": "America/Anchorage",
-    "GMT-8": "US/Pacific",
-    "GMT-7": "US/Mountain",
-    "GMT-6": "US/Central",
-    "GMT-5": "US/Eastern",
-    "GMT-4": "America/Barbados",
-    "GMT-3": "America/Buenos_Aires",
-    "GMT-2": "Atlantic/South_Georgia",
-    "GMT-1": "Atlantic/Azores",
-    "GMT+0": "Africa/Casablanca",
-    "GMT+1": "Europe/Amsterdam",
-    "GMT+2": "Europe/Athens",
-    "GMT+3": "Europe/Moscow",
-    "GMT+4": "Asia/Baku",
-    "GMT+5": "Asia/Oral",
-    "GMT+6": "Asia/Almaty",
-    "GMT+7": "Asia/Bangkok",
-    "GMT+8": "Asia/Hong_Kong",
-    "GMT+9": "Asia/Tokyo",
-    "GMT+10": "Pacific/Guam",
-    "GMT+11": "Pacific/Noumea",
-    "GMT+12": "Pacific/Fiji",
-    "GMT+13": "Pacific/Tongatapu",
-    "GMT-11": "Pacific/Midway",
-    "GMT-10": "Pacific/Honolulu"
-}
-
-
-def abs_path(path):
-    """Resolve the '.' and '~' in a path to get the absolute path.
-
-    Args:
-        path: The path to expand.
-
-    Returns:
-        The absolute path of the input path.
-    """
-    return os.path.abspath(os.path.expanduser(path))
-
-
-def get_current_epoch_time():
-    """Current epoch time in milliseconds.
-
-    Returns:
-        An integer representing the current epoch time in milliseconds.
-    """
-    return int(round(time.time() * 1000))
-
-
-def get_current_human_time():
-    """Returns the current time in human readable format.
-
-    Returns:
-        The current time stamp in Month-Day-Year Hour:Min:Sec format.
-    """
-    return time.strftime("%m-%d-%Y %H:%M:%S ")
-
-
-def epoch_to_human_time(epoch_time):
-    """Converts an epoch timestamp to human readable time.
-
-    This essentially converts an output of get_current_epoch_time to an output
-    of get_current_human_time
-
-    Args:
-        epoch_time: An integer representing an epoch timestamp in milliseconds.
-
-    Returns:
-        A time string representing the input time.
-        None if input param is invalid.
-    """
-    if isinstance(epoch_time, int):
-        try:
-            d = datetime.datetime.fromtimestamp(epoch_time / 1000)
-            return d.strftime("%m-%d-%Y %H:%M:%S ")
-        except ValueError:
-            return None
-
-
-def get_timezone_olson_id():
-    """Return the Olson ID of the local (non-DST) timezone.
-
-    Returns:
-        A string representing one of the Olson IDs of the local (non-DST)
-        timezone.
-    """
-    tzoffset = int(time.timezone / 3600)
-    gmt = None
-    if tzoffset <= 0:
-        gmt = "GMT+{}".format(-tzoffset)
-    else:
-        gmt = "GMT-{}".format(tzoffset)
-    return GMT_to_olson[gmt]
-
-
-def get_next_device(test_bed_controllers, used_devices):
-    """Gets the next device in a list of testbed controllers
-
-    Args:
-        test_bed_controllers: A list of testbed controllers of a particular
-            type, for example a list ACTS Android devices.
-        used_devices: A list of devices that have been used.  This can be a
-            mix of devices, for example a fuchsia device and an Android device.
-    Returns:
-        The next device in the test_bed_controllers list or None if there are
-        no items that are not in the used devices list.
-    """
-    if test_bed_controllers:
-        device_list = test_bed_controllers
-    else:
-        raise ValueError('test_bed_controllers is empty.')
-    for used_device in used_devices:
-        if used_device in device_list:
-            device_list.remove(used_device)
-    if device_list:
-        return device_list[0]
-    else:
-        return None
-
-
-def find_files(paths, file_predicate):
-    """Locate files whose names and extensions match the given predicate in
-    the specified directories.
-
-    Args:
-        paths: A list of directory paths where to find the files.
-        file_predicate: A function that returns True if the file name and
-          extension are desired.
-
-    Returns:
-        A list of files that match the predicate.
-    """
-    file_list = []
-    if not isinstance(paths, list):
-        paths = [paths]
-    for path in paths:
-        p = abs_path(path)
-        for dirPath, subdirList, fileList in os.walk(p):
-            for fname in fileList:
-                name, ext = os.path.splitext(fname)
-                if file_predicate(name, ext):
-                    file_list.append((dirPath, name, ext))
-    return file_list
-
-
-def load_config(file_full_path, log_errors=True):
-    """Loads a JSON config file.
-
-    Returns:
-        A JSON object.
-    """
-    with open(file_full_path, 'r') as f:
-        try:
-            return json.load(f)
-        except Exception as e:
-            if log_errors:
-                logging.error("Exception error to load %s: %s", f, e)
-            raise
-
-
-def load_file_to_base64_str(f_path):
-    """Loads the content of a file into a base64 string.
-
-    Args:
-        f_path: full path to the file including the file name.
-
-    Returns:
-        A base64 string representing the content of the file in utf-8 encoding.
-    """
-    path = abs_path(f_path)
-    with open(path, 'rb') as f:
-        f_bytes = f.read()
-        base64_str = base64.b64encode(f_bytes).decode("utf-8")
-        return base64_str
-
-
-def dump_string_to_file(content, file_path, mode='w'):
-    """ Dump content of a string to
-
-    Args:
-        content: content to be dumped to file
-        file_path: full path to the file including the file name.
-        mode: file open mode, 'w' (truncating file) by default
-    :return:
-    """
-    full_path = abs_path(file_path)
-    with open(full_path, mode) as f:
-        f.write(content)
-
-
-def list_of_dict_to_dict_of_dict(list_of_dicts, dict_key):
-    """Transforms a list of dicts to a dict of dicts.
-
-    For instance:
-    >>> list_of_dict_to_dict_of_dict([{'a': '1', 'b':'2'},
-    >>>                               {'a': '3', 'b':'4'}],
-    >>>                              'b')
-
-    returns:
-
-    >>> {'2': {'a': '1', 'b':'2'},
-    >>>  '4': {'a': '3', 'b':'4'}}
-
-    Args:
-        list_of_dicts: A list of dictionaries.
-        dict_key: The key in the inner dict to be used as the key for the
-                  outer dict.
-    Returns:
-        A dict of dicts.
-    """
-    return {d[dict_key]: d for d in list_of_dicts}
-
-
-def dict_purge_key_if_value_is_none(dictionary):
-    """Removes all pairs with value None from dictionary."""
-    for k, v in dict(dictionary).items():
-        if v is None:
-            del dictionary[k]
-    return dictionary
-
-
-def find_field(item_list, cond, comparator, target_field):
-    """Finds the value of a field in a dict object that satisfies certain
-    conditions.
-
-    Args:
-        item_list: A list of dict objects.
-        cond: A param that defines the condition.
-        comparator: A function that checks if an dict satisfies the condition.
-        target_field: Name of the field whose value to be returned if an item
-            satisfies the condition.
-
-    Returns:
-        Target value or None if no item satisfies the condition.
-    """
-    for item in item_list:
-        if comparator(item, cond) and target_field in item:
-            return item[target_field]
-    return None
-
-
-def rand_ascii_str(length):
-    """Generates a random string of specified length, composed of ascii letters
-    and digits.
-
-    Args:
-        length: The number of characters in the string.
-
-    Returns:
-        The random string generated.
-    """
-    letters = [random.choice(ascii_letters_and_digits) for i in range(length)]
-    return ''.join(letters)
-
-
-def rand_hex_str(length):
-    """Generates a random string of specified length, composed of hex digits
-
-    Args:
-        length: The number of characters in the string.
-
-    Returns:
-        The random string generated.
-    """
-    letters = [random.choice(string.hexdigits) for i in range(length)]
-    return ''.join(letters)
-
-
-# Thead/Process related functions.
-def concurrent_exec(func, param_list):
-    """Executes a function with different parameters pseudo-concurrently.
-
-    This is basically a map function. Each element (should be an iterable) in
-    the param_list is unpacked and passed into the function. Due to Python's
-    GIL, there's no true concurrency. This is suited for IO-bound tasks.
-
-    Args:
-        func: The function that parforms a task.
-        param_list: A list of iterables, each being a set of params to be
-            passed into the function.
-
-    Returns:
-        A list of return values from each function execution. If an execution
-        caused an exception, the exception object will be the corresponding
-        result.
-    """
-    with concurrent.futures.ThreadPoolExecutor(max_workers=30) as executor:
-        # Start the load operations and mark each future with its params
-        future_to_params = {executor.submit(func, *p): p for p in param_list}
-        return_vals = []
-        for future in concurrent.futures.as_completed(future_to_params):
-            params = future_to_params[future]
-            try:
-                return_vals.append(future.result())
-            except Exception as exc:
-                print("{} generated an exception: {}".format(
-                    params, traceback.format_exc()))
-                return_vals.append(exc)
-        return return_vals
-
-
-def exe_cmd(*cmds):
-    """Executes commands in a new shell.
-
-    Args:
-        cmds: A sequence of commands and arguments.
-
-    Returns:
-        The output of the command run.
-
-    Raises:
-        OSError is raised if an error occurred during the command execution.
-    """
-    cmd = ' '.join(cmds)
-    proc = subprocess.Popen(cmd,
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE,
-                            shell=True)
-    (out, err) = proc.communicate()
-    if not err:
-        return out
-    raise OSError(err)
-
-
-def require_sl4a(android_devices):
-    """Makes sure sl4a connection is established on the given AndroidDevice
-    objects.
-
-    Args:
-        android_devices: A list of AndroidDevice objects.
-
-    Raises:
-        AssertionError is raised if any given android device does not have SL4A
-        connection established.
-    """
-    for ad in android_devices:
-        msg = "SL4A connection not established properly on %s." % ad.serial
-        assert ad.droid, msg
-
-
-def _assert_subprocess_running(proc):
-    """Checks if a subprocess has terminated on its own.
-
-    Args:
-        proc: A subprocess returned by subprocess.Popen.
-
-    Raises:
-        ActsUtilsError is raised if the subprocess has stopped.
-    """
-    ret = proc.poll()
-    if ret is not None:
-        out, err = proc.communicate()
-        raise ActsUtilsError("Process %d has terminated. ret: %d, stderr: %s,"
-                             " stdout: %s" % (proc.pid, ret, err, out))
-
-
-def start_standing_subprocess(cmd, check_health_delay=0, shell=True):
-    """Starts a long-running subprocess.
-
-    This is not a blocking call and the subprocess started by it should be
-    explicitly terminated with stop_standing_subprocess.
-
-    For short-running commands, you should use exe_cmd, which blocks.
-
-    You can specify a health check after the subprocess is started to make sure
-    it did not stop prematurely.
-
-    Args:
-        cmd: string, the command to start the subprocess with.
-        check_health_delay: float, the number of seconds to wait after the
-                            subprocess starts to check its health. Default is 0,
-                            which means no check.
-
-    Returns:
-        The subprocess that got started.
-    """
-    proc = subprocess.Popen(cmd,
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE,
-                            shell=shell,
-                            preexec_fn=os.setpgrp)
-    logging.debug("Start standing subprocess with cmd: %s", cmd)
-    if check_health_delay > 0:
-        time.sleep(check_health_delay)
-        _assert_subprocess_running(proc)
-    return proc
-
-
-def stop_standing_subprocess(proc, kill_signal=signal.SIGTERM):
-    """Stops a subprocess started by start_standing_subprocess.
-
-    Before killing the process, we check if the process is running, if it has
-    terminated, ActsUtilsError is raised.
-
-    Catches and ignores the PermissionError which only happens on Macs.
-
-    Args:
-        proc: Subprocess to terminate.
-    """
-    pid = proc.pid
-    logging.debug("Stop standing subprocess %d", pid)
-    _assert_subprocess_running(proc)
-    try:
-        os.killpg(pid, kill_signal)
-    except PermissionError:
-        pass
-
-
-def wait_for_standing_subprocess(proc, timeout=None):
-    """Waits for a subprocess started by start_standing_subprocess to finish
-    or times out.
-
-    Propagates the exception raised by the subprocess.wait(.) function.
-    The subprocess.TimeoutExpired exception is raised if the process timed-out
-    rather then terminating.
-
-    If no exception is raised: the subprocess terminated on its own. No need
-    to call stop_standing_subprocess() to kill it.
-
-    If an exception is raised: the subprocess is still alive - it did not
-    terminate. Either call stop_standing_subprocess() to kill it, or call
-    wait_for_standing_subprocess() to keep waiting for it to terminate on its
-    own.
-
-    Args:
-        p: Subprocess to wait for.
-        timeout: An integer number of seconds to wait before timing out.
-    """
-    proc.wait(timeout)
-
-
-def sync_device_time(ad):
-    """Sync the time of an android device with the current system time.
-
-    Both epoch time and the timezone will be synced.
-
-    Args:
-        ad: The android device to sync time on.
-    """
-    ad.adb.shell("settings put global auto_time 0", ignore_status=True)
-    ad.adb.shell("settings put global auto_time_zone 0", ignore_status=True)
-    droid = ad.droid
-    droid.setTimeZone(get_timezone_olson_id())
-    droid.setTime(get_current_epoch_time())
-
-
-# Timeout decorator block
-class TimeoutError(Exception):
-    """Exception for timeout decorator related errors.
-    """
-
-
-def _timeout_handler(signum, frame):
-    """Handler function used by signal to terminate a timed out function.
-    """
-    raise TimeoutError()
-
-
-def timeout(sec):
-    """A decorator used to add time out check to a function.
-
-    This only works in main thread due to its dependency on signal module.
-    Do NOT use it if the decorated funtion does not run in the Main thread.
-
-    Args:
-        sec: Number of seconds to wait before the function times out.
-            No timeout if set to 0
-
-    Returns:
-        What the decorated function returns.
-
-    Raises:
-        TimeoutError is raised when time out happens.
-    """
-
-    def decorator(func):
-
-        @functools.wraps(func)
-        def wrapper(*args, **kwargs):
-            if sec:
-                signal.signal(signal.SIGALRM, _timeout_handler)
-                signal.alarm(sec)
-            try:
-                return func(*args, **kwargs)
-            except TimeoutError:
-                raise TimeoutError(("Function {} timed out after {} "
-                                    "seconds.").format(func.__name__, sec))
-            finally:
-                signal.alarm(0)
-
-        return wrapper
-
-    return decorator
-
-
-def trim_model_name(model):
-    """Trim any prefix and postfix and return the android designation of the
-    model name.
-
-    e.g. "m_shamu" will be trimmed to "shamu".
-
-    Args:
-        model: model name to be trimmed.
-
-    Returns
-        Trimmed model name if one of the known model names is found.
-        None otherwise.
-    """
-    # Directly look up first.
-    if model in models:
-        return model
-    if model in manufacture_name_to_model:
-        return manufacture_name_to_model[model]
-    # If not found, try trimming off prefix/postfix and look up again.
-    tokens = re.split("_|-", model)
-    for t in tokens:
-        if t in models:
-            return t
-        if t in manufacture_name_to_model:
-            return manufacture_name_to_model[t]
-    return None
-
-
-def force_airplane_mode(ad, new_state, timeout_value=60):
-    """Force the device to set airplane mode on or off by adb shell command.
-
-    Args:
-        ad: android device object.
-        new_state: Turn on airplane mode if True.
-            Turn off airplane mode if False.
-        timeout_value: max wait time for 'adb wait-for-device'
-
-    Returns:
-        True if success.
-        False if timeout.
-    """
-
-    # Using timeout decorator.
-    # Wait for device with timeout. If after <timeout_value> seconds, adb
-    # is still waiting for device, throw TimeoutError exception.
-    @timeout(timeout_value)
-    def wait_for_device_with_timeout(ad):
-        ad.adb.wait_for_device()
-
-    try:
-        wait_for_device_with_timeout(ad)
-        ad.adb.shell("settings put global airplane_mode_on {}".format(
-            1 if new_state else 0))
-        ad.adb.shell("am broadcast -a android.intent.action.AIRPLANE_MODE")
-    except TimeoutError:
-        # adb wait for device timeout
-        return False
-    return True
-
-
-def get_battery_level(ad):
-    """Gets battery level from device
-
-    Returns:
-        battery_level: int indicating battery level
-    """
-    output = ad.adb.shell("dumpsys battery")
-    match = re.search(r"level: (?P<battery_level>\S+)", output)
-    battery_level = int(match.group("battery_level"))
-    return battery_level
-
-
-def get_device_usb_charging_status(ad):
-    """ Returns the usb charging status of the device.
-
-    Args:
-        ad: android device object
-
-    Returns:
-        True if charging
-        False if not charging
-     """
-    adb_shell_result = ad.adb.shell("dumpsys deviceidle get charging")
-    ad.log.info("Device Charging State: {}".format(adb_shell_result))
-    return adb_shell_result == 'true'
-
-
-def disable_usb_charging(ad):
-    """ Unplug device from usb charging.
-
-    Args:
-        ad: android device object
-
-    Returns:
-        True if device is unplugged
-        False otherwise
-    """
-    ad.adb.shell("dumpsys battery unplug")
-    if not get_device_usb_charging_status(ad):
-        return True
-    else:
-        ad.log.info("Could not disable USB charging")
-        return False
-
-
-def enable_usb_charging(ad):
-    """ Plug device to usb charging.
-
-    Args:
-        ad: android device object
-
-    Returns:
-        True if device is Plugged
-        False otherwise
-    """
-    ad.adb.shell("dumpsys battery reset")
-    if get_device_usb_charging_status(ad):
-        return True
-    else:
-        ad.log.info("Could not enable USB charging")
-        return False
-
-
-def enable_doze(ad):
-    """Force the device into doze mode.
-
-    Args:
-        ad: android device object.
-
-    Returns:
-        True if device is in doze mode.
-        False otherwise.
-    """
-    ad.adb.shell("dumpsys battery unplug")
-    ad.adb.shell("dumpsys deviceidle enable")
-    ad.adb.shell("dumpsys deviceidle force-idle")
-    ad.droid.goToSleepNow()
-    time.sleep(5)
-    adb_shell_result = ad.adb.shell("dumpsys deviceidle get deep")
-    if not adb_shell_result.startswith(DozeModeStatus.IDLE):
-        info = ("dumpsys deviceidle get deep: {}".format(adb_shell_result))
-        print(info)
-        return False
-    return True
-
-
-def disable_doze(ad):
-    """Force the device not in doze mode.
-
-    Args:
-        ad: android device object.
-
-    Returns:
-        True if device is not in doze mode.
-        False otherwise.
-    """
-    ad.adb.shell("dumpsys deviceidle disable")
-    ad.adb.shell("dumpsys battery reset")
-    adb_shell_result = ad.adb.shell("dumpsys deviceidle get deep")
-    if not adb_shell_result.startswith(DozeModeStatus.ACTIVE):
-        info = ("dumpsys deviceidle get deep: {}".format(adb_shell_result))
-        print(info)
-        return False
-    return True
-
-
-def enable_doze_light(ad):
-    """Force the device into doze light mode.
-
-    Args:
-        ad: android device object.
-
-    Returns:
-        True if device is in doze light mode.
-        False otherwise.
-    """
-    ad.adb.shell("dumpsys battery unplug")
-    ad.droid.goToSleepNow()
-    time.sleep(5)
-    ad.adb.shell("cmd deviceidle enable light")
-    ad.adb.shell("cmd deviceidle step light")
-    adb_shell_result = ad.adb.shell("dumpsys deviceidle get light")
-    if not adb_shell_result.startswith(DozeModeStatus.IDLE):
-        info = ("dumpsys deviceidle get light: {}".format(adb_shell_result))
-        print(info)
-        return False
-    return True
-
-
-def disable_doze_light(ad):
-    """Force the device not in doze light mode.
-
-    Args:
-        ad: android device object.
-
-    Returns:
-        True if device is not in doze light mode.
-        False otherwise.
-    """
-    ad.adb.shell("dumpsys battery reset")
-    ad.adb.shell("cmd deviceidle disable light")
-    adb_shell_result = ad.adb.shell("dumpsys deviceidle get light")
-    if not adb_shell_result.startswith(DozeModeStatus.ACTIVE):
-        info = ("dumpsys deviceidle get light: {}".format(adb_shell_result))
-        print(info)
-        return False
-    return True
-
-
-def set_ambient_display(ad, new_state):
-    """Set "Ambient Display" in Settings->Display
-
-    Args:
-        ad: android device object.
-        new_state: new state for "Ambient Display". True or False.
-    """
-    ad.adb.shell(
-        "settings put secure doze_enabled {}".format(1 if new_state else 0))
-
-
-def set_adaptive_brightness(ad, new_state):
-    """Set "Adaptive Brightness" in Settings->Display
-
-    Args:
-        ad: android device object.
-        new_state: new state for "Adaptive Brightness". True or False.
-    """
-    ad.adb.shell("settings put system screen_brightness_mode {}".format(
-        1 if new_state else 0))
-
-
-def set_auto_rotate(ad, new_state):
-    """Set "Auto-rotate" in QuickSetting
-
-    Args:
-        ad: android device object.
-        new_state: new state for "Auto-rotate". True or False.
-    """
-    ad.adb.shell("settings put system accelerometer_rotation {}".format(
-        1 if new_state else 0))
-
-
-def set_location_service(ad, new_state):
-    """Set Location service on/off in Settings->Location
-
-    Args:
-        ad: android device object.
-        new_state: new state for "Location service".
-            If new_state is False, turn off location service.
-            If new_state if True, set location service to "High accuracy".
-    """
-    ad.adb.shell("content insert --uri "
-                 " content://com.google.settings/partner --bind "
-                 "name:s:network_location_opt_in --bind value:s:1")
-    ad.adb.shell("content insert --uri "
-                 " content://com.google.settings/partner --bind "
-                 "name:s:use_location_for_services --bind value:s:1")
-    if new_state:
-        ad.adb.shell("settings put secure location_mode 3")
-    else:
-        ad.adb.shell("settings put secure location_mode 0")
-
-
-def set_mobile_data_always_on(ad, new_state):
-    """Set Mobile_Data_Always_On feature bit
-
-    Args:
-        ad: android device object.
-        new_state: new state for "mobile_data_always_on"
-            if new_state is False, set mobile_data_always_on disabled.
-            if new_state if True, set mobile_data_always_on enabled.
-    """
-    ad.adb.shell("settings put global mobile_data_always_on {}".format(
-        1 if new_state else 0))
-
-
-def bypass_setup_wizard(ad):
-    """Bypass the setup wizard on an input Android device
-
-    Args:
-        ad: android device object.
-
-    Returns:
-        True if Android device successfully bypassed the setup wizard.
-        False if failed.
-    """
-    try:
-        ad.adb.shell("am start -n \"com.google.android.setupwizard/"
-                     ".SetupWizardExitActivity\"")
-        logging.debug("No error during default bypass call.")
-    except AdbError as adb_error:
-        if adb_error.stdout == "ADB_CMD_OUTPUT:0":
-            if adb_error.stderr and \
-                    not adb_error.stderr.startswith("Error type 3\n"):
-                logging.error("ADB_CMD_OUTPUT:0, but error is %s " %
-                              adb_error.stderr)
-                raise adb_error
-            logging.debug("Bypass wizard call received harmless error 3: "
-                          "No setup to bypass.")
-        elif adb_error.stdout == "ADB_CMD_OUTPUT:255":
-            # Run it again as root.
-            ad.adb.root_adb()
-            logging.debug("Need root access to bypass setup wizard.")
-            try:
-                ad.adb.shell("am start -n \"com.google.android.setupwizard/"
-                             ".SetupWizardExitActivity\"")
-                logging.debug("No error during rooted bypass call.")
-            except AdbError as adb_error:
-                if adb_error.stdout == "ADB_CMD_OUTPUT:0":
-                    if adb_error.stderr and \
-                            not adb_error.stderr.startswith("Error type 3\n"):
-                        logging.error("Rooted ADB_CMD_OUTPUT:0, but error is "
-                                      "%s " % adb_error.stderr)
-                        raise adb_error
-                    logging.debug(
-                        "Rooted bypass wizard call received harmless "
-                        "error 3: No setup to bypass.")
-
-    # magical sleep to wait for the gservices override broadcast to complete
-    time.sleep(3)
-
-    provisioned_state = int(
-        ad.adb.shell("settings get global device_provisioned"))
-    if provisioned_state != 1:
-        logging.error("Failed to bypass setup wizard.")
-        return False
-    logging.debug("Setup wizard successfully bypassed.")
-    return True
-
-
-def parse_ping_ouput(ad, count, out, loss_tolerance=20):
-    """Ping Parsing util.
-
-    Args:
-        ad: Android Device Object.
-        count: Number of ICMP packets sent
-        out: shell output text of ping operation
-        loss_tolerance: Threshold after which flag test as false
-    Returns:
-        False: if packet loss is more than loss_tolerance%
-        True: if all good
-    """
-    result = re.search(
-        r"(\d+) packets transmitted, (\d+) received, (\d+)% packet loss", out)
-    if not result:
-        ad.log.info("Ping failed with %s", out)
-        return False
-
-    packet_loss = int(result.group(3))
-    packet_xmit = int(result.group(1))
-    packet_rcvd = int(result.group(2))
-    min_packet_xmit_rcvd = (100 - loss_tolerance) * 0.01
-    if (packet_loss > loss_tolerance
-            or packet_xmit < count * min_packet_xmit_rcvd
-            or packet_rcvd < count * min_packet_xmit_rcvd):
-        ad.log.error("%s, ping failed with loss more than tolerance %s%%",
-                     result.group(0), loss_tolerance)
-        return False
-    ad.log.info("Ping succeed with %s", result.group(0))
-    return True
-
-
-def adb_shell_ping(ad,
-                   count=120,
-                   dest_ip="www.google.com",
-                   timeout=200,
-                   loss_tolerance=20):
-    """Ping utility using adb shell.
-
-    Args:
-        ad: Android Device Object.
-        count: Number of ICMP packets to send
-        dest_ip: hostname or IP address
-                 default www.google.com
-        timeout: timeout for icmp pings to complete.
-    """
-    ping_cmd = "ping -W 1"
-    if count:
-        ping_cmd += " -c %d" % count
-    if dest_ip:
-        ping_cmd += " %s" % dest_ip
-    try:
-        ad.log.info("Starting ping test to %s using adb command %s", dest_ip,
-                    ping_cmd)
-        out = ad.adb.shell(ping_cmd, timeout=timeout, ignore_status=True)
-        if not parse_ping_ouput(ad, count, out, loss_tolerance):
-            return False
-        return True
-    except Exception as e:
-        ad.log.warning("Ping Test to %s failed with exception %s", dest_ip, e)
-        return False
-
-
-def zip_directory(zip_name, src_dir):
-    """Compress a directory to a .zip file.
-
-    This implementation is thread-safe.
-
-    Args:
-        zip_name: str, name of the generated archive
-        src_dir: str, path to the source directory
-    """
-    with zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_DEFLATED) as zip:
-        for root, dirs, files in os.walk(src_dir):
-            for file in files:
-                path = os.path.join(root, file)
-                zip.write(path, os.path.relpath(path, src_dir))
-
-
-def unzip_maintain_permissions(zip_path, extract_location):
-    """Unzip a .zip file while maintaining permissions.
-
-    Args:
-        zip_path: The path to the zipped file.
-        extract_location: the directory to extract to.
-    """
-    with zipfile.ZipFile(zip_path, 'r') as zip_file:
-        for info in zip_file.infolist():
-            _extract_file(zip_file, info, extract_location)
-
-
-def _extract_file(zip_file, zip_info, extract_location):
-    """Extracts a single entry from a ZipFile while maintaining permissions.
-
-    Args:
-        zip_file: A zipfile.ZipFile.
-        zip_info: A ZipInfo object from zip_file.
-        extract_location: The directory to extract to.
-    """
-    out_path = zip_file.extract(zip_info.filename, path=extract_location)
-    perm = zip_info.external_attr >> 16
-    os.chmod(out_path, perm)
-
-
-def get_directory_size(path):
-    """Computes the total size of the files in a directory, including subdirectories.
-
-    Args:
-        path: The path of the directory.
-    Returns:
-        The size of the provided directory.
-    """
-    total = 0
-    for dirpath, dirnames, filenames in os.walk(path):
-        for filename in filenames:
-            total += os.path.getsize(os.path.join(dirpath, filename))
-    return total
-
-
-def get_command_uptime(command_regex):
-    """Returns the uptime for a given command.
-
-    Args:
-        command_regex: A regex that matches the command line given. Must be
-            pgrep compatible.
-    """
-    pid = job.run('pgrep -f %s' % command_regex).stdout
-    runtime = ''
-    if pid:
-        runtime = job.run('ps -o etime= -p "%s"' % pid).stdout
-    return runtime
-
-
-def get_process_uptime(process):
-    """Returns the runtime in [[dd-]hh:]mm:ss, or '' if not running."""
-    pid = job.run('pidof %s' % process, ignore_status=True).stdout
-    runtime = ''
-    if pid:
-        runtime = job.run('ps -o etime= -p "%s"' % pid).stdout
-    return runtime
-
-
-def get_device_process_uptime(adb, process):
-    """Returns the uptime of a device process."""
-    pid = adb.shell('pidof %s' % process, ignore_status=True)
-    runtime = ''
-    if pid:
-        runtime = adb.shell('ps -o etime= -p "%s"' % pid)
-    return runtime
-
-
-def wait_until(func, timeout_s, condition=True, sleep_s=1.0):
-    """Executes a function repeatedly until condition is met.
-
-    Args:
-      func: The function pointer to execute.
-      timeout_s: Amount of time (in seconds) to wait before raising an
-                 exception.
-      condition: The ending condition of the WaitUntil loop.
-      sleep_s: The amount of time (in seconds) to sleep between each function
-               execution.
-
-    Returns:
-      The time in seconds before detecting a successful condition.
-
-    Raises:
-      TimeoutError: If the condition was never met and timeout is hit.
-    """
-    start_time = time.time()
-    end_time = start_time + timeout_s
-    count = 0
-    while True:
-        count += 1
-        if func() == condition:
-            return time.time() - start_time
-        if time.time() > end_time:
-            break
-        time.sleep(sleep_s)
-    raise TimeoutError('Failed to complete function %s in %d seconds having '
-                       'attempted %d times.' % (str(func), timeout_s, count))
-
-
-# Adapted from
-# https://en.wikibooks.org/wiki/Algorithm_Implementation/Strings/Levenshtein_distance#Python
-# Available under the Creative Commons Attribution-ShareAlike License
-def levenshtein(string1, string2):
-    """Returns the Levenshtein distance of two strings.
-    Uses Dynamic Programming approach, only keeping track of
-    two rows of the DP table at a time.
-
-    Args:
-      string1: String to compare to string2
-      string2: String to compare to string1
-
-    Returns:
-      distance: the Levenshtein distance between string1 and string2
-    """
-
-    if len(string1) < len(string2):
-        return levenshtein(string2, string1)
-
-    if len(string2) == 0:
-        return len(string1)
-
-    previous_row = range(len(string2) + 1)
-    for i, char1 in enumerate(string1):
-        current_row = [i + 1]
-        for j, char2 in enumerate(string2):
-            insertions = previous_row[j + 1] + 1
-            deletions = current_row[j] + 1
-            substitutions = previous_row[j] + (char1 != char2)
-            current_row.append(min(insertions, deletions, substitutions))
-        previous_row = current_row
-
-    return previous_row[-1]
-
-
-def string_similarity(s1, s2):
-    """Returns a similarity measurement based on Levenshtein distance.
-
-    Args:
-      s1: the string to compare to s2
-      s2: the string to compare to s1
-
-    Returns:
-      result: the similarity metric
-    """
-    lev = levenshtein(s1, s2)
-    try:
-        lev_ratio = float(lev) / max(len(s1), len(s2))
-        result = (1.0 - lev_ratio) * 100
-    except ZeroDivisionError:
-        result = 100 if not s2 else 0
-    return float(result)
-
-
-def run_concurrent_actions_no_raise(*calls):
-    """Concurrently runs all callables passed in using multithreading.
-
-    Example:
-
-    >>> def test_function_1(arg1, arg2):
-    >>>     return arg1, arg2
-    >>>
-    >>> def test_function_2(arg1, kwarg='kwarg'):
-    >>>     raise arg1(kwarg)
-    >>>
-    >>> run_concurrent_actions_no_raise(
-    >>>     lambda: test_function_1('arg1', 'arg2'),
-    >>>     lambda: test_function_2(IndexError, kwarg='kwarg'),
-    >>> )
-    >>> # Output:
-    >>> [('arg1', 'arg2'), IndexError('kwarg')]
-
-    Args:
-        *calls: A *args list of argumentless callable objects to be called. Note
-            that if a function has arguments it can be turned into an
-            argumentless function via the lambda keyword or functools.partial.
-
-    Returns:
-        An array of the returned values or exceptions received from calls,
-        respective of the order given.
-    """
-    with ThreadPoolExecutor(max_workers=len(calls)) as executor:
-        futures = [executor.submit(call) for call in calls]
-
-    results = []
-    for future in futures:
-        try:
-            results.append(future.result())
-        except Exception as e:
-            results.append(e)
-    return results
-
-
-def run_concurrent_actions(*calls):
-    """Runs all callables passed in concurrently using multithreading.
-
-    Examples:
-
-    >>> def test_function_1(arg1, arg2):
-    >>>     print(arg1, arg2)
-    >>>
-    >>> def test_function_2(arg1, kwarg='kwarg'):
-    >>>     raise arg1(kwarg)
-    >>>
-    >>> run_concurrent_actions(
-    >>>     lambda: test_function_1('arg1', 'arg2'),
-    >>>     lambda: test_function_2(IndexError, kwarg='kwarg'),
-    >>> )
-    >>> 'The above line raises IndexError("kwarg")'
-
-    Args:
-        *calls: A *args list of argumentless callable objects to be called. Note
-            that if a function has arguments it can be turned into an
-            argumentless function via the lambda keyword or functools.partial.
-
-    Returns:
-        An array of the returned values respective of the order of the calls
-        argument.
-
-    Raises:
-        If an exception is raised in any of the calls, the first exception
-        caught will be raised.
-    """
-    first_exception = None
-
-    class WrappedException(Exception):
-        """Raised when a passed-in callable raises an exception."""
-
-    def call_wrapper(call):
-        nonlocal first_exception
-
-        try:
-            return call()
-        except Exception as e:
-            logging.exception(e)
-            # Note that there is a potential race condition between two
-            # exceptions setting first_exception. Even if a locking mechanism
-            # was added to prevent this from happening, it is still possible
-            # that we capture the second exception as the first exception, as
-            # the active thread can swap to the thread that raises the second
-            # exception. There is no way to solve this with the tools we have
-            # here, so we do not bother. The effects this issue has on the
-            # system as a whole are negligible.
-            if first_exception is None:
-                first_exception = e
-            raise WrappedException(e)
-
-    with ThreadPoolExecutor(max_workers=len(calls)) as executor:
-        futures = [executor.submit(call_wrapper, call) for call in calls]
-
-    results = []
-    for future in futures:
-        try:
-            results.append(future.result())
-        except WrappedException:
-            # We do not need to raise here, since first_exception will already
-            # be set to the first exception raised by these callables.
-            break
-
-    if first_exception:
-        raise first_exception
-
-    return results
-
-
-def test_concurrent_actions(*calls, failure_exceptions=(Exception, )):
-    """Concurrently runs all passed in calls using multithreading.
-
-    If any callable raises an Exception found within failure_exceptions, the
-    test case is marked as a failure.
-
-    Example:
-    >>> def test_function_1(arg1, arg2):
-    >>>     print(arg1, arg2)
-    >>>
-    >>> def test_function_2(kwarg='kwarg'):
-    >>>     raise IndexError(kwarg)
-    >>>
-    >>> test_concurrent_actions(
-    >>>     lambda: test_function_1('arg1', 'arg2'),
-    >>>     lambda: test_function_2(kwarg='kwarg'),
-    >>>     failure_exceptions=IndexError
-    >>> )
-    >>> 'raises signals.TestFailure due to IndexError being raised.'
-
-    Args:
-        *calls: A *args list of argumentless callable objects to be called. Note
-            that if a function has arguments it can be turned into an
-            argumentless function via the lambda keyword or functools.partial.
-        failure_exceptions: A tuple of all possible Exceptions that will mark
-            the test as a FAILURE. Any exception that is not in this list will
-            mark the tests as UNKNOWN.
-
-    Returns:
-        An array of the returned values respective of the order of the calls
-        argument.
-
-    Raises:
-        signals.TestFailure if any call raises an Exception.
-    """
-    try:
-        return run_concurrent_actions(*calls)
-    except signals.TestFailure:
-        # Do not modify incoming test failures
-        raise
-    except failure_exceptions as e:
-        raise signals.TestFailure(e)
-
-
-class SuppressLogOutput(object):
-    """Context manager used to suppress all logging output for the specified
-    logger and level(s).
-    """
-
-    def __init__(self, logger=logging.getLogger(), log_levels=None):
-        """Create a SuppressLogOutput context manager
-
-        Args:
-            logger: The logger object to suppress
-            log_levels: Levels of log handlers to disable.
-        """
-
-        self._logger = logger
-        self._log_levels = log_levels or [
-            logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR,
-            logging.CRITICAL
-        ]
-        if isinstance(self._log_levels, int):
-            self._log_levels = [self._log_levels]
-        self._handlers = copy.copy(self._logger.handlers)
-
-    def __enter__(self):
-        for handler in self._handlers:
-            if handler.level in self._log_levels:
-                self._logger.removeHandler(handler)
-        return self
-
-    def __exit__(self, *_):
-        for handler in self._handlers:
-            self._logger.addHandler(handler)
-
-
-class BlockingTimer(object):
-    """Context manager used to block until a specified amount of time has
-     elapsed.
-     """
-
-    def __init__(self, secs):
-        """Initializes a BlockingTimer
-
-        Args:
-            secs: Number of seconds to wait before exiting
-        """
-        self._thread = threading.Timer(secs, lambda: None)
-
-    def __enter__(self):
-        self._thread.start()
-        return self
-
-    def __exit__(self, *_):
-        self._thread.join()
-
-
-def is_valid_ipv4_address(address):
-    try:
-        socket.inet_pton(socket.AF_INET, address)
-    except AttributeError:  # no inet_pton here, sorry
-        try:
-            socket.inet_aton(address)
-        except socket.error:
-            return False
-        return address.count('.') == 3
-    except socket.error:  # not a valid address
-        return False
-
-    return True
-
-
-def is_valid_ipv6_address(address):
-    if '%' in address:
-        address = address.split('%')[0]
-    try:
-        socket.inet_pton(socket.AF_INET6, address)
-    except socket.error:  # not a valid address
-        return False
-    return True
-
-
-def merge_dicts(*dict_args):
-    """ Merges args list of dictionaries into a single dictionary.
-
-    Args:
-        dict_args: an args list of dictionaries to be merged. If multiple
-            dictionaries share a key, the last in the list will appear in the
-            final result.
-    """
-    result = {}
-    for dictionary in dict_args:
-        result.update(dictionary)
-    return result
-
-
-def ascii_string(uc_string):
-    """Converts unicode string to ascii"""
-    return str(uc_string).encode('ASCII')
-
-
-def get_interface_ip_addresses(comm_channel, interface):
-    """Gets all of the ip addresses, ipv4 and ipv6, associated with a
-       particular interface name.
-
-    Args:
-        comm_channel: How to send commands to a device.  Can be ssh, adb serial,
-            etc.  Must have the run function implemented.
-        interface: The interface name on the device, ie eth0
-
-    Returns:
-        A list of dictionaries of the the various IP addresses:
-            ipv4_private: Any 192.168, 172.16, 10, or 169.254 addresses
-            ipv4_public: Any IPv4 public addresses
-            ipv6_link_local: Any fe80:: addresses
-            ipv6_private_local: Any fd00:: addresses
-            ipv6_public: Any publicly routable addresses
-    """
-    # Local imports are used here to prevent cyclic dependency.
-    from antlion.controllers.android_device import AndroidDevice
-    from antlion.controllers.fuchsia_device import FuchsiaDevice
-    from antlion.controllers.utils_lib.ssh.connection import SshConnection
-
-    is_local = comm_channel == job
-    if type(comm_channel) is AndroidDevice:
-        addrs = comm_channel.adb.shell(
-            f'ip -o addr show {interface} | awk \'{{gsub("/", " "); print $4}}\''
-        ).splitlines()
-    elif (type(comm_channel) is SshConnection or is_local):
-        addrs = comm_channel.run(
-            f'ip -o addr show {interface} | awk \'{{gsub("/", " "); print $4}}\''
-        ).stdout.splitlines()
-    elif type(comm_channel) is FuchsiaDevice:
-        interfaces = comm_channel.sl4f.netstack_lib.netstackListInterfaces()
-        err = interfaces.get('error')
-        if err is not None:
-            raise ActsUtilsError(f'Failed get_interface_ip_addresses: {err}')
-        addrs = []
-        for item in interfaces.get('result'):
-            if item['name'] != interface:
-                continue
-            for ipv4_address in item['ipv4_addresses']:
-                ipv4_address = '.'.join(map(str, ipv4_address))
-                addrs.append(ipv4_address)
-            for ipv6_address in item['ipv6_addresses']:
-                converted_ipv6_address = []
-                for octet in ipv6_address:
-                    converted_ipv6_address.append(format(octet, 'x').zfill(2))
-                ipv6_address = ''.join(converted_ipv6_address)
-                ipv6_address = (':'.join(
-                    ipv6_address[i:i + 4]
-                    for i in range(0, len(ipv6_address), 4)))
-                addrs.append(str(ipaddress.ip_address(ipv6_address)))
-    else:
-        raise ValueError('Unsupported method to send command to device.')
-
-    ipv4_private_local_addresses = []
-    ipv4_public_addresses = []
-    ipv6_link_local_addresses = []
-    ipv6_private_local_addresses = []
-    ipv6_public_addresses = []
-
-    for addr in addrs:
-        on_device_ip = ipaddress.ip_address(addr)
-        if on_device_ip.version == 4:
-            if on_device_ip.is_private:
-                ipv4_private_local_addresses.append(str(on_device_ip))
-            elif on_device_ip.is_global or (
-                    # Carrier private doesn't have a property, so we check if
-                    # all other values are left unset.
-                    not on_device_ip.is_reserved
-                    and not on_device_ip.is_unspecified
-                    and not on_device_ip.is_link_local
-                    and not on_device_ip.is_loopback
-                    and not on_device_ip.is_multicast):
-                ipv4_public_addresses.append(str(on_device_ip))
-        elif on_device_ip.version == 6:
-            if on_device_ip.is_link_local:
-                ipv6_link_local_addresses.append(str(on_device_ip))
-            elif on_device_ip.is_private:
-                ipv6_private_local_addresses.append(str(on_device_ip))
-            elif on_device_ip.is_global:
-                ipv6_public_addresses.append(str(on_device_ip))
-
-    return {
-        'ipv4_private': ipv4_private_local_addresses,
-        'ipv4_public': ipv4_public_addresses,
-        'ipv6_link_local': ipv6_link_local_addresses,
-        'ipv6_private_local': ipv6_private_local_addresses,
-        'ipv6_public': ipv6_public_addresses
-    }
-
-
-class AddressTimeout(signals.TestError):
-    pass
-
-
-class MultipleAddresses(signals.TestError):
-    pass
-
-
-def get_addr(comm_channel,
-             interface,
-             addr_type='ipv4_private',
-             timeout_sec=None):
-    """Get the requested type of IP address for an interface; if an address is
-    not available, retry until the timeout has been reached.
-
-    Args:
-        addr_type: Type of address to get as defined by the return value of
-            utils.get_interface_ip_addresses.
-        timeout_sec: Seconds to wait to acquire an address if there isn't one
-            already available. If fetching an IPv4 address, the default is 3
-            seconds. If IPv6, the default is 30 seconds for Duplicate Address
-            Detection.
-
-    Returns:
-        A string containing the requested address.
-
-    Raises:
-        TestAbortClass: timeout_sec is None and invalid addr_type
-        AddressTimeout: No address is available after timeout_sec
-        MultipleAddresses: Several addresses are available
-    """
-    if not timeout_sec:
-        if 'ipv4' in addr_type:
-            timeout_sec = 3
-        elif 'ipv6' in addr_type:
-            timeout_sec = DAD_TIMEOUT_SEC
-        else:
-            raise signals.TestAbortClass(f'Unknown addr_type "{addr_type}"')
-
-    start = time.time()
-    elapsed = 0
-
-    while elapsed <= timeout_sec:
-        ip_addrs = get_interface_ip_addresses(comm_channel,
-                                              interface)[addr_type]
-        if len(ip_addrs) > 1:
-            raise MultipleAddresses(
-                f'Expected only one "{addr_type}" address, got {ip_addrs}')
-        elif len(ip_addrs) == 1:
-            return ip_addrs[0]
-        elapsed = time.time() - start
-
-    raise AddressTimeout(
-        f'No available "{addr_type}" address after {timeout_sec}s')
-
-
-def get_interface_based_on_ip(comm_channel, desired_ip_address):
-    """Gets the interface for a particular IP
-
-    Args:
-        comm_channel: How to send commands to a device.  Can be ssh, adb serial,
-            etc.  Must have the run function implemented.
-        desired_ip_address: The IP address that is being looked for on a device.
-
-    Returns:
-        The name of the test interface.
-    """
-
-    desired_ip_address = desired_ip_address.split('%', 1)[0]
-    all_ips_and_interfaces = comm_channel.run(
-        '(ip -o -4 addr show; ip -o -6 addr show) | '
-        'awk \'{print $2" "$4}\'').stdout
-    for ip_address_and_interface in all_ips_and_interfaces.split('\n'):
-        if desired_ip_address in ip_address_and_interface:
-            return ip_address_and_interface.split()[1][:-1]
-    return None
-
-
-def renew_linux_ip_address(comm_channel, interface):
-    comm_channel.run('sudo ip link set %s down' % interface)
-    comm_channel.run('sudo ip link set %s up' % interface)
-    comm_channel.run('sudo dhclient -r %s' % interface)
-    comm_channel.run('sudo dhclient %s' % interface)
-
-
-def get_ping_command(dest_ip,
-                     count=3,
-                     interval=1000,
-                     timeout=1000,
-                     size=56,
-                     os_type='Linux',
-                     additional_ping_params=None):
-    """Builds ping command string based on address type, os, and params.
-
-    Args:
-        dest_ip: string, address to ping (ipv4 or ipv6)
-        count: int, number of requests to send
-        interval: int, time in seconds between requests
-        timeout: int, time in seconds to wait for response
-        size: int, number of bytes to send,
-        os_type: string, os type of the source device (supports 'Linux',
-            'Darwin')
-        additional_ping_params: string, command option flags to
-            append to the command string
-
-    Returns:
-        List of string, represetning the ping command.
-    """
-    if is_valid_ipv4_address(dest_ip):
-        ping_binary = 'ping'
-    elif is_valid_ipv6_address(dest_ip):
-        ping_binary = 'ping6'
-    else:
-        raise ValueError('Invalid ip addr: %s' % dest_ip)
-
-    if os_type == 'Darwin':
-        if is_valid_ipv6_address(dest_ip):
-            # ping6 on MacOS doesn't support timeout
-            logging.debug(
-                'Ignoring timeout, as ping6 on MacOS does not support it.')
-            timeout_flag = []
-        else:
-            timeout_flag = ['-t', str(timeout / 1000)]
-    elif os_type == 'Linux':
-        timeout_flag = ['-W', str(timeout / 1000)]
-    else:
-        raise ValueError('Invalid OS.  Only Linux and MacOS are supported.')
-
-    if not additional_ping_params:
-        additional_ping_params = ''
-
-    ping_cmd = [
-        ping_binary, *timeout_flag, '-c',
-        str(count), '-i',
-        str(interval / 1000), '-s',
-        str(size), additional_ping_params, dest_ip
-    ]
-    return ' '.join(ping_cmd)
-
-
-def ping(comm_channel,
-         dest_ip,
-         count=3,
-         interval=1000,
-         timeout=1000,
-         size=56,
-         additional_ping_params=None):
-    """ Generic linux ping function, supports local (acts.libs.proc.job) and
-    SshConnections (acts.libs.proc.job over ssh) to Linux based OSs and MacOS.
-
-    NOTES: This will work with Android over SSH, but does not function over ADB
-    as that has a unique return format.
-
-    Args:
-        comm_channel: communication channel over which to send ping command.
-            Must have 'run' function that returns at least command, stdout,
-            stderr, and exit_status (see acts.libs.proc.job)
-        dest_ip: address to ping (ipv4 or ipv6)
-        count: int, number of packets to send
-        interval: int, time in milliseconds between pings
-        timeout: int, time in milliseconds to wait for response
-        size: int, size of packets in bytes
-        additional_ping_params: string, command option flags to
-            append to the command string
-
-    Returns:
-        Dict containing:
-            command: string
-            exit_status: int (0 or 1)
-            stdout: string
-            stderr: string
-            transmitted: int, number of packets transmitted
-            received: int, number of packets received
-            packet_loss: int, percentage packet loss
-            time: int, time of ping command execution (in milliseconds)
-            rtt_min: float, minimum round trip time
-            rtt_avg: float, average round trip time
-            rtt_max: float, maximum round trip time
-            rtt_mdev: float, round trip time standard deviation
-
-        Any values that cannot be parsed are left as None
-    """
-    from antlion.controllers.utils_lib.ssh.connection import SshConnection
-    is_local = comm_channel == job
-    os_type = platform.system() if is_local else 'Linux'
-    ping_cmd = get_ping_command(dest_ip,
-                                count=count,
-                                interval=interval,
-                                timeout=timeout,
-                                size=size,
-                                os_type=os_type,
-                                additional_ping_params=additional_ping_params)
-
-    if (type(comm_channel) is SshConnection or is_local):
-        logging.debug(
-            'Running ping with parameters (count: %s, interval: %s, timeout: '
-            '%s, size: %s)' % (count, interval, timeout, size))
-        ping_result = comm_channel.run(ping_cmd, ignore_status=True)
-    else:
-        raise ValueError('Unsupported comm_channel: %s' % type(comm_channel))
-
-    if isinstance(ping_result, job.Error):
-        ping_result = ping_result.result
-
-    transmitted = None
-    received = None
-    packet_loss = None
-    time = None
-    rtt_min = None
-    rtt_avg = None
-    rtt_max = None
-    rtt_mdev = None
-
-    summary = re.search(
-        '([0-9]+) packets transmitted.*?([0-9]+) received.*?([0-9]+)% packet '
-        'loss.*?time ([0-9]+)', ping_result.stdout)
-    if summary:
-        transmitted = summary[1]
-        received = summary[2]
-        packet_loss = summary[3]
-        time = summary[4]
-
-    rtt_stats = re.search('= ([0-9.]+)/([0-9.]+)/([0-9.]+)/([0-9.]+)',
-                          ping_result.stdout)
-    if rtt_stats:
-        rtt_min = rtt_stats[1]
-        rtt_avg = rtt_stats[2]
-        rtt_max = rtt_stats[3]
-        rtt_mdev = rtt_stats[4]
-
-    return {
-        'command': ping_result.command,
-        'exit_status': ping_result.exit_status,
-        'stdout': ping_result.stdout,
-        'stderr': ping_result.stderr,
-        'transmitted': transmitted,
-        'received': received,
-        'packet_loss': packet_loss,
-        'time': time,
-        'rtt_min': rtt_min,
-        'rtt_avg': rtt_avg,
-        'rtt_max': rtt_max,
-        'rtt_mdev': rtt_mdev
-    }
-
-
-def can_ping(comm_channel,
-             dest_ip,
-             count=3,
-             interval=1000,
-             timeout=1000,
-             size=56,
-             additional_ping_params=None):
-    """Returns whether device connected via comm_channel can ping a dest
-    address"""
-    ping_results = ping(comm_channel,
-                        dest_ip,
-                        count=count,
-                        interval=interval,
-                        timeout=timeout,
-                        size=size,
-                        additional_ping_params=additional_ping_params)
-
-    return ping_results['exit_status'] == 0
-
-
-def ip_in_subnet(ip, subnet):
-    """Validate that ip is in a given subnet.
-
-    Args:
-        ip: string, ip address to verify (eg. '192.168.42.158')
-        subnet: string, subnet to check (eg. '192.168.42.0/24')
-
-    Returns:
-        True, if ip in subnet, else False
-    """
-    return ipaddress.ip_address(ip) in ipaddress.ip_network(subnet)
-
-
-def mac_address_str_to_list(mac_addr_str):
-    """Converts mac address string to list of decimal octets.
-
-    Args:
-        mac_addr_string: string, mac address
-            e.g. '12:34:56:78:9a:bc'
-
-    Returns
-        list, representing mac address octets in decimal
-            e.g. [18, 52, 86, 120, 154, 188]
-    """
-    return [int(octet, 16) for octet in mac_addr_str.split(':')]
-
-
-def mac_address_list_to_str(mac_addr_list):
-    """Converts list of decimal octets represeting mac address to string.
-
-    Args:
-        mac_addr_list: list, representing mac address octets in decimal
-            e.g. [18, 52, 86, 120, 154, 188]
-
-    Returns:
-        string, mac address
-            e.g. '12:34:56:78:9a:bc'
-    """
-    hex_list = []
-    for octet in mac_addr_list:
-        hex_octet = hex(octet)[2:]
-        if octet < 16:
-            hex_list.append('0%s' % hex_octet)
-        else:
-            hex_list.append(hex_octet)
-
-    return ':'.join(hex_list)
-
-
-def get_fuchsia_mdns_ipv6_address(device_mdns_name):
-    """Finds the IPv6 link-local address of a Fuchsia device matching a mDNS
-    name.
-
-    Args:
-        device_mdns_name: name of Fuchsia device (e.g. gig-clone-sugar-slash)
-
-    Returns:
-        string, IPv6 link-local address
-    """
-    import psutil
-    from zeroconf import IPVersion, Zeroconf
-
-    if not device_mdns_name:
-        return None
-
-    def mdns_query(interface, address):
-        logging.info(
-            f'Sending mDNS query for device "{device_mdns_name}" using "{address}"'
-        )
-        try:
-            zeroconf = Zeroconf(ip_version=IPVersion.V6Only,
-                                interfaces=[address])
-        except RuntimeError as e:
-            if 'No adapter found for IP address' in e.args[0]:
-                # Most likely, a device went offline and its control
-                # interface was deleted. This is acceptable since the
-                # device that went offline isn't guaranteed to be the
-                # device we're searching for.
-                logging.warning('No adapter found for "%s"' % address)
-                return None
-            raise
-
-        device_records = zeroconf.get_service_info(
-            FUCHSIA_MDNS_TYPE, device_mdns_name + '.' + FUCHSIA_MDNS_TYPE)
-
-        if device_records:
-            for device_address in device_records.parsed_addresses():
-                device_ip_address = ipaddress.ip_address(device_address)
-                scoped_address = '%s%%%s' % (device_address, interface)
-                if (device_ip_address.version == 6
-                        and device_ip_address.is_link_local
-                        and can_ping(job, dest_ip=scoped_address)):
-                    logging.info('Found device "%s" at "%s"' %
-                                 (device_mdns_name, scoped_address))
-                    zeroconf.close()
-                    del zeroconf
-                    return scoped_address
-
-        zeroconf.close()
-        del zeroconf
-        return None
-
-    with ThreadPoolExecutor() as executor:
-        futures = []
-
-        interfaces = psutil.net_if_addrs()
-        for interface in interfaces:
-            for addr in interfaces[interface]:
-                address = addr.address.split('%')[0]
-                if addr.family == socket.AF_INET6 and ipaddress.ip_address(
-                        address).is_link_local and address != 'fe80::1':
-                    futures.append(
-                        executor.submit(mdns_query, interface, address))
-
-        for future in futures:
-            addr = future.result()
-            if addr:
-                return addr
-
-    logging.error('Unable to find IP address for device "%s"' %
-                  device_mdns_name)
-    return None
-
-
-def get_device(devices, device_type):
-    """Finds a unique device with the specified "device_type" attribute from a
-    list. If none is found, defaults to the first device in the list.
-
-    Example:
-        get_device(android_devices, device_type="DUT")
-        get_device(fuchsia_devices, device_type="DUT")
-        get_device(android_devices + fuchsia_devices, device_type="DUT")
-
-    Args:
-        devices: A list of device controller objects.
-        device_type: (string) Type of device to find, specified by the
-            "device_type" attribute.
-
-    Returns:
-        The matching device controller object, or the first device in the list
-        if not found.
-
-    Raises:
-        ValueError is raised if none or more than one device is
-        matched.
-    """
-    if not devices:
-        raise ValueError('No devices available')
-
-    matches = [
-        d for d in devices
-        if hasattr(d, 'device_type') and d.device_type == device_type
-    ]
-
-    if len(matches) == 0:
-        # No matches for the specified "device_type", use the first device
-        # declared.
-        return devices[0]
-    if len(matches) > 1:
-        # Specifing multiple devices with the same "device_type" is a
-        # configuration error.
-        raise ValueError(
-            'More than one device matching "device_type" == "{}"'.format(
-                device_type))
-
-    return matches[0]
diff --git a/stubs/README.md b/stubs/README.md
new file mode 100644
index 0000000..07ec6ae
--- /dev/null
+++ b/stubs/README.md
@@ -0,0 +1,11 @@
+# Python typing stubs
+
+Contains typing stubs for Python packages that do not expose typing of their
+own. Generated initially with [`stubgen`][stubgen] then manually modified to
+satisfy [`mypy`][mypy].
+
+> TODO(http://b/285005406): Contribute type annotations to Mobly, bump the
+> version of Mobly, then remove these type stubs.
+
+[stubgen]: https://mypy.readthedocs.io/en/stable/stubgen.html
+[mypy]: https://mypy.readthedocs.io/en/stable/
diff --git a/src/antlion/__init__.py b/stubs/mobly/__init__.pyi
similarity index 100%
copy from src/antlion/__init__.py
copy to stubs/mobly/__init__.pyi
diff --git a/stubs/mobly/asserts.pyi b/stubs/mobly/asserts.pyi
new file mode 100644
index 0000000..0fa0557
--- /dev/null
+++ b/stubs/mobly/asserts.pyi
@@ -0,0 +1,104 @@
+from _typeshed import Incomplete
+from mobly import signals as signals
+
+def assert_equal(
+    first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_not_equal(
+    first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_almost_equal(
+    first,
+    second,
+    places: Incomplete | None = ...,
+    msg: Incomplete | None = ...,
+    delta: Incomplete | None = ...,
+    extras: Incomplete | None = ...,
+) -> None: ...
+def assert_not_almost_equal(
+    first,
+    second,
+    places: Incomplete | None = ...,
+    msg: Incomplete | None = ...,
+    delta: Incomplete | None = ...,
+    extras: Incomplete | None = ...,
+) -> None: ...
+def assert_in(
+    member, container, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_not_in(
+    member, container, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_is(
+    expr1, expr2, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_is_not(
+    expr1, expr2, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_count_equal(
+    first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_less(
+    a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_less_equal(
+    a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_greater(
+    a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_greater_equal(
+    a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_is_none(
+    obj, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_is_not_none(
+    obj, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_is_instance(
+    obj, cls, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_not_is_instance(
+    obj, cls, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_regex(
+    text, expected_regex, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_not_regex(
+    text,
+    unexpected_regex,
+    msg: Incomplete | None = ...,
+    extras: Incomplete | None = ...,
+) -> None: ...
+def assert_raises(
+    expected_exception, extras: Incomplete | None = ..., *args, **kwargs
+): ...
+def assert_raises_regex(
+    expected_exception, expected_regex, extras: Incomplete | None = ..., *args, **kwargs
+): ...
+def assert_true(expr, msg, extras: Incomplete | None = ...) -> None: ...
+def assert_false(expr, msg, extras: Incomplete | None = ...) -> None: ...
+def skip(reason, extras: Incomplete | None = ...) -> None: ...
+def skip_if(expr, reason, extras: Incomplete | None = ...) -> None: ...
+def abort_class(reason, extras: Incomplete | None = ...) -> None: ...
+def abort_class_if(expr, reason, extras: Incomplete | None = ...) -> None: ...
+def abort_all(reason, extras: Incomplete | None = ...) -> None: ...
+def abort_all_if(expr, reason, extras: Incomplete | None = ...) -> None: ...
+def fail(msg, extras: Incomplete | None = ...) -> None: ...
+def explicit_pass(msg, extras: Incomplete | None = ...) -> None: ...
+
+class _AssertRaisesContext:
+    expected: Incomplete
+    failureException: Incomplete
+    expected_regexp: Incomplete
+    extras: Incomplete
+    def __init__(
+        self,
+        expected,
+        expected_regexp: Incomplete | None = ...,
+        extras: Incomplete | None = ...,
+    ) -> None: ...
+    def __enter__(self): ...
+    exception: Incomplete
+    def __exit__(self, exc_type, exc_value, tb): ...
diff --git a/stubs/mobly/base_instrumentation_test.pyi b/stubs/mobly/base_instrumentation_test.pyi
new file mode 100644
index 0000000..b06f307
--- /dev/null
+++ b/stubs/mobly/base_instrumentation_test.pyi
@@ -0,0 +1,97 @@
+from enum import Enum
+
+from _typeshed import Incomplete
+from mobly import base_test as base_test
+from mobly import records as records
+from mobly import signals as signals
+from mobly import utils as utils
+
+class _InstrumentationStructurePrefixes:
+    STATUS: str
+    STATUS_CODE: str
+    RESULT: str
+    CODE: str
+    FAILED: str
+
+class _InstrumentationKnownStatusKeys:
+    CLASS: str
+    ERROR: str
+    STACK: str
+    TEST: str
+    STREAM: str
+
+class _InstrumentationStatusCodes:
+    UNKNOWN: Incomplete
+    OK: str
+    START: str
+    IN_PROGRESS: str
+    ERROR: str
+    FAILURE: str
+    IGNORED: str
+    ASSUMPTION_FAILURE: str
+
+class _InstrumentationStatusCodeCategories:
+    TIMING: Incomplete
+    PASS: Incomplete
+    FAIL: Incomplete
+    SKIPPED: Incomplete
+
+class _InstrumentationKnownResultKeys:
+    LONGMSG: str
+    SHORTMSG: str
+
+class _InstrumentationResultSignals:
+    FAIL: str
+    PASS: str
+
+class _InstrumentationBlockStates(Enum):
+    UNKNOWN: int
+    METHOD: int
+    RESULT: int
+
+class _InstrumentationBlock:
+    state: Incomplete
+    prefix: Incomplete
+    previous_instrumentation_block: Incomplete
+    error_message: str
+    status_code: Incomplete
+    current_key: Incomplete
+    known_keys: Incomplete
+    unknown_keys: Incomplete
+    begin_time: Incomplete
+    def __init__(
+        self,
+        state=...,
+        prefix: Incomplete | None = ...,
+        previous_instrumentation_block: Incomplete | None = ...,
+    ) -> None: ...
+    @property
+    def is_empty(self): ...
+    def set_error_message(self, error_message) -> None: ...
+    def set_status_code(self, status_code_line) -> None: ...
+    def set_key(self, structure_prefix, key_line) -> None: ...
+    def add_value(self, line) -> None: ...
+    def transition_state(self, new_state): ...
+
+class _InstrumentationBlockFormatter:
+    DEFAULT_INSTRUMENTATION_METHOD_NAME: str
+    def __init__(self, instrumentation_block) -> None: ...
+    def create_test_record(self, mobly_test_class): ...
+    def has_completed_result_block_format(self, error_message): ...
+
+class InstrumentationTestMixin:
+    DEFAULT_INSTRUMENTATION_OPTION_PREFIX: str
+    DEFAULT_INSTRUMENTATION_ERROR_MESSAGE: str
+    def parse_instrumentation_options(self, parameters: Incomplete | None = ...): ...
+    def run_instrumentation_test(
+        self,
+        device,
+        package,
+        options: Incomplete | None = ...,
+        prefix: Incomplete | None = ...,
+        runner: Incomplete | None = ...,
+    ): ...
+
+class BaseInstrumentationTestClass(
+    InstrumentationTestMixin, base_test.BaseTestClass
+): ...
diff --git a/stubs/mobly/base_suite.pyi b/stubs/mobly/base_suite.pyi
new file mode 100644
index 0000000..48912a5
--- /dev/null
+++ b/stubs/mobly/base_suite.pyi
@@ -0,0 +1,18 @@
+import abc
+
+from _typeshed import Incomplete
+
+class BaseSuite(abc.ABC, metaclass=abc.ABCMeta):
+    def __init__(self, runner, config) -> None: ...
+    @property
+    def user_params(self): ...
+    def add_test_class(
+        self,
+        clazz,
+        config: Incomplete | None = ...,
+        tests: Incomplete | None = ...,
+        name_suffix: Incomplete | None = ...,
+    ) -> None: ...
+    @abc.abstractmethod
+    def setup_suite(self, config): ...
+    def teardown_suite(self) -> None: ...
diff --git a/stubs/mobly/base_test.pyi b/stubs/mobly/base_test.pyi
new file mode 100644
index 0000000..1f1d7d1
--- /dev/null
+++ b/stubs/mobly/base_test.pyi
@@ -0,0 +1,68 @@
+from _typeshed import Incomplete
+from mobly import controller_manager as controller_manager
+from mobly import expects as expects
+from mobly import records as records
+from mobly import runtime_test_info as runtime_test_info
+from mobly import signals as signals
+from mobly import utils as utils
+
+TEST_CASE_TOKEN: str
+RESULT_LINE_TEMPLATE: Incomplete
+TEST_STAGE_BEGIN_LOG_TEMPLATE: str
+TEST_STAGE_END_LOG_TEMPLATE: str
+STAGE_NAME_PRE_RUN: str
+STAGE_NAME_SETUP_GENERATED_TESTS: str
+STAGE_NAME_SETUP_CLASS: str
+STAGE_NAME_SETUP_TEST: str
+STAGE_NAME_TEARDOWN_TEST: str
+STAGE_NAME_TEARDOWN_CLASS: str
+STAGE_NAME_CLEAN_UP: str
+ATTR_REPEAT_CNT: str
+ATTR_MAX_RETRY_CNT: str
+ATTR_MAX_CONSEC_ERROR: str
+
+class Error(Exception): ...
+
+def repeat(count, max_consecutive_error: Incomplete | None = ...): ...
+def retry(max_count): ...
+
+class BaseTestClass:
+    TAG: Incomplete
+    tests: Incomplete
+    root_output_path: Incomplete
+    log_path: Incomplete
+    test_bed_name: Incomplete
+    testbed_name: Incomplete
+    user_params: Incomplete
+    results: Incomplete
+    summary_writer: Incomplete
+    controller_configs: Incomplete
+    def __init__(self, configs) -> None: ...
+    def unpack_userparams(
+        self,
+        req_param_names: Incomplete | None = ...,
+        opt_param_names: Incomplete | None = ...,
+        **kwargs,
+    ) -> None: ...
+    def register_controller(
+        self, module, required: bool = ..., min_number: int = ...
+    ): ...
+    def pre_run(self) -> None: ...
+    def setup_generated_tests(self) -> None: ...
+    def setup_class(self) -> None: ...
+    def teardown_class(self) -> None: ...
+    def setup_test(self) -> None: ...
+    def teardown_test(self) -> None: ...
+    def on_fail(self, record) -> None: ...
+    def on_pass(self, record) -> None: ...
+    def on_skip(self, record) -> None: ...
+    def record_data(self, content) -> None: ...
+    current_test_info: Incomplete
+    def exec_one_test(
+        self, test_name, test_method, record: Incomplete | None = ...
+    ): ...
+    def generate_tests(
+        self, test_logic, name_func, arg_sets, uid_func: Incomplete | None = ...
+    ) -> None: ...
+    def get_existing_test_names(self): ...
+    def run(self, test_names: Incomplete | None = ...): ...
diff --git a/stubs/mobly/config_parser.pyi b/stubs/mobly/config_parser.pyi
new file mode 100644
index 0000000..f9f74ff
--- /dev/null
+++ b/stubs/mobly/config_parser.pyi
@@ -0,0 +1,20 @@
+from _typeshed import Incomplete
+from mobly import keys as keys
+from mobly import utils as utils
+
+ENV_MOBLY_LOGPATH: str
+
+class MoblyConfigError(Exception): ...
+
+def load_test_config_file(test_config_path, tb_filters: Incomplete | None = ...): ...
+
+class TestRunConfig:
+    log_path: str
+    test_bed_name: Incomplete
+    testbed_name: Incomplete
+    controller_configs: Incomplete
+    user_params: Incomplete
+    summary_writer: Incomplete
+    test_class_name_suffix: Incomplete
+    def __init__(self) -> None: ...
+    def copy(self): ...
diff --git a/stubs/mobly/controller_manager.pyi b/stubs/mobly/controller_manager.pyi
new file mode 100644
index 0000000..6e59a30
--- /dev/null
+++ b/stubs/mobly/controller_manager.pyi
@@ -0,0 +1,15 @@
+from _typeshed import Incomplete
+from mobly import expects as expects
+from mobly import records as records
+from mobly import signals as signals
+
+def verify_controller_module(module) -> None: ...
+
+class ControllerManager:
+    controller_configs: Incomplete
+    def __init__(self, class_name, controller_configs) -> None: ...
+    def register_controller(
+        self, module, required: bool = ..., min_number: int = ...
+    ): ...
+    def unregister_controllers(self) -> None: ...
+    def get_controller_info_records(self): ...
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py b/stubs/mobly/controllers/__init__.pyi
similarity index 100%
copy from src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py
copy to stubs/mobly/controllers/__init__.pyi
diff --git a/stubs/mobly/controllers/android_device.pyi b/stubs/mobly/controllers/android_device.pyi
new file mode 100644
index 0000000..80a352b
--- /dev/null
+++ b/stubs/mobly/controllers/android_device.pyi
@@ -0,0 +1,141 @@
+import enum
+import logging
+from collections.abc import Generator
+
+from _typeshed import Incomplete
+from mobly import runtime_test_info as runtime_test_info
+from mobly import utils as utils
+from mobly.controllers.android_device_lib import adb as adb
+from mobly.controllers.android_device_lib import errors as errors
+from mobly.controllers.android_device_lib import fastboot as fastboot
+from mobly.controllers.android_device_lib import service_manager as service_manager
+from mobly.controllers.android_device_lib.services import logcat as logcat
+from mobly.controllers.android_device_lib.services import (
+    snippet_management_service as snippet_management_service,
+)
+
+MBS_PACKAGE: str
+MOBLY_CONTROLLER_CONFIG_NAME: str
+ANDROID_DEVICE_PICK_ALL_TOKEN: str
+ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY: str
+ANDROID_DEVICE_EMPTY_CONFIG_MSG: str
+ANDROID_DEVICE_NOT_LIST_CONFIG_MSG: str
+CACHED_SYSTEM_PROPS: Incomplete
+KEY_DEVICE_REQUIRED: str
+DEFAULT_VALUE_DEVICE_REQUIRED: bool
+KEY_SKIP_LOGCAT: str
+DEFAULT_VALUE_SKIP_LOGCAT: bool
+SERVICE_NAME_LOGCAT: str
+DEFAULT_BUG_REPORT_NAME: str
+DEFAULT_TIMEOUT_BOOT_COMPLETION_SECOND: Incomplete
+TAKE_SCREENSHOT_TIMEOUT_SECOND: int
+Error = errors.Error
+DeviceError = errors.DeviceError
+SnippetError = snippet_management_service.Error
+EMULATOR_SERIAL_REGEX: Incomplete
+
+def create(configs): ...
+def destroy(ads) -> None: ...
+def get_info(ads): ...
+def parse_device_list(device_list_str, key): ...
+def list_adb_devices(): ...
+def list_adb_devices_by_usb_id(): ...
+def list_fastboot_devices(): ...
+def get_instances(serials): ...
+def get_instances_with_configs(configs): ...
+def get_all_instances(include_fastboot: bool = ...): ...
+def filter_devices(ads, func): ...
+def get_devices(ads, **kwargs): ...
+def get_device(ads, **kwargs): ...
+def take_bug_reports(
+    ads,
+    test_name: Incomplete | None = ...,
+    begin_time: Incomplete | None = ...,
+    destination: Incomplete | None = ...,
+) -> None: ...
+
+class BuildInfoConstants(enum.Enum):
+    BUILD_ID: Incomplete
+    BUILD_TYPE: Incomplete
+    BUILD_FINGERPRINT: Incomplete
+    BUILD_VERSION_CODENAME: Incomplete
+    BUILD_VERSION_INCREMENTAL: Incomplete
+    BUILD_VERSION_SDK: Incomplete
+    BUILD_PRODUCT: Incomplete
+    BUILD_CHARACTERISTICS: Incomplete
+    DEBUGGABLE: Incomplete
+    PRODUCT_NAME: Incomplete
+    HARDWARE: Incomplete
+    build_info_key: Incomplete
+    system_prop_key: Incomplete
+    def __init__(self, build_info_key, system_prop_key) -> None: ...
+
+class AndroidDevice:
+    log: Incomplete
+    adb: Incomplete
+    fastboot: Incomplete
+    services: Incomplete
+    def __init__(self, serial: str = ...) -> None: ...
+    @property
+    def adb_logcat_file_path(self): ...
+    @property
+    def device_info(self): ...
+    def add_device_info(self, name, info) -> None: ...
+    @property
+    def sl4a(self): ...
+    @property
+    def ed(self): ...
+    @property
+    def debug_tag(self): ...
+    @debug_tag.setter
+    def debug_tag(self, tag) -> None: ...
+    @property
+    def has_active_service(self): ...
+    @property
+    def log_path(self): ...
+    @log_path.setter
+    def log_path(self, new_path) -> None: ...
+    @property
+    def serial(self): ...
+    def update_serial(self, new_serial) -> None: ...
+    def handle_reboot(self) -> Generator[None, None, None]: ...
+    def handle_usb_disconnect(self) -> Generator[None, None, None]: ...
+    @property
+    def build_info(self): ...
+    @property
+    def is_bootloader(self): ...
+    @property
+    def is_adb_root(self): ...
+    @property
+    def is_rootable(self): ...
+    @property
+    def model(self): ...
+    @property
+    def is_emulator(self): ...
+    def load_config(self, config) -> None: ...
+    def root_adb(self) -> None: ...
+    def load_snippet(self, name, package) -> None: ...
+    def unload_snippet(self, name) -> None: ...
+    def generate_filename(
+        self,
+        file_type,
+        time_identifier: Incomplete | None = ...,
+        extension_name: Incomplete | None = ...,
+    ): ...
+    def take_bug_report(
+        self,
+        test_name: Incomplete | None = ...,
+        begin_time: Incomplete | None = ...,
+        timeout: int = ...,
+        destination: Incomplete | None = ...,
+    ): ...
+    def take_screenshot(self, destination, prefix: str = ...): ...
+    def run_iperf_client(self, server_host, extra_args: str = ...): ...
+    def wait_for_boot_completion(self, timeout=...) -> None: ...
+    def is_boot_completed(self): ...
+    def is_adb_detectable(self): ...
+    def reboot(self) -> None: ...
+    def __getattr__(self, name): ...
+
+class AndroidDeviceLoggerAdapter(logging.LoggerAdapter):
+    def process(self, msg, kwargs): ...
diff --git a/src/antlion/controllers/adb_lib/__init__.py b/stubs/mobly/controllers/android_device_lib/__init__.pyi
similarity index 100%
copy from src/antlion/controllers/adb_lib/__init__.py
copy to stubs/mobly/controllers/android_device_lib/__init__.pyi
diff --git a/stubs/mobly/controllers/android_device_lib/adb.pyi b/stubs/mobly/controllers/android_device_lib/adb.pyi
new file mode 100644
index 0000000..473537d
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/adb.pyi
@@ -0,0 +1,51 @@
+from _typeshed import Incomplete
+from mobly import utils as utils
+
+ADB: str
+ADB_PORT_LOCK: Incomplete
+ADB_ROOT_RETRY_ATTMEPTS: int
+ADB_ROOT_RETRY_ATTEMPT_INTERVAL_SEC: int
+DEFAULT_INSTRUMENTATION_RUNNER: str
+DEFAULT_GETPROP_TIMEOUT_SEC: int
+DEFAULT_GETPROPS_ATTEMPTS: int
+DEFAULT_GETPROPS_RETRY_SLEEP_SEC: int
+PATTERN_ADB_CONNECT_SUCCESS: Incomplete
+
+class Error(Exception): ...
+
+class AdbError(Error):
+    cmd: Incomplete
+    stdout: Incomplete
+    stderr: Incomplete
+    ret_code: Incomplete
+    serial: Incomplete
+    def __init__(self, cmd, stdout, stderr, ret_code, serial: str = ...) -> None: ...
+
+class AdbTimeoutError(Error):
+    cmd: Incomplete
+    timeout: Incomplete
+    serial: Incomplete
+    def __init__(self, cmd, timeout, serial: str = ...) -> None: ...
+
+def is_adb_available(): ...
+def list_occupied_adb_ports(): ...
+
+class AdbProxy:
+    serial: Incomplete
+    def __init__(self, serial: str = ...) -> None: ...
+    @property
+    def current_user_id(self) -> int: ...
+    def connect(self, address) -> bytes: ...
+    def getprop(self, prop_name): ...
+    def getprops(self, prop_names): ...
+    def has_shell_command(self, command) -> bool: ...
+    def forward(self, args: Incomplete | None = ..., shell: bool = ...) -> bytes: ...
+    def instrument(
+        self,
+        package,
+        options: Incomplete | None = ...,
+        runner: Incomplete | None = ...,
+        handler: Incomplete | None = ...,
+    ) -> bytes: ...
+    def root(self) -> bytes: ...
+    def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/callback_handler.pyi b/stubs/mobly/controllers/android_device_lib/callback_handler.pyi
new file mode 100644
index 0000000..0fb9383
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/callback_handler.pyi
@@ -0,0 +1,19 @@
+from _typeshed import Incomplete
+from mobly.controllers.android_device_lib import snippet_event as snippet_event
+from mobly.snippet import errors as errors
+
+MAX_TIMEOUT: Incomplete
+DEFAULT_TIMEOUT: int
+Error = errors.CallbackHandlerBaseError
+TimeoutError = errors.CallbackHandlerTimeoutError
+
+class CallbackHandler:
+    ret_value: Incomplete
+    def __init__(
+        self, callback_id, event_client, ret_value, method_name, ad
+    ) -> None: ...
+    @property
+    def callback_id(self): ...
+    def waitAndGet(self, event_name, timeout=...): ...
+    def waitForEvent(self, event_name, predicate, timeout=...): ...
+    def getAll(self, event_name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/callback_handler_v2.pyi b/stubs/mobly/controllers/android_device_lib/callback_handler_v2.pyi
new file mode 100644
index 0000000..a24f38f
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/callback_handler_v2.pyi
@@ -0,0 +1,8 @@
+from mobly.snippet import callback_handler_base as callback_handler_base
+from mobly.snippet import errors as errors
+
+TIMEOUT_ERROR_MESSAGE: str
+
+class CallbackHandlerV2(callback_handler_base.CallbackHandlerBase):
+    def callEventWaitAndGetRpc(self, callback_id, event_name, timeout_sec): ...
+    def callEventGetAllRpc(self, callback_id, event_name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/errors.pyi b/stubs/mobly/controllers/android_device_lib/errors.pyi
new file mode 100644
index 0000000..562da05
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/errors.pyi
@@ -0,0 +1,13 @@
+from _typeshed import Incomplete
+from mobly import signals as signals
+
+HIERARCHY_TOKEN: str
+
+class Error(signals.ControllerError): ...
+
+class DeviceError(Error):
+    def __init__(self, ad, msg) -> None: ...
+
+class ServiceError(DeviceError):
+    SERVICE_TYPE: Incomplete
+    def __init__(self, device, msg) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/event_dispatcher.pyi b/stubs/mobly/controllers/android_device_lib/event_dispatcher.pyi
new file mode 100644
index 0000000..4f63d2a
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/event_dispatcher.pyi
@@ -0,0 +1,36 @@
+from _typeshed import Incomplete
+
+class EventDispatcherError(Exception): ...
+class IllegalStateError(EventDispatcherError): ...
+class DuplicateError(EventDispatcherError): ...
+
+class EventDispatcher:
+    DEFAULT_TIMEOUT: int
+    started: bool
+    executor: Incomplete
+    poller: Incomplete
+    event_dict: Incomplete
+    handlers: Incomplete
+    lock: Incomplete
+    def __init__(self, sl4a) -> None: ...
+    def poll_events(self) -> None: ...
+    def register_handler(self, handler, event_name, args) -> None: ...
+    def start(self) -> None: ...
+    def clean_up(self) -> None: ...
+    def pop_event(self, event_name, timeout=...): ...
+    def wait_for_event(self, event_name, predicate, timeout=..., *args, **kwargs): ...
+    def pop_events(self, regex_pattern, timeout): ...
+    def get_event_q(self, event_name): ...
+    def handle_subscribed_event(self, event_obj, event_name) -> None: ...
+    def handle_event(
+        self,
+        event_handler,
+        event_name,
+        user_args,
+        event_timeout: Incomplete | None = ...,
+        cond: Incomplete | None = ...,
+        cond_timeout: Incomplete | None = ...,
+    ): ...
+    def pop_all(self, event_name): ...
+    def clear_events(self, event_name) -> None: ...
+    def clear_all_events(self) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/fastboot.pyi b/stubs/mobly/controllers/android_device_lib/fastboot.pyi
new file mode 100644
index 0000000..e734c1a
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/fastboot.pyi
@@ -0,0 +1,10 @@
+from _typeshed import Incomplete
+
+def exe_cmd(*cmds): ...
+
+class FastbootProxy:
+    serial: Incomplete
+    fastboot_str: Incomplete
+    def __init__(self, serial: str = ...) -> None: ...
+    def args(self, *args): ...
+    def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/jsonrpc_client_base.pyi b/stubs/mobly/controllers/android_device_lib/jsonrpc_client_base.pyi
new file mode 100644
index 0000000..550126d
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/jsonrpc_client_base.pyi
@@ -0,0 +1,36 @@
+import abc
+
+from _typeshed import Incomplete
+from mobly.controllers.android_device_lib import callback_handler as callback_handler
+from mobly.snippet import errors as errors
+
+UNKNOWN_UID: int
+Error = errors.Error
+AppStartError = errors.ServerStartError
+AppRestoreConnectionError = errors.ServerRestoreConnectionError
+ApiError = errors.ApiError
+ProtocolError = errors.ProtocolError
+
+class JsonRpcCommand:
+    INIT: str
+    CONTINUE: str
+
+class JsonRpcClientBase(abc.ABC):
+    host_port: Incomplete
+    device_port: Incomplete
+    app_name: Incomplete
+    log: Incomplete
+    uid: Incomplete
+    verbose_logging: bool
+    def __init__(self, app_name, ad) -> None: ...
+    def __del__(self) -> None: ...
+    def start_app_and_connect(self) -> None: ...
+    def stop_app(self) -> None: ...
+    def restore_app_connection(self, port: Incomplete | None = ...) -> None: ...
+    def connect(self, uid=..., cmd=...) -> None: ...
+    def disconnect(self) -> None: ...
+    def close_socket_connection(self) -> None: ...
+    def clear_host_port(self) -> None: ...
+    def disable_hidden_api_blacklist(self) -> None: ...
+    def __getattr__(self, name): ...
+    def set_snippet_client_verbose_logging(self, verbose) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/jsonrpc_shell_base.pyi b/stubs/mobly/controllers/android_device_lib/jsonrpc_shell_base.pyi
new file mode 100644
index 0000000..6033e90
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/jsonrpc_shell_base.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+from mobly.controllers import android_device as android_device
+
+class Error(Exception): ...
+
+class JsonRpcShellBase:
+    def load_device(self, serial: Incomplete | None = ...) -> None: ...
+    def start_console(self) -> None: ...
+    def main(self, serial: Incomplete | None = ...) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/service_manager.pyi b/stubs/mobly/controllers/android_device_lib/service_manager.pyi
new file mode 100644
index 0000000..06aad4e
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/service_manager.pyi
@@ -0,0 +1,31 @@
+from _typeshed import Incomplete
+from mobly import expects as expects
+from mobly.controllers.android_device_lib import errors as errors
+from mobly.controllers.android_device_lib.services import base_service as base_service
+
+class Error(errors.DeviceError): ...
+
+class ServiceManager:
+    def __init__(self, device) -> None: ...
+    def has_service_by_name(self, name): ...
+    @property
+    def is_any_alive(self): ...
+    def register(
+        self,
+        alias,
+        service_class,
+        configs: Incomplete | None = ...,
+        start_service: bool = ...,
+    ) -> None: ...
+    def unregister(self, alias) -> None: ...
+    def for_each(self, func) -> None: ...
+    def list_live_services(self): ...
+    def create_output_excerpts_all(self, test_info): ...
+    def unregister_all(self) -> None: ...
+    def start_all(self) -> None: ...
+    def start_services(self, service_alises) -> None: ...
+    def stop_all(self) -> None: ...
+    def pause_all(self) -> None: ...
+    def resume_all(self) -> None: ...
+    def resume_services(self, service_alises) -> None: ...
+    def __getattr__(self, name): ...
diff --git a/src/antlion/controllers/relay_lib/devices/__init__.py b/stubs/mobly/controllers/android_device_lib/services/__init__.pyi
similarity index 100%
rename from src/antlion/controllers/relay_lib/devices/__init__.py
rename to stubs/mobly/controllers/android_device_lib/services/__init__.pyi
diff --git a/stubs/mobly/controllers/android_device_lib/services/base_service.pyi b/stubs/mobly/controllers/android_device_lib/services/base_service.pyi
new file mode 100644
index 0000000..c99f0e7
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/services/base_service.pyi
@@ -0,0 +1,17 @@
+import abc
+
+from _typeshed import Incomplete
+
+class BaseService(abc.ABC):
+    def __init__(self, device, configs: Incomplete | None = ...) -> None: ...
+    @property
+    def alias(self): ...
+    @alias.setter
+    def alias(self, alias) -> None: ...
+    @property
+    def is_alive(self) -> None: ...
+    def start(self) -> None: ...
+    def stop(self) -> None: ...
+    def pause(self) -> None: ...
+    def resume(self) -> None: ...
+    def create_output_excerpts(self, test_info): ...
diff --git a/stubs/mobly/controllers/android_device_lib/services/logcat.pyi b/stubs/mobly/controllers/android_device_lib/services/logcat.pyi
new file mode 100644
index 0000000..e21d22e
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/services/logcat.pyi
@@ -0,0 +1,35 @@
+from _typeshed import Incomplete
+from mobly import utils as utils
+from mobly.controllers.android_device_lib import adb as adb
+from mobly.controllers.android_device_lib import errors as errors
+from mobly.controllers.android_device_lib.services import base_service as base_service
+
+CREATE_LOGCAT_FILE_TIMEOUT_SEC: int
+
+class Error(errors.ServiceError):
+    SERVICE_TYPE: str
+
+class Config:
+    clear_log: Incomplete
+    logcat_params: Incomplete
+    output_file_path: Incomplete
+    def __init__(
+        self,
+        logcat_params: Incomplete | None = ...,
+        clear_log: bool = ...,
+        output_file_path: Incomplete | None = ...,
+    ) -> None: ...
+
+class Logcat(base_service.BaseService):
+    OUTPUT_FILE_TYPE: str
+    adb_logcat_file_path: Incomplete
+    def __init__(self, android_device, configs: Incomplete | None = ...) -> None: ...
+    def create_output_excerpts(self, test_info): ...
+    @property
+    def is_alive(self): ...
+    def clear_adb_log(self) -> None: ...
+    def update_config(self, new_config) -> None: ...
+    def start(self) -> None: ...
+    def stop(self) -> None: ...
+    def pause(self) -> None: ...
+    def resume(self) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/services/sl4a_service.pyi b/stubs/mobly/controllers/android_device_lib/services/sl4a_service.pyi
new file mode 100644
index 0000000..1a0f295
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/services/sl4a_service.pyi
@@ -0,0 +1,13 @@
+from _typeshed import Incomplete
+from mobly.controllers.android_device_lib import sl4a_client as sl4a_client
+from mobly.controllers.android_device_lib.services import base_service as base_service
+
+class Sl4aService(base_service.BaseService):
+    def __init__(self, device, configs: Incomplete | None = ...) -> None: ...
+    @property
+    def is_alive(self): ...
+    def start(self) -> None: ...
+    def stop(self) -> None: ...
+    def pause(self) -> None: ...
+    def resume(self) -> None: ...
+    def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/services/snippet_management_service.pyi b/stubs/mobly/controllers/android_device_lib/services/snippet_management_service.pyi
new file mode 100644
index 0000000..7ca5056
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/services/snippet_management_service.pyi
@@ -0,0 +1,22 @@
+from _typeshed import Incomplete
+from mobly.controllers.android_device_lib import errors as errors
+from mobly.controllers.android_device_lib import snippet_client_v2 as snippet_client_v2
+from mobly.controllers.android_device_lib.services import base_service as base_service
+
+MISSING_SNIPPET_CLIENT_MSG: str
+
+class Error(errors.ServiceError):
+    SERVICE_TYPE: str
+
+class SnippetManagementService(base_service.BaseService):
+    def __init__(self, device, configs: Incomplete | None = ...) -> None: ...
+    @property
+    def is_alive(self): ...
+    def get_snippet_client(self, name): ...
+    def add_snippet_client(self, name, package) -> None: ...
+    def remove_snippet_client(self, name) -> None: ...
+    def start(self) -> None: ...
+    def stop(self) -> None: ...
+    def pause(self) -> None: ...
+    def resume(self) -> None: ...
+    def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/sl4a_client.pyi b/stubs/mobly/controllers/android_device_lib/sl4a_client.pyi
new file mode 100644
index 0000000..9b12fc2
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/sl4a_client.pyi
@@ -0,0 +1,16 @@
+from _typeshed import Incomplete
+from mobly import utils as utils
+from mobly.controllers.android_device_lib import event_dispatcher as event_dispatcher
+from mobly.controllers.android_device_lib import (
+    jsonrpc_client_base as jsonrpc_client_base,
+)
+
+class Sl4aClient(jsonrpc_client_base.JsonRpcClientBase):
+    ed: Incomplete
+    def __init__(self, ad) -> None: ...
+    device_port: Incomplete
+    def start_app_and_connect(self) -> None: ...
+    host_port: Incomplete
+    def restore_app_connection(self, port: Incomplete | None = ...) -> None: ...
+    def stop_app(self) -> None: ...
+    def stop_event_dispatcher(self) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/snippet_client.pyi b/stubs/mobly/controllers/android_device_lib/snippet_client.pyi
new file mode 100644
index 0000000..96f0a88
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/snippet_client.pyi
@@ -0,0 +1,24 @@
+from _typeshed import Incomplete
+from mobly import utils as utils
+from mobly.controllers.android_device_lib import adb as adb
+from mobly.controllers.android_device_lib import errors as errors
+from mobly.controllers.android_device_lib import (
+    jsonrpc_client_base as jsonrpc_client_base,
+)
+from mobly.snippet import errors as snippet_errors
+
+AppStartPreCheckError = snippet_errors.ServerStartPreCheckError
+ProtocolVersionError = snippet_errors.ServerStartProtocolError
+
+class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
+    package: Incomplete
+    def __init__(self, package, ad) -> None: ...
+    @property
+    def is_alive(self): ...
+    @property
+    def user_id(self): ...
+    def start_app_and_connect(self) -> None: ...
+    host_port: Incomplete
+    def restore_app_connection(self, port: Incomplete | None = ...) -> None: ...
+    def stop_app(self) -> None: ...
+    def help(self, print_output: bool = ...): ...
diff --git a/stubs/mobly/controllers/android_device_lib/snippet_client_v2.pyi b/stubs/mobly/controllers/android_device_lib/snippet_client_v2.pyi
new file mode 100644
index 0000000..92fd966
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/snippet_client_v2.pyi
@@ -0,0 +1,41 @@
+import enum
+
+from _typeshed import Incomplete
+from mobly import utils as utils
+from mobly.controllers.android_device_lib import adb as adb
+from mobly.controllers.android_device_lib import (
+    callback_handler_v2 as callback_handler_v2,
+)
+from mobly.snippet import client_base as client_base
+from mobly.snippet import errors as errors
+
+UNKNOWN_UID: int
+
+class ConnectionHandshakeCommand(enum.Enum):
+    INIT: str
+    CONTINUE: str
+
+class SnippetClientV2(client_base.ClientBase):
+    host_port: Incomplete
+    device_port: Incomplete
+    uid: Incomplete
+    def __init__(self, package, ad) -> None: ...
+    @property
+    def user_id(self): ...
+    @property
+    def is_alive(self): ...
+    def before_starting_server(self) -> None: ...
+    def start_server(self) -> None: ...
+    def make_connection(self) -> None: ...
+    def create_socket_connection(self) -> None: ...
+    def send_handshake_request(self, uid=..., cmd=...) -> None: ...
+    def check_server_proc_running(self) -> None: ...
+    def send_rpc_request(self, request): ...
+    def handle_callback(self, callback_id, ret_value, rpc_func_name): ...
+    def make_connection_with_forwarded_port(
+        self, host_port, device_port, uid=..., cmd=...
+    ) -> None: ...
+    def stop(self) -> None: ...
+    def close_connection(self) -> None: ...
+    def restore_server_connection(self, port: Incomplete | None = ...) -> None: ...
+    def help(self, print_output: bool = ...): ...
diff --git a/stubs/mobly/controllers/android_device_lib/snippet_event.pyi b/stubs/mobly/controllers/android_device_lib/snippet_event.pyi
new file mode 100644
index 0000000..5d99106
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/snippet_event.pyi
@@ -0,0 +1,10 @@
+from _typeshed import Incomplete
+
+def from_dict(event_dict): ...
+
+class SnippetEvent:
+    callback_id: Incomplete
+    name: Incomplete
+    creation_time: Incomplete
+    data: Incomplete
+    def __init__(self, callback_id, name, creation_time, data) -> None: ...
diff --git a/stubs/mobly/controllers/attenuator.pyi b/stubs/mobly/controllers/attenuator.pyi
new file mode 100644
index 0000000..e07d7b9
--- /dev/null
+++ b/stubs/mobly/controllers/attenuator.pyi
@@ -0,0 +1,24 @@
+from _typeshed import Incomplete
+
+MOBLY_CONTROLLER_CONFIG_NAME: str
+KEY_ADDRESS: str
+KEY_PORT: str
+KEY_MODEL: str
+KEY_PATHS: str
+PACKAGE_PATH_TEMPLATE: str
+
+def create(configs): ...
+def destroy(objs) -> None: ...
+
+class Error(Exception): ...
+
+class AttenuatorPath:
+    model: Incomplete
+    attenuation_device: Incomplete
+    idx: Incomplete
+    def __init__(
+        self, attenuation_device, idx: int = ..., name: Incomplete | None = ...
+    ) -> None: ...
+    def set_atten(self, value) -> None: ...
+    def get_atten(self): ...
+    def get_max_atten(self): ...
diff --git a/src/antlion/controllers/attenuator_lib/__init__.py b/stubs/mobly/controllers/attenuator_lib/__init__.pyi
similarity index 100%
copy from src/antlion/controllers/attenuator_lib/__init__.py
copy to stubs/mobly/controllers/attenuator_lib/__init__.pyi
diff --git a/stubs/mobly/controllers/attenuator_lib/minicircuits.pyi b/stubs/mobly/controllers/attenuator_lib/minicircuits.pyi
new file mode 100644
index 0000000..5dfa6e4
--- /dev/null
+++ b/stubs/mobly/controllers/attenuator_lib/minicircuits.pyi
@@ -0,0 +1,15 @@
+from _typeshed import Incomplete
+from mobly.controllers import attenuator as attenuator
+from mobly.controllers.attenuator_lib import telnet_scpi_client as telnet_scpi_client
+
+class AttenuatorDevice:
+    path_count: Incomplete
+    def __init__(self, path_count: int = ...) -> None: ...
+    @property
+    def is_open(self): ...
+    properties: Incomplete
+    max_atten: Incomplete
+    def open(self, host, port: int = ...) -> None: ...
+    def close(self) -> None: ...
+    def set_atten(self, idx, value) -> None: ...
+    def get_atten(self, idx: int = ...): ...
diff --git a/stubs/mobly/controllers/attenuator_lib/telnet_scpi_client.pyi b/stubs/mobly/controllers/attenuator_lib/telnet_scpi_client.pyi
new file mode 100644
index 0000000..3ebb042
--- /dev/null
+++ b/stubs/mobly/controllers/attenuator_lib/telnet_scpi_client.pyi
@@ -0,0 +1,20 @@
+from _typeshed import Incomplete
+from mobly.controllers import attenuator as attenuator
+
+class TelnetScpiClient:
+    tx_cmd_separator: Incomplete
+    rx_cmd_separator: Incomplete
+    prompt: Incomplete
+    host: Incomplete
+    port: Incomplete
+    def __init__(
+        self,
+        tx_cmd_separator: str = ...,
+        rx_cmd_separator: str = ...,
+        prompt: str = ...,
+    ) -> None: ...
+    def open(self, host, port: int = ...) -> None: ...
+    @property
+    def is_open(self): ...
+    def close(self) -> None: ...
+    def cmd(self, cmd_str, wait_ret: bool = ...): ...
diff --git a/stubs/mobly/controllers/iperf_server.pyi b/stubs/mobly/controllers/iperf_server.pyi
new file mode 100644
index 0000000..29fd940
--- /dev/null
+++ b/stubs/mobly/controllers/iperf_server.pyi
@@ -0,0 +1,31 @@
+from _typeshed import Incomplete
+from mobly import utils as utils
+
+MOBLY_CONTROLLER_CONFIG_NAME: str
+
+def create(configs): ...
+def destroy(objs) -> None: ...
+
+class IPerfResult:
+    result: Incomplete
+    def __init__(self, result_path) -> None: ...
+    def get_json(self): ...
+    @property
+    def error(self): ...
+    @property
+    def avg_rate(self): ...
+    @property
+    def avg_receive_rate(self): ...
+    @property
+    def avg_send_rate(self): ...
+
+class IPerfServer:
+    port: Incomplete
+    log_path: Incomplete
+    iperf_str: Incomplete
+    iperf_process: Incomplete
+    log_files: Incomplete
+    started: bool
+    def __init__(self, port, log_path) -> None: ...
+    def start(self, extra_args: str = ..., tag: str = ...) -> None: ...
+    def stop(self) -> None: ...
diff --git a/stubs/mobly/controllers/sniffer.pyi b/stubs/mobly/controllers/sniffer.pyi
new file mode 100644
index 0000000..7ee9062
--- /dev/null
+++ b/stubs/mobly/controllers/sniffer.pyi
@@ -0,0 +1,36 @@
+from _typeshed import Incomplete
+
+MOBLY_CONTROLLER_CONFIG_NAME: str
+
+def create(configs): ...
+def destroy(objs) -> None: ...
+
+class SnifferError(Exception): ...
+class InvalidDataError(Exception): ...
+class ExecutionError(SnifferError): ...
+class InvalidOperationError(SnifferError): ...
+
+class Sniffer:
+    CONFIG_KEY_CHANNEL: str
+    def __init__(
+        self, interface, logger, base_configs: Incomplete | None = ...
+    ) -> None: ...
+    def get_descriptor(self) -> None: ...
+    def get_type(self) -> None: ...
+    def get_subtype(self) -> None: ...
+    def get_interface(self) -> None: ...
+    def get_capture_file(self) -> None: ...
+    def start_capture(
+        self,
+        override_configs: Incomplete | None = ...,
+        additional_args: Incomplete | None = ...,
+        duration: Incomplete | None = ...,
+        packet_count: Incomplete | None = ...,
+    ) -> None: ...
+    def stop_capture(self) -> None: ...
+    def wait_for_capture(self, timeout: Incomplete | None = ...) -> None: ...
+
+class ActiveCaptureContext:
+    def __init__(self, sniffer, timeout: Incomplete | None = ...) -> None: ...
+    def __enter__(self) -> None: ...
+    def __exit__(self, type, value, traceback) -> None: ...
diff --git a/src/antlion/controllers/sniffer_lib/__init__.py b/stubs/mobly/controllers/sniffer_lib/__init__.pyi
similarity index 100%
copy from src/antlion/controllers/sniffer_lib/__init__.py
copy to stubs/mobly/controllers/sniffer_lib/__init__.pyi
diff --git a/src/antlion/controllers/sniffer_lib/local/__init__.py b/stubs/mobly/controllers/sniffer_lib/local/__init__.pyi
similarity index 100%
copy from src/antlion/controllers/sniffer_lib/local/__init__.py
copy to stubs/mobly/controllers/sniffer_lib/local/__init__.pyi
diff --git a/stubs/mobly/controllers/sniffer_lib/local/local_base.pyi b/stubs/mobly/controllers/sniffer_lib/local/local_base.pyi
new file mode 100644
index 0000000..4e56926
--- /dev/null
+++ b/stubs/mobly/controllers/sniffer_lib/local/local_base.pyi
@@ -0,0 +1,21 @@
+from _typeshed import Incomplete
+from mobly import logger as logger
+from mobly import utils as utils
+from mobly.controllers import sniffer as sniffer
+
+class SnifferLocalBase(sniffer.Sniffer):
+    def __init__(
+        self, interface, logger, base_configs: Incomplete | None = ...
+    ) -> None: ...
+    def get_interface(self): ...
+    def get_type(self): ...
+    def get_capture_file(self): ...
+    def start_capture(
+        self,
+        override_configs: Incomplete | None = ...,
+        additional_args: Incomplete | None = ...,
+        duration: Incomplete | None = ...,
+        packet_count: Incomplete | None = ...,
+    ): ...
+    def stop_capture(self) -> None: ...
+    def wait_for_capture(self, timeout: Incomplete | None = ...) -> None: ...
diff --git a/stubs/mobly/controllers/sniffer_lib/local/tcpdump.pyi b/stubs/mobly/controllers/sniffer_lib/local/tcpdump.pyi
new file mode 100644
index 0000000..2cc12b3
--- /dev/null
+++ b/stubs/mobly/controllers/sniffer_lib/local/tcpdump.pyi
@@ -0,0 +1,10 @@
+from _typeshed import Incomplete
+from mobly.controllers import sniffer as sniffer
+from mobly.controllers.sniffer_lib.local import local_base as local_base
+
+class Sniffer(local_base.SnifferLocalBase):
+    def __init__(
+        self, config_path, logger, base_configs: Incomplete | None = ...
+    ) -> None: ...
+    def get_descriptor(self): ...
+    def get_subtype(self): ...
diff --git a/stubs/mobly/controllers/sniffer_lib/local/tshark.pyi b/stubs/mobly/controllers/sniffer_lib/local/tshark.pyi
new file mode 100644
index 0000000..2cc12b3
--- /dev/null
+++ b/stubs/mobly/controllers/sniffer_lib/local/tshark.pyi
@@ -0,0 +1,10 @@
+from _typeshed import Incomplete
+from mobly.controllers import sniffer as sniffer
+from mobly.controllers.sniffer_lib.local import local_base as local_base
+
+class Sniffer(local_base.SnifferLocalBase):
+    def __init__(
+        self, config_path, logger, base_configs: Incomplete | None = ...
+    ) -> None: ...
+    def get_descriptor(self): ...
+    def get_subtype(self): ...
diff --git a/stubs/mobly/expects.pyi b/stubs/mobly/expects.pyi
new file mode 100644
index 0000000..e7aa36c
--- /dev/null
+++ b/stubs/mobly/expects.pyi
@@ -0,0 +1,28 @@
+from collections.abc import Generator
+
+from _typeshed import Incomplete
+from mobly import asserts as asserts
+from mobly import records as records
+from mobly import signals as signals
+
+DEFAULT_TEST_RESULT_RECORD: Incomplete
+
+class _ExpectErrorRecorder:
+    def __init__(self, record: Incomplete | None = ...) -> None: ...
+    def reset_internal_states(self, record: Incomplete | None = ...) -> None: ...
+    @property
+    def has_error(self): ...
+    @property
+    def error_count(self): ...
+    def add_error(self, error) -> None: ...
+
+def expect_true(condition, msg, extras: Incomplete | None = ...) -> None: ...
+def expect_false(condition, msg, extras: Incomplete | None = ...) -> None: ...
+def expect_equal(
+    first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def expect_no_raises(
+    message: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> Generator[None, None, None]: ...
+
+recorder: Incomplete
diff --git a/stubs/mobly/keys.pyi b/stubs/mobly/keys.pyi
new file mode 100644
index 0000000..9007329
--- /dev/null
+++ b/stubs/mobly/keys.pyi
@@ -0,0 +1,9 @@
+import enum
+
+class Config(enum.Enum):
+    key_mobly_params: str
+    key_log_path: str
+    key_testbed: str
+    key_testbed_name: str
+    key_testbed_controllers: str
+    key_testbed_test_params: str
diff --git a/stubs/mobly/logger.pyi b/stubs/mobly/logger.pyi
new file mode 100644
index 0000000..86ccf16
--- /dev/null
+++ b/stubs/mobly/logger.pyi
@@ -0,0 +1,35 @@
+import logging
+
+from _typeshed import Incomplete
+from mobly import records as records
+from mobly import utils as utils
+
+LINUX_MAX_FILENAME_LENGTH: int
+WINDOWS_MAX_FILENAME_LENGTH: int
+WINDOWS_RESERVED_CHARACTERS_REPLACEMENTS: Incomplete
+WINDOWS_RESERVED_FILENAME_REGEX: Incomplete
+WINDOWS_RESERVED_FILENAME_PREFIX: str
+log_line_format: str
+log_line_time_format: str
+log_line_timestamp_len: int
+logline_timestamp_re: Incomplete
+
+def is_valid_logline_timestamp(timestamp): ...
+def logline_timestamp_comparator(t1, t2): ...
+def epoch_to_log_line_timestamp(epoch_time, time_zone: Incomplete | None = ...): ...
+def get_log_line_timestamp(delta: Incomplete | None = ...): ...
+def get_log_file_timestamp(delta: Incomplete | None = ...): ...
+def kill_test_logger(logger) -> None: ...
+def create_latest_log_alias(actual_path, alias) -> None: ...
+def setup_test_logger(
+    log_path, prefix: Incomplete | None = ..., alias: str = ..., console_level=...
+) -> None: ...
+def sanitize_filename(filename): ...
+def normalize_log_line_timestamp(log_line_timestamp): ...
+
+class PrefixLoggerAdapter(logging.LoggerAdapter):
+    EXTRA_KEY_LOG_PREFIX: str
+    _KWARGS_TYPE: Incomplete
+    _PROCESS_RETURN_TYPE: Incomplete
+    extra: _KWARGS_TYPE
+    def process(self, msg: str, kwargs: _KWARGS_TYPE) -> _PROCESS_RETURN_TYPE: ...
diff --git a/stubs/mobly/records.pyi b/stubs/mobly/records.pyi
new file mode 100644
index 0000000..2ae6905
--- /dev/null
+++ b/stubs/mobly/records.pyi
@@ -0,0 +1,118 @@
+import enum
+
+from _typeshed import Incomplete
+from mobly import signals as signals
+from mobly import utils as utils
+
+OUTPUT_FILE_INFO_LOG: str
+OUTPUT_FILE_DEBUG_LOG: str
+OUTPUT_FILE_SUMMARY: str
+
+class Error(Exception): ...
+
+def uid(uid): ...
+
+class TestSummaryEntryType(enum.Enum):
+    TEST_NAME_LIST: str
+    RECORD: str
+    SUMMARY: str
+    CONTROLLER_INFO: str
+    USER_DATA: str
+
+class TestSummaryWriter:
+    def __init__(self, path) -> None: ...
+    def __copy__(self): ...
+    def __deepcopy__(self, *args): ...
+    def dump(self, content, entry_type) -> None: ...
+
+class TestResultEnums:
+    RECORD_NAME: str
+    RECORD_CLASS: str
+    RECORD_BEGIN_TIME: str
+    RECORD_END_TIME: str
+    RECORD_RESULT: str
+    RECORD_UID: str
+    RECORD_EXTRAS: str
+    RECORD_EXTRA_ERRORS: str
+    RECORD_DETAILS: str
+    RECORD_TERMINATION_SIGNAL_TYPE: str
+    RECORD_STACKTRACE: str
+    RECORD_SIGNATURE: str
+    RECORD_RETRY_PARENT: str
+    RECORD_POSITION: str
+    TEST_RESULT_PASS: str
+    TEST_RESULT_FAIL: str
+    TEST_RESULT_SKIP: str
+    TEST_RESULT_ERROR: str
+
+class ControllerInfoRecord:
+    KEY_TEST_CLASS: Incomplete
+    KEY_CONTROLLER_NAME: str
+    KEY_CONTROLLER_INFO: str
+    KEY_TIMESTAMP: str
+    test_class: Incomplete
+    controller_name: Incomplete
+    controller_info: Incomplete
+    timestamp: Incomplete
+    def __init__(self, test_class, controller_name, info) -> None: ...
+    def to_dict(self): ...
+
+class ExceptionRecord:
+    exception: Incomplete
+    type: Incomplete
+    stacktrace: Incomplete
+    extras: Incomplete
+    position: Incomplete
+    is_test_signal: Incomplete
+    def __init__(self, e, position: Incomplete | None = ...) -> None: ...
+    def to_dict(self): ...
+    def __deepcopy__(self, memo): ...
+
+class TestResultRecord:
+    test_name: Incomplete
+    test_class: Incomplete
+    begin_time: Incomplete
+    end_time: Incomplete
+    uid: Incomplete
+    signature: Incomplete
+    retry_parent: Incomplete
+    termination_signal: Incomplete
+    extra_errors: Incomplete
+    result: Incomplete
+    def __init__(self, t_name, t_class: Incomplete | None = ...) -> None: ...
+    @property
+    def details(self): ...
+    @property
+    def termination_signal_type(self): ...
+    @property
+    def stacktrace(self): ...
+    @property
+    def extras(self): ...
+    def test_begin(self) -> None: ...
+    def update_record(self) -> None: ...
+    def test_pass(self, e: Incomplete | None = ...) -> None: ...
+    def test_fail(self, e: Incomplete | None = ...) -> None: ...
+    def test_skip(self, e: Incomplete | None = ...) -> None: ...
+    def test_error(self, e: Incomplete | None = ...) -> None: ...
+    def add_error(self, position, e) -> None: ...
+    def to_dict(self): ...
+
+class TestResult:
+    requested: Incomplete
+    failed: Incomplete
+    executed: Incomplete
+    passed: Incomplete
+    skipped: Incomplete
+    error: Incomplete
+    controller_info: Incomplete
+    def __init__(self) -> None: ...
+    def __add__(self, r): ...
+    def add_record(self, record) -> None: ...
+    def add_controller_info_record(self, controller_info_record) -> None: ...
+    def add_class_error(self, test_record) -> None: ...
+    def is_test_executed(self, test_name): ...
+    @property
+    def is_all_pass(self): ...
+    def requested_test_names_dict(self): ...
+    def summary_str(self): ...
+    def summary_dict(self): ...
diff --git a/stubs/mobly/runtime_test_info.pyi b/stubs/mobly/runtime_test_info.pyi
new file mode 100644
index 0000000..9dd1733
--- /dev/null
+++ b/stubs/mobly/runtime_test_info.pyi
@@ -0,0 +1,12 @@
+from mobly import utils as utils
+
+class RuntimeTestInfo:
+    def __init__(self, test_name, log_path, record) -> None: ...
+    @property
+    def name(self): ...
+    @property
+    def signature(self): ...
+    @property
+    def record(self): ...
+    @property
+    def output_path(self): ...
diff --git a/stubs/mobly/signals.pyi b/stubs/mobly/signals.pyi
new file mode 100644
index 0000000..f4fbe53
--- /dev/null
+++ b/stubs/mobly/signals.pyi
@@ -0,0 +1,17 @@
+from _typeshed import Incomplete
+
+class TestSignalError(Exception): ...
+
+class TestSignal(Exception):
+    details: Incomplete
+    extras: Incomplete
+    def __init__(self, details, extras: Incomplete | None = ...) -> None: ...
+
+class TestError(TestSignal): ...
+class TestFailure(TestSignal): ...
+class TestPass(TestSignal): ...
+class TestSkip(TestSignal): ...
+class TestAbortSignal(TestSignal): ...
+class TestAbortClass(TestAbortSignal): ...
+class TestAbortAll(TestAbortSignal): ...
+class ControllerError(Exception): ...
diff --git a/src/antlion/event/__init__.py b/stubs/mobly/snippet/__init__.pyi
similarity index 100%
copy from src/antlion/event/__init__.py
copy to stubs/mobly/snippet/__init__.pyi
diff --git a/stubs/mobly/snippet/callback_event.pyi b/stubs/mobly/snippet/callback_event.pyi
new file mode 100644
index 0000000..f3dfbb9
--- /dev/null
+++ b/stubs/mobly/snippet/callback_event.pyi
@@ -0,0 +1,10 @@
+from _typeshed import Incomplete
+
+def from_dict(event_dict): ...
+
+class CallbackEvent:
+    callback_id: Incomplete
+    name: Incomplete
+    creation_time: Incomplete
+    data: Incomplete
+    def __init__(self, callback_id, name, creation_time, data) -> None: ...
diff --git a/stubs/mobly/snippet/callback_handler_base.pyi b/stubs/mobly/snippet/callback_handler_base.pyi
new file mode 100644
index 0000000..11502d0
--- /dev/null
+++ b/stubs/mobly/snippet/callback_handler_base.pyi
@@ -0,0 +1,31 @@
+import abc
+
+from _typeshed import Incomplete
+from mobly.snippet import callback_event as callback_event
+from mobly.snippet import errors as errors
+
+class CallbackHandlerBase(abc.ABC, metaclass=abc.ABCMeta):
+    ret_value: Incomplete
+    def __init__(
+        self,
+        callback_id,
+        event_client,
+        ret_value,
+        method_name,
+        device,
+        rpc_max_timeout_sec,
+        default_timeout_sec: int = ...,
+    ) -> None: ...
+    @property
+    def rpc_max_timeout_sec(self): ...
+    @property
+    def default_timeout_sec(self): ...
+    @property
+    def callback_id(self): ...
+    @abc.abstractmethod
+    def callEventWaitAndGetRpc(self, callback_id, event_name, timeout_sec): ...
+    @abc.abstractmethod
+    def callEventGetAllRpc(self, callback_id, event_name): ...
+    def waitAndGet(self, event_name, timeout: Incomplete | None = ...): ...
+    def waitForEvent(self, event_name, predicate, timeout: Incomplete | None = ...): ...
+    def getAll(self, event_name): ...
diff --git a/stubs/mobly/snippet/client_base.pyi b/stubs/mobly/snippet/client_base.pyi
new file mode 100644
index 0000000..92d4b9c
--- /dev/null
+++ b/stubs/mobly/snippet/client_base.pyi
@@ -0,0 +1,34 @@
+import abc
+
+from _typeshed import Incomplete
+from mobly.snippet import errors as errors
+
+RPC_RESPONSE_REQUIRED_FIELDS: Incomplete
+
+class ClientBase(abc.ABC, metaclass=abc.ABCMeta):
+    package: Incomplete
+    log: Incomplete
+    verbose_logging: bool
+    def __init__(self, package, device) -> None: ...
+    def __del__(self) -> None: ...
+    def initialize(self) -> None: ...
+    @abc.abstractmethod
+    def before_starting_server(self): ...
+    @abc.abstractmethod
+    def start_server(self): ...
+    @abc.abstractmethod
+    def make_connection(self): ...
+    def __getattr__(self, name): ...
+    def set_snippet_client_verbose_logging(self, verbose) -> None: ...
+    @abc.abstractmethod
+    def restore_server_connection(self, port: Incomplete | None = ...): ...
+    @abc.abstractmethod
+    def check_server_proc_running(self): ...
+    @abc.abstractmethod
+    def send_rpc_request(self, request): ...
+    @abc.abstractmethod
+    def handle_callback(self, callback_id, ret_value, rpc_func_name): ...
+    @abc.abstractmethod
+    def stop(self): ...
+    @abc.abstractmethod
+    def close_connection(self): ...
diff --git a/stubs/mobly/snippet/errors.pyi b/stubs/mobly/snippet/errors.pyi
new file mode 100644
index 0000000..2c2ac2a
--- /dev/null
+++ b/stubs/mobly/snippet/errors.pyi
@@ -0,0 +1,18 @@
+from mobly.controllers.android_device_lib import errors as errors
+
+class Error(errors.DeviceError): ...
+class ServerRestoreConnectionError(Error): ...
+class ServerStartError(Error): ...
+class ServerStartProtocolError(ServerStartError): ...
+class ServerStartPreCheckError(Error): ...
+class ApiError(Error): ...
+
+class ProtocolError(Error):
+    NO_RESPONSE_FROM_HANDSHAKE: str
+    NO_RESPONSE_FROM_SERVER: str
+    MISMATCHED_API_ID: str
+    RESPONSE_MISSING_FIELD: str
+
+class ServerDiedError(Error): ...
+class CallbackHandlerBaseError(errors.DeviceError): ...
+class CallbackHandlerTimeoutError(Error): ...
diff --git a/stubs/mobly/suite_runner.pyi b/stubs/mobly/suite_runner.pyi
new file mode 100644
index 0000000..415ed32
--- /dev/null
+++ b/stubs/mobly/suite_runner.pyi
@@ -0,0 +1,12 @@
+from _typeshed import Incomplete
+from mobly import base_suite as base_suite
+from mobly import base_test as base_test
+from mobly import config_parser as config_parser
+from mobly import signals as signals
+from mobly import test_runner as test_runner
+
+class Error(Exception): ...
+
+def run_suite_class(argv: Incomplete | None = ...) -> None: ...
+def run_suite(test_classes, argv: Incomplete | None = ...) -> None: ...
+def compute_selected_tests(test_classes, selected_tests): ...
diff --git a/stubs/mobly/test_runner.pyi b/stubs/mobly/test_runner.pyi
new file mode 100644
index 0000000..f2aee14
--- /dev/null
+++ b/stubs/mobly/test_runner.pyi
@@ -0,0 +1,53 @@
+from collections.abc import Generator
+
+from _typeshed import Incomplete
+from mobly import base_test as base_test
+from mobly import config_parser as config_parser
+from mobly import logger as logger
+from mobly import records as records
+from mobly import signals as signals
+from mobly import utils as utils
+
+class Error(Exception): ...
+
+def main(argv: Incomplete | None = ...) -> None: ...
+def parse_mobly_cli_args(argv): ...
+
+class TestRunner:
+    class _TestRunInfo:
+        config: Incomplete
+        test_class: Incomplete
+        test_class_name_suffix: Incomplete
+        tests: Incomplete
+        def __init__(
+            self,
+            config,
+            test_class,
+            tests: Incomplete | None = ...,
+            test_class_name_suffix: Incomplete | None = ...,
+        ) -> None: ...
+
+    class _TestRunMetaData:
+        root_output_path: Incomplete
+        def __init__(self, log_dir, testbed_name) -> None: ...
+        def generate_test_run_log_path(self): ...
+        def set_start_point(self) -> None: ...
+        def set_end_point(self) -> None: ...
+        @property
+        def run_id(self): ...
+        @property
+        def time_elapsed_sec(self): ...
+
+    results: Incomplete
+    def __init__(self, log_dir, testbed_name) -> None: ...
+    def mobly_logger(
+        self, alias: str = ..., console_level=...
+    ) -> Generator[Incomplete, None, None]: ...
+    def add_test_class(
+        self,
+        config,
+        test_class,
+        tests: Incomplete | None = ...,
+        name_suffix: Incomplete | None = ...,
+    ) -> None: ...
+    def run(self) -> None: ...
diff --git a/stubs/mobly/utils.pyi b/stubs/mobly/utils.pyi
new file mode 100644
index 0000000..7024b47
--- /dev/null
+++ b/stubs/mobly/utils.pyi
@@ -0,0 +1,43 @@
+from _typeshed import Incomplete
+
+MAX_FILENAME_LEN: int
+MAX_PORT_ALLOCATION_RETRY: int
+ascii_letters_and_digits: Incomplete
+valid_filename_chars: Incomplete
+GMT_to_olson: Incomplete
+
+class Error(Exception): ...
+
+def abs_path(path): ...
+def create_dir(path) -> None: ...
+def create_alias(target_path, alias_path) -> None: ...
+def get_current_epoch_time(): ...
+def get_current_human_time(): ...
+def epoch_to_human_time(epoch_time): ...
+def get_timezone_olson_id(): ...
+def find_files(paths, file_predicate): ...
+def load_file_to_base64_str(f_path): ...
+def find_field(item_list, cond, comparator, target_field): ...
+def rand_ascii_str(length): ...
+def concurrent_exec(
+    func, param_list, max_workers: int = ..., raise_on_exception: bool = ...
+): ...
+def run_command(
+    cmd,
+    stdout=...,
+    stderr=...,
+    shell=...,
+    timeout=...,
+    cwd=...,
+    env=...,
+    universal_newlines: bool = ...,
+) -> tuple[int, bytes, bytes] | tuple[int, str, str]: ...
+def start_standing_subprocess(cmd, shell: bool = ..., env: Incomplete | None = ...): ...
+def stop_standing_subprocess(proc) -> None: ...
+def wait_for_standing_subprocess(proc, timeout: Incomplete | None = ...) -> None: ...
+def get_available_host_port(): ...
+def grep(regex, output): ...
+def cli_cmd_to_string(args): ...
+def get_settable_properties(cls): ...
+def find_subclasses_in_module(base_classes, module): ...
+def find_subclass_in_module(base_class, module): ...
diff --git a/tests/BUILD.gn b/tests/BUILD.gn
new file mode 100644
index 0000000..072e58b
--- /dev/null
+++ b/tests/BUILD.gn
@@ -0,0 +1,31 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_host, "antlion tests only supported for host testing")
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    "dhcp:e2e_tests",
+    "examples:e2e_tests",
+    "wlan:e2e_tests",
+    "wlan_policy:e2e_tests",
+  ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    "examples:e2e_tests_quick",
+    "wlan:e2e_tests_quick",
+    "wlan_policy:e2e_tests_quick",
+  ]
+}
+
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [
+    "wlan:e2e_tests_manual",
+  ]
+}
diff --git a/tests/dhcp/BUILD.gn b/tests/dhcp/BUILD.gn
new file mode 100644
index 0000000..c3acdd3
--- /dev/null
+++ b/tests/dhcp/BUILD.gn
@@ -0,0 +1,38 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("dhcpv4_duplicate_address_test") {
+  main_source = "Dhcpv4DuplicateAddressTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("dhcpv4_interop_basic_test") {
+  main_source = "Dhcpv4InteropBasicTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("dhcpv4_interop_combinatorial_options_test") {
+  main_source = "Dhcpv4InteropCombinatorialOptionsTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("dhcpv4_interop_fixture_test") {
+  main_source = "Dhcpv4InteropFixtureTest.py"
+  environments = display_ap_envs
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":dhcpv4_duplicate_address_test($host_toolchain)",
+    ":dhcpv4_interop_basic_test($host_toolchain)",
+    ":dhcpv4_interop_combinatorial_options_test($host_toolchain)",
+    ":dhcpv4_interop_fixture_test($host_toolchain)",
+  ]
+}
diff --git a/tests/dhcp/Dhcpv4DuplicateAddressTest.py b/tests/dhcp/Dhcpv4DuplicateAddressTest.py
new file mode 100644
index 0000000..f564011
--- /dev/null
+++ b/tests/dhcp/Dhcpv4DuplicateAddressTest.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ipaddress
+import re
+from ipaddress import IPv4Address
+
+from mobly import asserts, signals, test_runner
+
+from antlion.controllers.ap_lib import dhcp_config
+from antlion.controllers.utils_lib.commands import ip
+from antlion.test_utils.dhcp import base_test
+
+
+class Dhcpv4DuplicateAddressTest(base_test.Dhcpv4InteropFixture):
+    def setup_test(self) -> None:
+        super().setup_test()
+        self.extra_addresses: list[IPv4Address] = []
+        self.ap_params = self.setup_ap()
+        self.ap_ip_cmd = ip.LinuxIpCommand(self.access_point.ssh)
+
+    def teardown_test(self) -> None:
+        super().teardown_test()
+        for address in self.extra_addresses:
+            self.ap_ip_cmd.remove_ipv4_address(self.ap_params.id, address)
+
+    def test_duplicate_address_assignment(self) -> None:
+        """It's possible for a DHCP server to assign an address that already exists on the network.
+        DHCP clients are expected to perform a "gratuitous ARP" of the to-be-assigned address, and
+        refuse to assign that address. Clients should also recover by asking for a different
+        address.
+        """
+        # Modify subnet to hold fewer addresses.
+        # A '/29' has 8 addresses (6 usable excluding router / broadcast)
+        subnet = next(self.ap_params.network.subnets(new_prefix=29))
+        subnet_conf = dhcp_config.Subnet(
+            subnet=subnet,
+            router=self.ap_params.ip,
+            # When the DHCP server is considering dynamically allocating an IP address to a client,
+            # it first sends an ICMP Echo request (a ping) to the address being assigned. It waits
+            # for a second, and if no ICMP Echo response has been heard, it assigns the address.
+            # If a response is heard, the lease is abandoned, and the server does not respond to
+            # the client.
+            # The ping-check configuration parameter can be used to control checking - if its value
+            # is false, no ping check is done.
+            additional_parameters={"ping-check": "false"},
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
+        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+
+        # Add each of the usable IPs as an alias for the router's interface, such that the router
+        # will respond to any pings on it.
+        for ip in subnet.hosts():
+            self.ap_ip_cmd.add_ipv4_address(
+                self.ap_params.id, ipaddress.IPv4Interface(f"{ip}/{ip.max_prefixlen}")
+            )
+            # Ensure we remove the address in self.teardown_test() even if the test fails
+            self.extra_addresses.append(ip)
+
+        self.connect(ap_params=self.ap_params)
+        with asserts.assert_raises(ConnectionError):
+            self.get_device_ipv4_addr()
+
+        # Per spec, the flow should be:
+        # Discover -> Offer -> Request -> Ack -> client optionally performs DAD
+        dhcp_logs = self.access_point.get_dhcp_logs()
+        if dhcp_logs is None:
+            raise signals.TestError("DHCP logs not found; was the DHCP server started?")
+
+        for expected_message in [
+            r"DHCPDISCOVER from \S+",
+            r"DHCPOFFER on [0-9.]+ to \S+",
+            r"DHCPREQUEST for [0-9.]+",
+            r"DHCPACK on [0-9.]+",
+            r"DHCPDECLINE of [0-9.]+ from \S+ via .*: abandoned",
+            r"Abandoning IP address [0-9.]+: declined",
+        ]:
+            asserts.assert_true(
+                re.search(expected_message, dhcp_logs),
+                f"Did not find expected message ({expected_message}) in dhcp logs: {dhcp_logs}"
+                + "\n",
+            )
+
+        # Remove each of the IP aliases.
+        # Note: this also removes the router's address (e.g. 192.168.1.1), so pinging the
+        # router after this will not work.
+        while self.extra_addresses:
+            self.ap_ip_cmd.remove_ipv4_address(
+                self.ap_params.id, self.extra_addresses.pop()
+            )
+
+        # Now, we should get an address successfully
+        ip = self.get_device_ipv4_addr()
+        dhcp_logs = self.access_point.get_dhcp_logs()
+        if dhcp_logs is None:
+            raise signals.TestError("DHCP logs not found; was the DHCP server started?")
+
+        expected_string = f"DHCPREQUEST for {ip}"
+        asserts.assert_true(
+            dhcp_logs.count(expected_string) >= 1,
+            f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
+            + dhcp_logs
+            + "\n",
+        )
+
+        expected_string = f"DHCPACK on {ip}"
+        asserts.assert_true(
+            dhcp_logs.count(expected_string) >= 1,
+            f'Incorrect count of DHCP Acks ("{expected_string}") in logs: '
+            + dhcp_logs
+            + "\n",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/dhcp/Dhcpv4InteropBasicTest.py b/tests/dhcp/Dhcpv4InteropBasicTest.py
new file mode 100644
index 0000000..7f3da59
--- /dev/null
+++ b/tests/dhcp/Dhcpv4InteropBasicTest.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import time
+
+from mobly import asserts, signals, test_runner
+
+from antlion.controllers.ap_lib import dhcp_config
+from antlion.test_utils.dhcp import base_test
+
+
+class Dhcpv4InteropBasicTest(base_test.Dhcpv4InteropFixture):
+    """DhcpV4 tests which validate basic DHCP client/server interactions."""
+
+    def test_basic_dhcp_assignment(self) -> None:
+        self.run_test_case_expect_dhcp_success(
+            dhcp_options={},
+            dhcp_parameters={},
+        )
+
+    def test_pool_allows_unknown_clients(self) -> None:
+        self.run_test_case_expect_dhcp_success(
+            dhcp_options={},
+            dhcp_parameters={"allow": "unknown-clients"},
+        )
+
+    def test_pool_disallows_unknown_clients(self) -> None:
+        ap_params = self.setup_ap()
+        subnet_conf = dhcp_config.Subnet(
+            subnet=ap_params.network,
+            router=ap_params.ip,
+            additional_parameters={"deny": "unknown-clients"},
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
+        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+
+        self.connect(ap_params=ap_params)
+        with asserts.assert_raises(ConnectionError):
+            self.get_device_ipv4_addr()
+
+        dhcp_logs = self.access_point.get_dhcp_logs()
+        if dhcp_logs is None:
+            raise signals.TestError("DHCP logs not found; was the DHCP server started?")
+
+        asserts.assert_true(
+            re.search(r"DHCPDISCOVER from .*no free leases", dhcp_logs),
+            "Did not find expected message in dhcp logs: " + dhcp_logs + "\n",
+        )
+
+    def test_lease_renewal(self) -> None:
+        """Validates that a client renews their DHCP lease."""
+        LEASE_TIME = 30
+        ap_params = self.setup_ap()
+        subnet_conf = dhcp_config.Subnet(subnet=ap_params.network, router=ap_params.ip)
+        dhcp_conf = dhcp_config.DhcpConfig(
+            subnets=[subnet_conf],
+            default_lease_time=LEASE_TIME,
+            max_lease_time=LEASE_TIME,
+        )
+        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+        self.connect(ap_params=ap_params)
+        ip = self.get_device_ipv4_addr()
+
+        SLEEP_TIME = LEASE_TIME + 3
+        self.log.info(f"Sleeping {SLEEP_TIME}s to await DHCP renewal")
+        time.sleep(SLEEP_TIME)
+
+        dhcp_logs = self.access_point.get_dhcp_logs()
+        if dhcp_logs is None:
+            raise signals.TestError("DHCP logs not found; was the DHCP server started?")
+
+        # Fuchsia renews at LEASE_TIME / 2, so there should be at least 2 DHCPREQUESTs in logs.
+        # The log lines look like:
+        # INFO dhcpd[17385]: DHCPREQUEST for 192.168.9.2 from f8:0f:f9:3d:ce:d1 via wlan1
+        # INFO dhcpd[17385]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
+        expected_string = f"DHCPREQUEST for {ip}"
+        asserts.assert_true(
+            dhcp_logs.count(expected_string) >= 2,
+            f'Not enough DHCP renewals ("{expected_string}") in logs: '
+            + dhcp_logs
+            + "\n",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py b/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py
new file mode 100644
index 0000000..ce9af9d
--- /dev/null
+++ b/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import random
+from dataclasses import dataclass
+
+from mobly import asserts, test_runner
+
+from antlion.test_utils.dhcp import base_test
+
+OPT_NUM_DOMAIN_SEARCH = 119
+OPT_NUM_DOMAIN_NAME = 15
+
+
+@dataclass
+class Test:
+    name: str
+    dhcp_options: dict[str, int | str]
+    dhcp_parameters: dict[str, str]
+
+
+class Dhcpv4InteropCombinatorialOptionsTest(base_test.Dhcpv4InteropFixture):
+    """DhcpV4 tests which validate combinations of DHCP options."""
+
+    def setup_generated_tests(self) -> None:
+        def test_logic(t: Test) -> None:
+            self.run_test_case_expect_dhcp_success(dhcp_parameters=t.dhcp_parameters, dhcp_options=t.dhcp_options)
+
+        def name_func(t: Test) -> str:
+            return f"test_{t.name}"
+
+        self.generate_tests(
+            test_logic=test_logic,
+            name_func=name_func,
+            arg_sets=[
+                (t,)
+                for t in [
+                    Test(
+                        name="domain_name_valid",
+                        dhcp_options={
+                            "domain-name": '"example.test"',
+                            "dhcp-parameter-request-list": OPT_NUM_DOMAIN_NAME,
+                        },
+                        dhcp_parameters={},
+                    ),
+                    Test(
+                        name="domain_name_invalid",
+                        dhcp_options={
+                            "domain-name": '"example.invalid"',
+                            "dhcp-parameter-request-list": OPT_NUM_DOMAIN_NAME,
+                        },
+                        dhcp_parameters={},
+                    ),
+                    Test(
+                        name="domain_search_valid",
+                        dhcp_options={
+                            "domain-name": '"example.test"',
+                            "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH,
+                        },
+                        dhcp_parameters={},
+                    ),
+                    Test(
+                        name="domain_search_invalid",
+                        dhcp_options={
+                            "domain-name": '"example.invalid"',
+                            "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH,
+                        },
+                        dhcp_parameters={},
+                    ),
+                    Test(
+                        name="max_sized_message",
+                        dhcp_options=self._generate_max_sized_message_dhcp_options(),
+                        dhcp_parameters={},
+                    ),
+                ]
+            ],
+        )
+
+    def _generate_max_sized_message_dhcp_options(self) -> dict[str, int | str]:
+        """Generates the DHCP options for max sized message test.
+
+        The RFC limits DHCP payloads to 576 bytes unless the client signals it
+        can handle larger payloads, which it does by sending DHCP option 57,
+        "Maximum DHCP Message Size". Despite being able to accept larger
+        payloads, clients typically don't advertise this. The test verifies that
+        the client accepts a large message split across multiple ethernet
+        frames. The test is created by sending many bytes of options through the
+        domain-name-servers option, which is of unbounded length (though is
+        compressed per RFC1035 section 4.1.4).
+
+        Returns:
+            A dict of DHCP options.
+        """
+        typical_ethernet_mtu = 1500
+
+        long_dns_setting = ", ".join(
+            f'"ns{num}.example"'
+            for num in random.sample(range(100_000, 1_000_000), 250)
+        )
+        # RFC1035 compression means any shared suffix ('.example' in this case)
+        # will be deduplicated. Calculate approximate length by removing that
+        # suffix.
+        long_dns_setting_len = len(
+            long_dns_setting.replace(", ", "")
+            .replace('"', "")
+            .replace(".example", "")
+            .encode("utf-8")
+        )
+        asserts.assert_true(
+            long_dns_setting_len > typical_ethernet_mtu,
+            "Expected to generate message greater than ethernet mtu",
+        )
+
+        return {
+            "dhcp-max-message-size": long_dns_setting_len * 2,
+            "domain-search": long_dns_setting,
+            "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH,
+        }
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/dhcp/Dhcpv4InteropFixtureTest.py b/tests/dhcp/Dhcpv4InteropFixtureTest.py
new file mode 100644
index 0000000..7303052
--- /dev/null
+++ b/tests/dhcp/Dhcpv4InteropFixtureTest.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mobly import asserts, test_runner
+
+from antlion.controllers.ap_lib import dhcp_config
+from antlion.test_utils.dhcp import base_test
+
+
+class Dhcpv4InteropFixtureTest(base_test.Dhcpv4InteropFixture):
+    """Tests which validate the behavior of the Dhcpv4InteropFixture.
+
+    In theory, these are more similar to unit tests than ACTS tests, but
+    since they interact with hardware (specifically, the AP), we have to
+    write and run them like the rest of the ACTS tests."""
+
+    def test_invalid_options_not_accepted(self) -> None:
+        """Ensures the DHCP server doesn't accept invalid options"""
+        ap_params = self.setup_ap()
+        subnet_conf = dhcp_config.Subnet(
+            subnet=ap_params.network,
+            router=ap_params.ip,
+            additional_options={"foo": "bar"},
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
+        with asserts.assert_raises_regex(Exception, r"failed to start"):
+            self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+
+    def test_invalid_parameters_not_accepted(self) -> None:
+        """Ensures the DHCP server doesn't accept invalid parameters"""
+        ap_params = self.setup_ap()
+        subnet_conf = dhcp_config.Subnet(
+            subnet=ap_params.network,
+            router=ap_params.ip,
+            additional_parameters={"foo": "bar"},
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
+        with asserts.assert_raises_regex(Exception, r"failed to start"):
+            self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+
+    def test_no_dhcp_server_started(self) -> None:
+        """Validates that the test fixture does not start a DHCP server."""
+        ap_params = self.setup_ap()
+        self.connect(ap_params=ap_params)
+        with asserts.assert_raises(ConnectionError):
+            self.get_device_ipv4_addr()
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/examples/BUILD.gn b/tests/examples/BUILD.gn
new file mode 100644
index 0000000..066d515
--- /dev/null
+++ b/tests/examples/BUILD.gn
@@ -0,0 +1,27 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("sl4f_sanity_test") {
+  main_source = "Sl4fSanityTest.py"
+  environments = display_envs + [ qemu_env ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    ":sl4f_sanity_test($host_toolchain)",
+  ]
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":sl4f_sanity_test($host_toolchain)",
+  ]
+}
diff --git a/tests/examples/Sl4fSanityTest.py b/tests/examples/Sl4fSanityTest.py
new file mode 100644
index 0000000..4056e65
--- /dev/null
+++ b/tests/examples/Sl4fSanityTest.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Test to verify SL4F is running on a Fuchsia device and can communicate with
+antlion successfully.
+"""
+
+import logging
+
+from mobly import asserts, test_runner
+
+from antlion import base_test
+from antlion.controllers import fuchsia_device
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+
+
+class Sl4fSanityTest(base_test.AntlionBaseTest):
+    def setup_class(self):
+        self.log = logging.getLogger()
+        self.fuchsia_devices: list[FuchsiaDevice] = self.register_controller(
+            fuchsia_device
+        )
+
+        asserts.abort_class_if(
+            len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device"
+        )
+
+    def test_example(self):
+        for device in self.fuchsia_devices:
+            res = device.sl4f.netstack_lib.netstackListInterfaces()
+            self.log.info(res)
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/flash/FlashTest.py b/tests/flash/FlashTest.py
new file mode 100644
index 0000000..f1263b8
--- /dev/null
+++ b/tests/flash/FlashTest.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Script for to flash Fuchsia devices and reports the DUT's version of Fuchsia in
+the Sponge test result properties. Uses the built in flashing tool for
+fuchsia_devices.
+"""
+
+import logging
+
+from mobly import asserts, signals, test_runner
+
+from antlion import base_test
+from antlion.controllers import fuchsia_device, pdu
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.pdu import PduDevice
+from antlion.utils import get_device
+
+MAX_FLASH_ATTEMPTS = 3
+
+
+class FlashTest(base_test.AntlionBaseTest):
+    def setup_class(self):
+        self.log = logging.getLogger()
+        self.fuchsia_devices: list[FuchsiaDevice] = self.register_controller(
+            fuchsia_device
+        )
+        self.pdu_devices: list[PduDevice] = self.register_controller(pdu)
+        self.failed_to_get_device_info = False
+
+    def teardown_class(self):
+        # Verify that FlashTest successfully reported the DUT version. This is
+        # working around a flaw in ACTS where signals.TestAbortAll does not
+        # report any errors.
+        #
+        # TODO(http://b/253515812): This has been fixed in Mobly already. Remove
+        # teardown_class and change "TestError" to "abort_all" in
+        # test_flash_devices once we move to Mobly.
+        if self.failed_to_get_device_info:
+            asserts.abort_all("Failed to get DUT device information")
+
+        return super().teardown_class()
+
+    def test_flash_devices(self) -> None:
+        """Flashes a Fuchsia device for testing.
+
+        This method calls the fuchsia_device reboot() with 'flash' argument.
+        This kicks off a flash, not pave, of the fuchsia device. It also soft
+        reboots the device. On error it will attempt to reflash up to
+        MAX_FLASH_ATTEMPTS times, hard rebooting in between each attempt.
+        """
+        for device in self.fuchsia_devices:
+            flash_counter = 0
+            while True:
+                try:
+                    device.reboot(
+                        reboot_type="flash", use_ssh=True, unreachable_timeout=120
+                    )
+                    self.log.info(f"{device.orig_ip} has been flashed.")
+                    break
+                except Exception as err:
+                    self.log.error(
+                        f"Failed to flash {device.orig_ip} with error:\n{err}"
+                    )
+
+                    if not device.device_pdu_config:
+                        asserts.abort_all(
+                            f"Failed to flash {device.orig_ip} and no PDU "
+                            "available for hard reboot"
+                        )
+
+                    flash_counter = flash_counter + 1
+                    if flash_counter == MAX_FLASH_ATTEMPTS:
+                        asserts.abort_all(
+                            f"Failed to flash {device.orig_ip} after "
+                            f"{MAX_FLASH_ATTEMPTS} attempts"
+                        )
+
+                    self.log.info(
+                        f"Hard rebooting {device.orig_ip} and retrying flash."
+                    )
+                    device.reboot(reboot_type="hard", testbed_pdus=self.pdu_devices)
+
+        # Report the new Fuchsia version
+        try:
+            dut = get_device(self.fuchsia_devices, "DUT")
+            version = dut.version()
+            device_name = dut.device_name()
+            product_name = dut.product_name()
+
+            self.record_data(
+                {
+                    "sponge_properties": {
+                        "DUT_VERSION": version,
+                        "DUT_NAME": device_name,
+                        "DUT_PRODUCT": product_name,
+                    },
+                }
+            )
+
+            self.log.info(f"DUT version: {version}")
+            self.log.info(f"DUT name: {device_name}")
+            self.log.info(f"DUT product: {product_name}")
+        except Exception as e:
+            self.failed_to_get_device_info = True
+            raise signals.TestError(f"Failed to get DUT device information: {e}") from e
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/logging/FuchsiaLoggingTest.py b/tests/logging/FuchsiaLoggingTest.py
new file mode 100644
index 0000000..5383b77
--- /dev/null
+++ b/tests/logging/FuchsiaLoggingTest.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from mobly import signals, test_runner
+
+from antlion import base_test, controllers
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+
+MESSAGE = "Logging Test"
+
+
+class FuchsiaLoggingTest(base_test.AntlionBaseTest):
+    def setup_class(self):
+        fuchsia_devices: list[FuchsiaDevice] | None = self.register_controller(
+            controllers.fuchsia_device
+        )
+        if fuchsia_devices is None or len(fuchsia_devices) == 0:
+            raise signals.TestAbortClass("Requires at least one Fuchsia device")
+        self.fuchsia_device = fuchsia_devices[0]
+
+    def test_log_err(self):
+        result = self.fuchsia_device.sl4f.logging_lib.logE(MESSAGE)
+        if result.get("error") is None:
+            raise signals.TestPass(result.get("result"))
+        else:
+            raise signals.TestFailure(result.get("error"))
+
+    def test_log_info(self):
+        result = self.fuchsia_device.sl4f.logging_lib.logI(MESSAGE)
+        if result.get("error") is None:
+            raise signals.TestPass(result.get("result"))
+        else:
+            raise signals.TestFailure(result.get("error"))
+
+    def test_log_warn(self):
+        result = self.fuchsia_device.sl4f.logging_lib.logW(MESSAGE)
+        if result.get("error") is None:
+            raise signals.TestPass(result.get("result"))
+        else:
+            raise signals.TestFailure(result.get("error"))
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/netstack/NetstackIfaceTest.py b/tests/netstack/NetstackIfaceTest.py
new file mode 100644
index 0000000..9d423ce
--- /dev/null
+++ b/tests/netstack/NetstackIfaceTest.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from mobly import signals, test_runner
+
+from antlion import base_test, controllers
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+
+
+class NetstackIfaceTest(base_test.AntlionBaseTest):
+    default_timeout = 10
+    active_scan_callback_list = []
+    active_adv_callback_list = []
+    droid = None
+
+    def setup_class(self):
+        self.log = logging.getLogger()
+        fuchsia_devices: list[FuchsiaDevice] | None = self.register_controller(
+            controllers.fuchsia_device
+        )
+        if fuchsia_devices is None or len(fuchsia_devices) == 0:
+            raise signals.TestAbortClass("Requires at least one Fuchsia device")
+        self.fuchsia_device = fuchsia_devices[0]
+
+    def _enable_all_interfaces(self):
+        interfaces = self.fuchsia_device.sl4f.netstack_lib.netstackListInterfaces()
+        for item in interfaces.get("result"):
+            identifier = item.get("id")
+            self.fuchsia_device.sl4f.netstack_lib.enableInterface(identifier)
+
+    def setup_test(self):
+        # Always make sure all interfaces listed are in an up state.
+        self._enable_all_interfaces()
+
+    def teardown_test(self):
+        # Always make sure all interfaces listed are in an up state.
+        self._enable_all_interfaces()
+
+    def test_list_interfaces(self):
+        """Test listing all interfaces.
+
+        Steps:
+        1. Call ListInterfaces FIDL api.
+        2. Verify there is at least one interface returned.
+
+        Expected Result:
+        There were no errors in retrieving the list of interfaces.
+        There was at least one interface in the list.
+
+        Returns:
+          signals.TestPass if no errors
+          signals.TestFailure if there are any errors during the test.
+
+        TAGS: Netstack
+        Priority: 1
+        """
+        interfaces = self.fuchsia_device.sl4f.netstack_lib.netstackListInterfaces()
+        if interfaces.get("error") is not None:
+            raise signals.TestFailure(f"Failed with {interfaces.get('error')}")
+        if len(interfaces.get("result")) < 1:
+            raise signals.TestFailure("No interfaces found.")
+        self.log.info(f"Interfaces found: {interfaces.get('result')}")
+        raise signals.TestPass("Success")
+
+    def test_toggle_wlan_interface(self):
+        """Test toggling the wlan interface if it exists.
+
+        Steps:
+        1. Call ListInterfaces FIDL api.
+        2. Find the wlan interface.
+        3. Disable the interface.
+        4. Verify interface attributes in a down state.
+        5. Enable the interface.
+        6. Verify interface attributes in an up state.
+
+        Expected Result:
+        WLAN interface was successfully brought down and up again.
+
+        Returns:
+          signals.TestPass if no errors
+          signals.TestFailure if there are any errors during the test.
+          signals.TestSkip if there are no wlan interfaces.
+
+        TAGS: Netstack
+        Priority: 1
+        """
+
+        def get_wlan_interfaces():
+            result = self.fuchsia_device.sl4f.netstack_lib.netstackListInterfaces()
+            if error := result.get("error"):
+                raise signals.TestFailure(f"unable to list interfaces: {error}")
+            return [
+                interface
+                for interface in result.get("result")
+                if "wlan" in interface.get("name")
+            ]
+
+        def get_ids(interfaces):
+            return [interface.get("id") for interface in interfaces]
+
+        wlan_interfaces = get_wlan_interfaces()
+        if not wlan_interfaces:
+            raise signals.TestSkip("no wlan interface found")
+        interface_ids = get_ids(wlan_interfaces)
+
+        # Disable the interfaces.
+        for identifier in interface_ids:
+            result = self.fuchsia_device.sl4f.netstack_lib.disableInterface(identifier)
+            if error := result.get("error"):
+                raise signals.TestFailure(
+                    f"failed to disable wlan interface {identifier}: {error}"
+                )
+
+        # Retrieve the interfaces again.
+        disabled_wlan_interfaces = get_wlan_interfaces()
+        disabled_interface_ids = get_ids(disabled_wlan_interfaces)
+
+        if not disabled_interface_ids == interface_ids:
+            raise signals.TestFailure(
+                f"disabled interface IDs do not match original interface IDs: original={interface_ids} disabled={disabled_interface_ids}"
+            )
+
+        # Check the current state of the interfaces.
+        for interface in disabled_wlan_interfaces:
+            if len(interface.get("ipv4_addresses")) > 0:
+                raise signals.TestFailure(
+                    f"no Ipv4 Address should be present: {interface}"
+                )
+
+            # TODO (35981): Verify other values when interface down.
+
+        # Re-enable the interfaces.
+        for identifier in disabled_interface_ids:
+            result = self.fuchsia_device.sl4f.netstack_lib.enableInterface(identifier)
+            if error := result.get("error"):
+                raise signals.TestFailure(
+                    f"failed to enable wlan interface {identifier}: {error}"
+                )
+
+        # TODO (35981): Verify other values when interface up.
+        raise signals.TestPass("Success")
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/netstack/ToggleWlanInterfaceStressTest.py b/tests/netstack/ToggleWlanInterfaceStressTest.py
new file mode 100644
index 0000000..7004500
--- /dev/null
+++ b/tests/netstack/ToggleWlanInterfaceStressTest.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+
+from mobly import signals, test_runner
+from mobly.config_parser import TestRunConfig
+
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+
+class ToggleWlanInterfaceStressTest(base_test.WifiBaseTest):
+    def __init__(self, configs: TestRunConfig) -> None:
+        super().__init__(configs)
+        self.log = logging.getLogger()
+        self.fuchsia_device, self.dut = self.get_dut_type(
+            FuchsiaDevice, AssociationMode.POLICY
+        )
+
+    def test_iface_toggle_and_ping(self) -> None:
+        """Test that we don't error out when toggling WLAN interfaces.
+
+        Steps:
+        1. Find a WLAN interface
+        2. Destroy it
+        3. Create a new WLAN interface
+        4. Ping after association
+        5. Repeat 1-4 1,000 times
+
+        Expected Result:
+        Verify there are no errors in destroying the wlan interface.
+
+        Returns:
+          signals.TestPass if no errors
+          signals.TestFailure if there are any errors during the test.
+
+        TAGS: WLAN, Stability
+        Priority: 1
+        """
+
+        # Test assumes you've already connected to some AP.
+
+        for i in range(1000):
+            wlan_interfaces = self.dut.get_wlan_interface_id_list()
+            self.log.info(wlan_interfaces)
+
+            if len(wlan_interfaces) < 1:
+                raise signals.TestFailure("Not enough wlan interfaces for test")
+
+            self.dut.destroy_wlan_interface(wlan_interfaces[0])
+            # Really make sure it is dead
+            self.fuchsia_device.ssh.run(f"wlan iface del {wlan_interfaces[0]}")
+            # Grace period
+            time.sleep(2)
+            self.fuchsia_device.ssh.run("wlan iface new --phy 0 --role Client")
+            end_time = time.time() + 300
+
+            while time.time() < end_time:
+                time.sleep(1)
+                if self.dut.is_connected():
+                    try:
+                        ping_result = self.dut.ping("8.8.8.8", 10, 1000, 1000, 25)
+                        print(ping_result)
+                    except Exception as err:
+                        # TODO: Once we gain more stability, fail test when pinging fails
+                        print(f"some err {err}")
+                    time.sleep(2)  # give time for some traffic
+                    break
+            if not self.dut.is_connected():
+                raise signals.TestFailure(f"Failed at iteration {i + 1}")
+            self.log.info(f"Iteration {i + 1} successful")
+        raise signals.TestPass("Success")
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/BUILD.gn b/tests/wlan/BUILD.gn
new file mode 100644
index 0000000..a79ca51
--- /dev/null
+++ b/tests/wlan/BUILD.gn
@@ -0,0 +1,34 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_host, "antlion tests only supported for host testing")
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    "compliance:e2e_tests",
+    "facade:e2e_tests",
+    "functional:e2e_tests",
+    "misc:e2e_tests",
+    "performance:e2e_tests",
+  ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    "compliance:e2e_tests_quick",
+    "functional:e2e_tests_quick",
+    "performance:e2e_tests_quick",
+  ]
+}
+
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [
+    "compliance:e2e_tests_manual",
+    "functional:e2e_tests_manual",
+    "performance:e2e_tests_manual",
+  ]
+}
diff --git a/tests/wlan/compliance/BUILD.gn b/tests/wlan/compliance/BUILD.gn
new file mode 100644
index 0000000..98551aa
--- /dev/null
+++ b/tests/wlan/compliance/BUILD.gn
@@ -0,0 +1,83 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("regulatory_compliance_test") {
+  main_source = "RegulatoryComplianceTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("vape_interop_test") {
+  main_source = "VapeInteropTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_phy_compliance_11ac_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_phy_compliance_11n_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_phy_compliance_abg_test") {
+  main_source = "WlanPhyComplianceABGTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_security_compliance_abg_test") {
+  main_source = "WlanSecurityComplianceABGTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_security_compliance_abg_test_quick") {
+  main_source = "WlanSecurityComplianceABGTest.py"
+  environments = display_ap_envs
+  test_cases = [
+    "test_associate_11bg_sec_open_wep_26_hex_ptk_none",
+    "test_associate_11bg_sec_wpa_psk_ptk_tkip",
+    "test_associate_11bg_sec_wpa_psk_ptk_ccmp",
+    "test_associate_11bg_sec_wpa2_psk_ptk_tkip",
+    "test_associate_11bg_sec_wpa2_psk_ptk_ccmp",
+    "test_associate_11bg_pmf_sec_wpa2_psk_ptk_ccmp",
+    "test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip",
+    "test_associate_11bg_sec_wpa_wpa2_psk_ptk_ccmp",
+    "test_associate_11bg_sec_wpa3_sae_ptk_ccmp",
+    "test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_ccmp",
+  ]
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":vape_interop_test($host_toolchain)",
+    ":wlan_phy_compliance_11ac_test($host_toolchain)",
+    ":wlan_phy_compliance_11n_test($host_toolchain)",
+    ":wlan_phy_compliance_abg_test($host_toolchain)",
+    ":wlan_security_compliance_abg_test($host_toolchain)",
+  ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    ":wlan_security_compliance_abg_test_quick($host_toolchain)",
+  ]
+}
+
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [
+    # Running RegulatoryComplianceTest is usually only necessary when verifying
+    # new WLAN firmware patches. Take it out of automation; it takes too long
+    # otherwise.
+    ":regulatory_compliance_test($host_toolchain)",
+  ]
+}
diff --git a/tests/wlan/compliance/RegulatoryComplianceTest.py b/tests/wlan/compliance/RegulatoryComplianceTest.py
new file mode 100644
index 0000000..5e13176
--- /dev/null
+++ b/tests/wlan/compliance/RegulatoryComplianceTest.py
@@ -0,0 +1,201 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from typing import NamedTuple
+
+from honeydew.typing.wlan import CountryCode
+from mobly import asserts, test_runner
+from mobly.config_parser import TestRunConfig
+
+from antlion import utils
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.regulatory_channels import (
+    COUNTRY_CHANNELS,
+    TEST_CHANNELS,
+)
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+N_CAPABILITIES_DEFAULT = [
+    hostapd_constants.N_CAPABILITY_LDPC,
+    hostapd_constants.N_CAPABILITY_SGI20,
+    hostapd_constants.N_CAPABILITY_SGI40,
+    hostapd_constants.N_CAPABILITY_TX_STBC,
+    hostapd_constants.N_CAPABILITY_RX_STBC1,
+]
+
+MAX_2_4_CHANNEL = 14
+
+
+class RegulatoryTest(NamedTuple):
+    country_code: str
+    channel: int
+    channel_bandwidth: int
+    expect_association: bool
+
+
+class RegulatoryComplianceTest(base_test.WifiBaseTest):
+    """Tests regulatory compliance.
+
+    Testbed Requirement:
+    * 1 x Fuchsia device (dut)
+    * 1 x access point
+    """
+
+    def __init__(self, configs: TestRunConfig) -> None:
+        super().__init__(configs)
+        self.log = logging.getLogger()
+        self.fuchsia_device, self.dut = self.get_dut_type(
+            FuchsiaDevice, AssociationMode.POLICY
+        )
+
+        self.access_point = self.access_points[0]
+        self.access_point.stop_all_aps()
+
+        self.regulatory_results = [
+            "====CountryCode,Channel,Frequency,ChannelBandwith,Connected/Not-Connected===="
+        ]
+
+    def pre_run(self) -> None:
+        tests: list[RegulatoryTest] = []
+        for country in COUNTRY_CHANNELS.values():
+            for channel in TEST_CHANNELS:
+                for bandwidth in TEST_CHANNELS[channel]:
+                    tests.append(
+                        RegulatoryTest(
+                            country_code=country.country_code,
+                            channel=channel,
+                            channel_bandwidth=bandwidth,
+                            expect_association=(
+                                channel in country.allowed_channels
+                                and bandwidth in country.allowed_channels[channel]
+                            ),
+                        )
+                    )
+
+        def generate_test_name(
+            code: str, channel: int, channel_bandwidth: int, *_
+        ) -> str:
+            return f"test_{code}_channel_{channel}_{channel_bandwidth}mhz"
+
+        self.generate_tests(self.verify_channel_compliance, generate_test_name, tests)
+
+    def teardown_class(self) -> None:
+        super().teardown_class()
+
+        regulatory_save_path = f"{self.log_path}/regulatory_results.txt"
+        with open(regulatory_save_path, "w", encoding="utf-8") as file:
+            file.write("\n".join(self.regulatory_results))
+
+    def setup_test(self) -> None:
+        super().setup_test()
+        self.access_point.stop_all_aps()
+        for ad in self.android_devices:
+            ad.droid.wakeLockAcquireBright()
+            ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+        self.dut.disconnect()
+
+    def teardown_test(self) -> None:
+        for ad in self.android_devices:
+            ad.droid.wakeLockRelease()
+            ad.droid.goToSleepNow()
+        self.dut.turn_location_off_and_scan_toggle_off()
+        self.dut.disconnect()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+        super().teardown_test()
+
+    def setup_ap(
+        self,
+        channel: int,
+        channel_bandwidth: int,
+    ) -> str:
+        """Start network on AP with basic configuration.
+
+        Args:
+            channel: channel to use for network
+            channel_bandwidth: channel bandwidth in mhz to use for network,
+
+        Returns:
+            SSID of the newly created and running network
+
+        Raises:
+            ConnectionError if network is not started successfully.
+        """
+        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        try:
+            setup_ap(
+                access_point=self.access_point,
+                profile_name="whirlwind",
+                channel=channel,
+                force_wmm=True,
+                ssid=ssid,
+                vht_bandwidth=channel_bandwidth,
+                setup_bridge=True,
+            )
+            self.log.info(
+                f"Network (ssid: {ssid}) up on channel {channel} "
+                f"w/ channel bandwidth {channel_bandwidth} MHz"
+            )
+            return ssid
+        except Exception as err:
+            raise ConnectionError(
+                f"Failed to setup ap on channel: {channel}, "
+                f"channel bandwidth: {channel_bandwidth} MHz. "
+            ) from err
+
+    def verify_channel_compliance(
+        self,
+        country_code: str,
+        channel: int,
+        channel_bandwidth: int,
+        expect_association: bool,
+    ) -> None:
+        """Verify device complies with provided regulatory requirements for a
+        specific channel and channel bandwidth. Run with generated test cases
+        in the verify_regulatory_compliance parent test.
+        """
+        self.fuchsia_device.wlan_controller.set_country_code(CountryCode(country_code))
+
+        ssid = self.setup_ap(channel, channel_bandwidth)
+
+        self.log.info(
+            f'Attempting to associate to network "{ssid}" on channel '
+            f"{channel} @ {channel_bandwidth}mhz"
+        )
+
+        associated = self.dut.associate(ssid)
+
+        channel_ghz = "2.4" if channel < 36 else "5"
+        association_code = "c" if associated else "nc"
+        regulatory_result = f"REGTRACKER: {country_code},{channel},{channel_ghz},{channel_bandwidth},{association_code}"
+        self.regulatory_results.append(regulatory_result)
+        self.log.info(regulatory_result)
+
+        asserts.assert_true(
+            associated == expect_association,
+            f"Expected device to{'' if expect_association else ' NOT'} "
+            f"associate using country code {country_code} for channel "
+            f"{channel} with channel bandwidth {channel_bandwidth} MHz.",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/compliance/VapeInteropTest.py b/tests/wlan/compliance/VapeInteropTest.py
new file mode 100644
index 0000000..1daa98c
--- /dev/null
+++ b/tests/wlan/compliance/VapeInteropTest.py
@@ -0,0 +1,886 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mobly import asserts, signals, test_runner
+from mobly.records import TestResultRecord
+
+from antlion import utils
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+
+class VapeInteropTest(base_test.WifiBaseTest):
+    """Tests interoperability with mock third party AP profiles.
+
+    Test Bed Requirement:
+    * One Android or Fuchsia Device
+    * One Whirlwind Access Point
+    """
+
+    def setup_class(self) -> None:
+        super().setup_class()
+
+        self.dut = self.get_dut(AssociationMode.POLICY)
+
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("Requires at least one access point")
+        self.access_point = self.access_points[0]
+
+        # Same for both 2g and 5g
+        self.ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        self.password = utils.rand_ascii_str(hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
+        self.security_profile_wpa2 = Security(
+            security_mode=SecurityMode.WPA2,
+            password=self.password,
+            wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
+        )
+
+        self.access_point.stop_all_aps()
+
+    def setup_test(self) -> None:
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockAcquireBright()
+                ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+
+    def teardown_test(self) -> None:
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockRelease()
+                ad.droid.goToSleepNow()
+        self.dut.turn_location_off_and_scan_toggle_off()
+        self.dut.disconnect()
+        self.dut.reset_wifi()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+
+    def on_fail(self, record: TestResultRecord) -> None:
+        super().on_fail(record)
+        self.access_point.stop_all_aps()
+
+    def test_associate_actiontec_pk5000_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="actiontec_pk5000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_actiontec_pk5000_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="actiontec_pk5000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_actiontec_mi424wr_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="actiontec_mi424wr",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_actiontec_mi424wr_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="actiontec_mi424wr",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_asus_rtac66u_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_asus_rtac66u_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_asus_rtac66u_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_asus_rtac66u_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_asus_rtac86u_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac86u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_asus_rtac86u_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac86u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_asus_rtac86u_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac86u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_asus_rtac86u_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac86u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_asus_rtac5300_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac5300",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_asus_rtac5300_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac5300",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_asus_rtac5300_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac5300",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_asus_rtac5300_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtac5300",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_asus_rtn56u_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn56u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_asus_rtn56u_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn56u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_asus_rtn56u_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn56u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_asus_rtn56u_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn56u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_asus_rtn66u_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_asus_rtn66u_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_asus_rtn66u_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_asus_rtn66u_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="asus_rtn66u",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_belkin_f9k1001v5_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="belkin_f9k1001v5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_belkin_f9k1001v5_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="belkin_f9k1001v5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_linksys_ea4500_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea4500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_linksys_ea4500_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea4500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_linksys_ea4500_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea4500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_linksys_ea4500_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea4500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_linksys_ea9500_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea9500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_linksys_ea9500_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea9500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_linksys_ea9500_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea9500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_linksys_ea9500_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_ea9500",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_linksys_wrt1900acv2_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_wrt1900acv2",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_linksys_wrt1900acv2_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_wrt1900acv2",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_linksys_wrt1900acv2_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_wrt1900acv2",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_linksys_wrt1900acv2_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="linksys_wrt1900acv2",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_netgear_r7000_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_r7000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_netgear_r7000_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_r7000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_netgear_r7000_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_r7000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_netgear_r7000_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_r7000",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_netgear_wndr3400_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_wndr3400",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_netgear_wndr3400_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_wndr3400",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_netgear_wndr3400_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_wndr3400",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_netgear_wndr3400_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="netgear_wndr3400",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_securifi_almond_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="securifi_almond",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_securifi_almond_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="securifi_almond",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_tplink_archerc5_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_tplink_archerc5_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_tplink_archerc5_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_tplink_archerc5_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc5",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_tplink_archerc7_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc7",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_tplink_archerc7_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc7",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_tplink_archerc7_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc7",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_tplink_archerc7_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_archerc7",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_tplink_c1200_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_c1200",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_tplink_c1200_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_c1200",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_tplink_c1200_5ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_c1200",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_tplink_c1200_5ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_c1200",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+    def test_associate_tplink_tlwr940n_24ghz_open(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_tlwr940n",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+
+    def test_associate_tplink_tlwr940n_24ghz_wpa2(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="tplink_tlwr940n",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile_wpa2,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_pwd=self.password,
+                target_security=SecurityMode.WPA2,
+            ),
+            "Failed to connect.",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/compliance/WlanPhyCompliance11ACTest.py b/tests/wlan/compliance/WlanPhyCompliance11ACTest.py
new file mode 100644
index 0000000..737fece
--- /dev/null
+++ b/tests/wlan/compliance/WlanPhyCompliance11ACTest.py
@@ -0,0 +1,294 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+from dataclasses import dataclass
+from typing import Any
+
+from mobly import asserts, signals, test_runner
+from mobly.records import TestResultRecord
+
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+from antlion.utils import rand_ascii_str
+
+# AC Capabilities
+"""
+Capabilities Not Supported on Whirlwind:
+    - Supported Channel Width ([VHT160], [VHT160-80PLUS80]): 160mhz and 80+80
+        unsupported
+    - SU Beamformer [SU-BEAMFORMER]
+    - SU Beamformee [SU-BEAMFORMEE]
+    - MU Beamformer [MU-BEAMFORMER]
+    - MU Beamformee [MU-BEAMFORMEE]
+    - BF Antenna ([BF-ANTENNA-2], [BF-ANTENNA-3], [BF-ANTENNA-4])
+    - Rx STBC 2, 3, & 4 ([RX-STBC-12],[RX-STBC-123],[RX-STBC-124])
+    - VHT Link Adaptation ([VHT-LINK-ADAPT2],[VHT-LINK-ADAPT3])
+    - VHT TXOP Power Save [VHT-TXOP-PS]
+    - HTC-VHT [HTC-VHT]
+"""
+VHT_MAX_MPDU_LEN = [
+    hostapd_constants.AC_CAPABILITY_MAX_MPDU_7991,
+    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
+    "",
+]
+RXLDPC = [hostapd_constants.AC_CAPABILITY_RXLDPC, ""]
+SHORT_GI_80 = [hostapd_constants.AC_CAPABILITY_SHORT_GI_80, ""]
+TX_STBC = [hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, ""]
+RX_STBC = [hostapd_constants.AC_CAPABILITY_RX_STBC_1, ""]
+MAX_A_MPDU = [
+    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0,
+    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1,
+    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2,
+    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3,
+    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4,
+    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5,
+    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6,
+    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
+    "",
+]
+RX_ANTENNA = [hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, ""]
+TX_ANTENNA = [hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, ""]
+
+# Default 11N Capabilities
+N_CAPABS_40MHZ = [
+    hostapd_constants.N_CAPABILITY_LDPC,
+    hostapd_constants.N_CAPABILITY_SGI20,
+    hostapd_constants.N_CAPABILITY_RX_STBC1,
+    hostapd_constants.N_CAPABILITY_SGI20,
+    hostapd_constants.N_CAPABILITY_SGI40,
+    hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+    hostapd_constants.N_CAPABILITY_HT40_PLUS,
+]
+
+N_CAPABS_20MHZ = [
+    hostapd_constants.N_CAPABILITY_LDPC,
+    hostapd_constants.N_CAPABILITY_SGI20,
+    hostapd_constants.N_CAPABILITY_RX_STBC1,
+    hostapd_constants.N_CAPABILITY_SGI20,
+    hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
+    hostapd_constants.N_CAPABILITY_HT20,
+]
+
+# Default wpa2 profile.
+WPA2_SECURITY = Security(
+    security_mode=SecurityMode.WPA2,
+    password=rand_ascii_str(20),
+    wpa_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
+    wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
+)
+
+SECURITIES: list[Security] = [Security(), WPA2_SECURITY]
+
+
+@dataclass
+class TestParams:
+    security: Security
+    vht_bandwidth_mhz: int
+    # TODO(http://b/290396383): Type AP capabilities as enums
+    n_capabilities: list[Any]
+    ac_capabilities: list[Any]
+
+
+# 6912 test cases
+class WlanPhyCompliance11ACTest(base_test.WifiBaseTest):
+    """Tests for validating 11ac PHYS.
+
+    Test Bed Requirement:
+    * One Android device or Fuchsia device
+    * One Access Point
+    """
+
+    def __init__(self, controllers):
+        super().__init__(controllers)
+
+    def setup_generated_tests(self):
+        test_args: list[tuple[TestParams]] = (
+            self._generate_20mhz_test_args()
+            + self._generate_40mhz_test_args()
+            + self._generate_80mhz_test_args()
+        )
+
+        def generate_test_name(test: TestParams):
+            ret = []
+            for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys():
+                if cap in test.ac_capabilities:
+                    ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap])
+            return (
+                f"test_11ac_{test.vht_bandwidth_mhz}mhz_{test.security}_{''.join(ret)}"
+            )
+
+        self.generate_tests(
+            test_logic=self.setup_and_connect,
+            name_func=generate_test_name,
+            arg_sets=test_args,
+        )
+
+    def setup_class(self):
+        super().setup_class()
+
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("At least one access point is required")
+        self.access_point = self.access_points[0]
+
+        self.dut = self.get_dut(AssociationMode.POLICY)
+        self.access_point.stop_all_aps()
+
+    def setup_test(self):
+        for ad in self.android_devices:
+            ad.droid.wakeLockAcquireBright()
+            ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+
+    def teardown_test(self):
+        for ad in self.android_devices:
+            ad.droid.wakeLockRelease()
+            ad.droid.goToSleepNow()
+        self.dut.turn_location_off_and_scan_toggle_off()
+        self.dut.disconnect()
+        self.dut.reset_wifi()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+
+    def on_fail(self, record: TestResultRecord):
+        super().on_fail(record)
+        self.access_point.stop_all_aps()
+
+    def setup_and_connect(self, test: TestParams):
+        """Setup the AP and then attempt to associate a DUT.
+
+        Args:
+            test: Test parameters
+        """
+        ssid = rand_ascii_str(20)
+        password: str | None = None
+        target_security = SecurityMode.OPEN
+        if test.security:
+            password = test.security.password
+            target_security = test.security.security_mode
+
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            mode=hostapd_constants.MODE_11AC_MIXED,
+            channel=36,
+            n_capabilities=test.n_capabilities,
+            ac_capabilities=test.ac_capabilities,
+            force_wmm=True,
+            ssid=ssid,
+            security=test.security,
+            vht_bandwidth=test.vht_bandwidth_mhz,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                ssid, target_pwd=password, target_security=target_security
+            ),
+            "Failed to associate.",
+        )
+
+    # 1728 tests
+    def _generate_20mhz_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+
+        # 864 test cases for open security
+        # 864 test cases for wpa2 security
+        for combination in itertools.product(
+            SECURITIES,
+            VHT_MAX_MPDU_LEN,
+            RXLDPC,
+            RX_STBC,
+            TX_STBC,
+            MAX_A_MPDU,
+            RX_ANTENNA,
+            TX_ANTENNA,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        vht_bandwidth_mhz=20,
+                        security=combination[0],
+                        n_capabilities=N_CAPABS_20MHZ,
+                        ac_capabilities=list(combination[1:]),
+                    ),
+                )
+            )
+
+        return test_args
+
+    # 1728 tests
+    def _generate_40mhz_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+
+        # 864 test cases for open security
+        # 864 test cases for wpa2 security
+        for combination in itertools.product(
+            SECURITIES,
+            VHT_MAX_MPDU_LEN,
+            RXLDPC,
+            RX_STBC,
+            TX_STBC,
+            MAX_A_MPDU,
+            RX_ANTENNA,
+            TX_ANTENNA,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        vht_bandwidth_mhz=40,
+                        security=combination[0],
+                        n_capabilities=N_CAPABS_40MHZ,
+                        ac_capabilities=list(combination[1:]),
+                    ),
+                )
+            )
+
+        return test_args
+
+    # 3456 tests
+    def _generate_80mhz_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+
+        # 1728 test cases for open security
+        # 1728 test cases for wpa2 security
+        for combination in itertools.product(
+            SECURITIES,
+            VHT_MAX_MPDU_LEN,
+            RXLDPC,
+            SHORT_GI_80,
+            RX_STBC,
+            TX_STBC,
+            MAX_A_MPDU,
+            RX_ANTENNA,
+            TX_ANTENNA,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        vht_bandwidth_mhz=80,
+                        security=combination[0],
+                        n_capabilities=N_CAPABS_40MHZ,
+                        ac_capabilities=list(combination[1:]),
+                    ),
+                )
+            )
+        return test_args
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/compliance/WlanPhyCompliance11NTest.py b/tests/wlan/compliance/WlanPhyCompliance11NTest.py
new file mode 100644
index 0000000..4683350
--- /dev/null
+++ b/tests/wlan/compliance/WlanPhyCompliance11NTest.py
@@ -0,0 +1,544 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import logging
+from dataclasses import dataclass
+from typing import Any
+
+from mobly import asserts, signals, test_runner
+from mobly.config_parser import TestRunConfig
+from mobly.records import TestResultRecord
+
+from antlion import utils
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+FREQUENCY_24 = ["2.4GHz"]
+FREQUENCY_5 = ["5GHz"]
+CHANNEL_BANDWIDTH_20 = ["HT20"]
+CHANNEL_BANDWIDTH_40_LOWER = ["HT40-"]
+CHANNEL_BANDWIDTH_40_UPPER = ["HT40+"]
+SECURITY_OPEN = "open"
+SECURITY_WPA2 = "wpa2"
+N_MODE = [hostapd_constants.MODE_11N_PURE, hostapd_constants.MODE_11N_MIXED]
+LDPC = [hostapd_constants.N_CAPABILITY_LDPC, ""]
+TX_STBC = [hostapd_constants.N_CAPABILITY_TX_STBC, ""]
+RX_STBC = [hostapd_constants.N_CAPABILITY_RX_STBC1, ""]
+SGI_20 = [hostapd_constants.N_CAPABILITY_SGI20, ""]
+SGI_40 = [hostapd_constants.N_CAPABILITY_SGI40, ""]
+DSSS_CCK = [hostapd_constants.N_CAPABILITY_DSSS_CCK_40, ""]
+INTOLERANT_40 = [hostapd_constants.N_CAPABILITY_40_INTOLERANT, ""]
+MAX_AMPDU_7935 = [hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, ""]
+SMPS = [hostapd_constants.N_CAPABILITY_SMPS_STATIC, ""]
+
+
+@dataclass
+class TestParams:
+    frequency: str
+    chbw: str
+    n_mode: str
+    security: SecurityMode
+    # TODO(http://b/290396383): Type AP capabilities as enums
+    n_capabilities: list[Any]
+
+
+class WlanPhyCompliance11NTest(base_test.WifiBaseTest):
+    """Tests for validating 11n PHYS.
+
+    Test Bed Requirement:
+    * One Android device or Fuchsia device
+    * One Access Point
+    """
+
+    def __init__(self, config: TestRunConfig) -> None:
+        super().__init__(config)
+
+    def setup_generated_tests(self) -> None:
+        test_args: list[tuple[TestParams]] = (
+            self._generate_24_HT20_test_args()
+            + self._generate_24_HT40_lower_test_args()
+            + self._generate_24_HT40_upper_test_args()
+            + self._generate_5_HT20_test_args()
+            + self._generate_5_HT40_lower_test_args()
+            + self._generate_5_HT40_upper_test_args()
+            + self._generate_24_HT20_wpa2_test_args()
+            + self._generate_24_HT40_lower_wpa2_test_args()
+            + self._generate_24_HT40_upper_wpa2_test_args()
+            + self._generate_5_HT20_wpa2_test_args()
+            + self._generate_5_HT40_lower_wpa2_test_args()
+            + self._generate_5_HT40_upper_wpa2_test_args()
+        )
+
+        def generate_test_name(test: TestParams) -> str:
+            ret = []
+            for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
+                if cap in test.n_capabilities:
+                    ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap])
+            # '+' is used by Mobile Harness as a special character, don't use it in test names
+            if test.chbw == "HT40-":
+                chbw = "HT40Lower"
+            elif test.chbw == "HT40+":
+                chbw = "HT40Upper"
+            else:
+                chbw = test.chbw
+            return f"test_11n_{test.frequency}_{chbw}_{test.security}_{test.n_mode}_{''.join(ret)}"
+
+        self.generate_tests(
+            test_logic=self.setup_and_connect,
+            name_func=generate_test_name,
+            arg_sets=test_args,
+        )
+
+    def setup_class(self) -> None:
+        super().setup_class()
+
+        if len(self.access_points) < 1:
+            logging.error("At least one access point is required for this test")
+            raise signals.TestAbortClass("At least one access point is required")
+
+        self.dut = self.get_dut(AssociationMode.POLICY)
+
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("Requires at least one access point")
+        self.access_point = self.access_points[0]
+        self.access_point.stop_all_aps()
+
+    def setup_test(self) -> None:
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockAcquireBright()
+                ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+
+    def teardown_test(self) -> None:
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockRelease()
+                ad.droid.goToSleepNow()
+        self.dut.turn_location_off_and_scan_toggle_off()
+        self.dut.disconnect()
+        self.dut.reset_wifi()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+
+    def on_fail(self, record: TestResultRecord) -> None:
+        super().on_fail(record)
+        self.access_point.stop_all_aps()
+
+    def setup_and_connect(self, test: TestParams) -> None:
+        """Start hostapd and associate the DUT.
+
+        Args:
+            test: Test parameters.
+        """
+        ssid = utils.rand_ascii_str(20)
+        security_profile = Security()
+        password: str | None = None
+        n_capabilities = []
+        for n_capability in test.n_capabilities:
+            if n_capability in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
+                n_capabilities.append(n_capability)
+
+        if test.chbw == "HT20" or test.chbw == "HT40+":
+            if test.frequency == "2.4GHz":
+                channel = 1
+            elif test.frequency == "5GHz":
+                channel = 36
+            else:
+                raise ValueError(f"Invalid frequency: {test.frequency}")
+
+        elif test.chbw == "HT40-":
+            if test.frequency == "2.4GHz":
+                channel = 11
+            elif test.frequency == "5GHz":
+                channel = 60
+            else:
+                raise ValueError(f"Invalid frequency: {test.frequency}")
+
+        else:
+            raise ValueError(f"Invalid channel bandwidth: {test.chbw}")
+
+        if test.chbw == "HT40-" or test.chbw == "HT40+":
+            if hostapd_config.ht40_plus_allowed(channel):
+                extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
+            elif hostapd_config.ht40_minus_allowed(channel):
+                extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
+            else:
+                raise ValueError(f"Invalid channel: {channel}")
+            n_capabilities.append(extended_channel)
+
+        if test.security is SecurityMode.WPA2:
+            security_profile = Security(
+                security_mode=SecurityMode.WPA2,
+                password=generate_random_password(length=20),
+                wpa_cipher="CCMP",
+                wpa2_cipher="CCMP",
+            )
+            password = security_profile.password
+
+        if test.n_mode not in N_MODE:
+            raise ValueError(f"Invalid n-mode: {test.n_mode}")
+
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            mode=test.n_mode,
+            channel=channel,
+            n_capabilities=n_capabilities,
+            ac_capabilities=[],
+            force_wmm=True,
+            ssid=ssid,
+            security=security_profile,
+        )
+        asserts.assert_true(
+            self.dut.associate(
+                ssid,
+                target_pwd=password,
+                target_security=test.security,
+            ),
+            "Failed to connect.",
+        )
+
+    def _generate_24_HT20_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_20,
+            N_MODE,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            INTOLERANT_40,
+            MAX_AMPDU_7935,
+            SMPS,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=combination[2],
+                        security=SecurityMode.OPEN,
+                        n_capabilities=list(combination[3:]),
+                    ),
+                )
+            )
+        return test_args
+
+    def _generate_24_HT40_lower_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_40_LOWER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=hostapd_constants.MODE_11N_MIXED,
+                        security=SecurityMode.OPEN,
+                        n_capabilities=list(combination[2:]),
+                    ),
+                )
+            )
+        return test_args
+
+    def _generate_24_HT40_upper_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_40_UPPER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=hostapd_constants.MODE_11N_MIXED,
+                        security=SecurityMode.OPEN,
+                        n_capabilities=list(combination[2:]),
+                    ),
+                )
+            )
+        return test_args
+
+    def _generate_5_HT20_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_20,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            INTOLERANT_40,
+            MAX_AMPDU_7935,
+            SMPS,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=hostapd_constants.MODE_11N_MIXED,
+                        security=SecurityMode.OPEN,
+                        n_capabilities=list(combination[2:]),
+                    ),
+                )
+            )
+        return test_args
+
+    def _generate_5_HT40_lower_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_40_LOWER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=hostapd_constants.MODE_11N_MIXED,
+                        security=SecurityMode.OPEN,
+                        n_capabilities=list(combination[2:]),
+                    ),
+                )
+            )
+        return test_args
+
+    def _generate_5_HT40_upper_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_40_UPPER,
+            N_MODE,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=combination[2],
+                        security=SecurityMode.OPEN,
+                        n_capabilities=list(combination[3:]),
+                    ),
+                )
+            )
+        return test_args
+
+    def _generate_24_HT20_wpa2_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_20,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            INTOLERANT_40,
+            MAX_AMPDU_7935,
+            SMPS,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=hostapd_constants.MODE_11N_MIXED,
+                        security=SecurityMode.WPA2,
+                        n_capabilities=list(combination[2:]),
+                    ),
+                )
+            )
+        return test_args
+
+    def _generate_24_HT40_lower_wpa2_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_40_LOWER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=hostapd_constants.MODE_11N_MIXED,
+                        security=SecurityMode.WPA2,
+                        n_capabilities=list(combination[2:]),
+                    ),
+                )
+            )
+        return test_args
+
+    def _generate_24_HT40_upper_wpa2_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_24,
+            CHANNEL_BANDWIDTH_40_UPPER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=hostapd_constants.MODE_11N_MIXED,
+                        security=SecurityMode.WPA2,
+                        n_capabilities=list(combination[2:]),
+                    ),
+                )
+            )
+        return test_args
+
+    def _generate_5_HT20_wpa2_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_20,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            INTOLERANT_40,
+            MAX_AMPDU_7935,
+            SMPS,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=hostapd_constants.MODE_11N_MIXED,
+                        security=SecurityMode.WPA2,
+                        n_capabilities=list(combination[2:]),
+                    ),
+                )
+            )
+        return test_args
+
+    def _generate_5_HT40_lower_wpa2_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_40_LOWER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=hostapd_constants.MODE_11N_MIXED,
+                        security=SecurityMode.WPA2,
+                        n_capabilities=list(combination[2:]),
+                    ),
+                )
+            )
+        return test_args
+
+    def _generate_5_HT40_upper_wpa2_test_args(self) -> list[tuple[TestParams]]:
+        test_args: list[tuple[TestParams]] = []
+        for combination in itertools.product(
+            FREQUENCY_5,
+            CHANNEL_BANDWIDTH_40_UPPER,
+            LDPC,
+            TX_STBC,
+            RX_STBC,
+            SGI_20,
+            SGI_40,
+            MAX_AMPDU_7935,
+            SMPS,
+            DSSS_CCK,
+        ):
+            test_args.append(
+                (
+                    TestParams(
+                        frequency=combination[0],
+                        chbw=combination[1],
+                        n_mode=hostapd_constants.MODE_11N_MIXED,
+                        security=SecurityMode.WPA2,
+                        n_capabilities=list(combination[2:]),
+                    ),
+                )
+            )
+        return test_args
+
+
+if __name__ == "__main__":
+    # Entry point when executed directly under Mobly's test runner.
+    test_runner.main()
diff --git a/tests/wlan/compliance/WlanPhyComplianceABGTest.py b/tests/wlan/compliance/WlanPhyComplianceABGTest.py
new file mode 100644
index 0000000..ec79e64
--- /dev/null
+++ b/tests/wlan/compliance/WlanPhyComplianceABGTest.py
@@ -0,0 +1,1912 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mobly import asserts, signals, test_runner
+from mobly.records import TestResultRecord
+
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+
+class WlanPhyComplianceABGTest(base_test.WifiBaseTest):
+    """Tests for validating 11a, 11b, and 11g PHYS.
+
+    Test Bed Requirement:
+    * One Android device or Fuchsia device
+    * One Access Point
+    """
+
+    def setup_class(self) -> None:
+        """Acquire the DUT and AP, then precompute SSID/password fixtures.
+
+        Builds open-network profiles at default, minimum, and maximum SSID
+        lengths plus a set of UTF-8 SSID/password pairs exercising several
+        alphabets. Stops any APs left running by a previous class.
+        """
+        super().setup_class()
+
+        self.dut = self.get_dut(AssociationMode.POLICY)
+
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("Requires at least one access point")
+        self.access_point = self.access_points[0]
+        open_network = self.get_open_network(False, [])
+        open_network_min_len = self.get_open_network(
+            False,
+            [],
+            ssid_length_2g=hostapd_constants.AP_SSID_MIN_LENGTH_2G,
+            ssid_length_5g=hostapd_constants.AP_SSID_MIN_LENGTH_5G,
+        )
+        open_network_max_len = self.get_open_network(
+            False,
+            [],
+            ssid_length_2g=hostapd_constants.AP_SSID_MAX_LENGTH_2G,
+            ssid_length_5g=hostapd_constants.AP_SSID_MAX_LENGTH_5G,
+        )
+        self.open_network_2g = open_network["2g"]
+        self.open_network_5g = open_network["5g"]
+        # NOTE(review): the min/max-length SSIDs are trimmed by their first 3
+        # characters below, so the stored SSIDs are 3 shorter than the
+        # generated length — confirm this offset is intentional.
+        self.open_network_max_len_2g = open_network_max_len["2g"]
+        self.open_network_max_len_2g["SSID"] = self.open_network_max_len_2g["SSID"][3:]
+        self.open_network_max_len_5g = open_network_max_len["5g"]
+        self.open_network_max_len_5g["SSID"] = self.open_network_max_len_5g["SSID"][3:]
+        self.open_network_min_len_2g = open_network_min_len["2g"]
+        self.open_network_min_len_2g["SSID"] = self.open_network_min_len_2g["SSID"][3:]
+        self.open_network_min_len_5g = open_network_min_len["5g"]
+        self.open_network_min_len_5g["SSID"] = self.open_network_min_len_5g["SSID"][3:]
+
+        # UTF-8 SSIDs using mathematical fraktur characters (multi-byte).
+        self.utf8_ssid_2g = "2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢"
+        self.utf8_ssid_5g = "5𝔤_𝔊𝔬𝔬𝔤𝔩𝔢"
+
+        # Per-language UTF-8 SSID/password pairs covering accented Latin,
+        # CJK, and Hangul alphabets.
+        self.utf8_ssid_2g_french = "Château du Feÿ"
+        self.utf8_password_2g_french = "du Feÿ Château"
+
+        self.utf8_ssid_2g_german = "Rat für Straßenatlas"
+        self.utf8_password_2g_german = "für Straßenatlas Rat"
+
+        self.utf8_ssid_2g_dutch = "Die niet óúd, is níéuw!"
+        self.utf8_password_2g_dutch = "niet óúd, is níéuw! Die"
+
+        self.utf8_ssid_2g_swedish = "Det är femtioåtta"
+        self.utf8_password_2g_swedish = "femtioåtta Det är"
+
+        self.utf8_ssid_2g_norwegian = "Curaçao ØÆ æ å å å"
+        self.utf8_password_2g_norwegian = "ØÆ Curaçao æ å å å"
+
+        # Danish and Norwegian has the same alphabet
+        self.utf8_ssid_2g_danish = self.utf8_ssid_2g_norwegian
+        self.utf8_password_2g_danish = self.utf8_password_2g_norwegian
+
+        self.utf8_ssid_2g_japanese = "あなた はお母さん"
+        self.utf8_password_2g_japanese = "そっくりね。あな"
+
+        self.utf8_ssid_2g_spanish = "¡No á,é,í,ó,ú,ü,ñ,¿,¡"
+        self.utf8_password_2g_spanish = "á,é,í,ó,ú,ü,ñ,¿,¡ ¡No"
+
+        self.utf8_ssid_2g_italian = "caffè Pinocchio è italiano?"
+        self.utf8_password_2g_italian = "Pinocchio è italiano? caffè"
+
+        self.utf8_ssid_2g_korean = "ㅘㅙㅚㅛㅜㅝㅞㅟㅠ"
+        self.utf8_password_2g_korean = "ㅜㅝㅞㅟㅠㅘㅙㅚㅛ"
+
+        # Ensure a clean slate before the first test runs.
+        self.access_point.stop_all_aps()
+
+    def setup_test(self) -> None:
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockAcquireBright()
+                ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+
+    def teardown_test(self) -> None:
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockRelease()
+                ad.droid.goToSleepNow()
+        self.dut.turn_location_off_and_scan_toggle_off()
+        self.dut.disconnect()
+        self.dut.reset_wifi()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+
+    def on_fail(self, record: TestResultRecord) -> None:
+        """Collect failure artifacts via the base class, then stop all APs."""
+        super().on_fail(record)
+        self.access_point.stop_all_aps()
+
+    def test_associate_11b_only_long_preamble(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=False,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_short_preamble(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_minimal_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=15,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_maximum_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=1024,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_frag_threshold_430(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_rts_threshold_256(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_rts_256_frag_430(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_high_dtim_low_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=3,
+            beacon_interval=100,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_low_dtim_high_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=1,
+            beacon_interval=300,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_WMM_with_default_values(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_WMM_with_non_default_values(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_WMM_ACM_on_BK(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS | hostapd_constants.WMM_ACM_BK
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_WMM_ACM_on_BE(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS | hostapd_constants.WMM_ACM_BE
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_WMM_ACM_on_VI(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS | hostapd_constants.WMM_ACM_VI
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_WMM_ACM_on_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_WMM_ACM_on_BK_BE_VI(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VI
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_WMM_ACM_on_BK_BE_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_WMM_ACM_on_BK_VI_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_VI
+            | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_WMM_ACM_on_BE_VI_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_11B_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VI
+            | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_country_code(self):
+        country_info = (
+            hostapd_constants.ENABLE_IEEE80211D
+            | hostapd_constants.COUNTRY_STRING["ALL"]
+            | hostapd_constants.COUNTRY_CODE["UNITED_STATES"]
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_non_country_code(self):
+        country_info = (
+            hostapd_constants.ENABLE_IEEE80211D
+            | hostapd_constants.COUNTRY_STRING["ALL"]
+            | hostapd_constants.COUNTRY_CODE["NON_COUNTRY"]
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_hidden_ssid(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            hidden=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_vendor_ie_in_beacon_correct_length(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_vendor_ie_in_beacon_zero_length(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_vendor_ie_in_assoc_correct_length(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_association_response"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11b_only_with_vendor_ie_in_assoc_zero_length(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_association_" "response_without_data"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_long_preamble(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            preamble=False,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_short_preamble(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            preamble=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_minimal_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            beacon_interval=15,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_maximum_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            beacon_interval=1024,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_frag_threshold_430(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_rts_threshold_256(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            rts_threshold=256,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_rts_256_frag_430(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            rts_threshold=256,
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_high_dtim_low_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            dtim_period=3,
+            beacon_interval=100,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_low_dtim_high_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            dtim_period=1,
+            beacon_interval=300,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_WMM_with_default_values(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_WMM_with_non_default_values(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_WMM_ACM_on_BK(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_WMM_ACM_on_BE(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BE
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_WMM_ACM_on_VI(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_VI
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_WMM_ACM_on_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_WMM_ACM_on_BK_BE_VI(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VI
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_WMM_ACM_on_BK_BE_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_WMM_ACM_on_BK_VI_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_VI
+            | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_WMM_ACM_on_BE_VI_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VI
+            | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_country_code(self):
+        country_info = (
+            hostapd_constants.ENABLE_IEEE80211D
+            | hostapd_constants.COUNTRY_STRING["ALL"]
+            | hostapd_constants.COUNTRY_CODE["UNITED_STATES"]
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_non_country_code(self):
+        country_info = (
+            hostapd_constants.ENABLE_IEEE80211D
+            | hostapd_constants.COUNTRY_STRING["ALL"]
+            | hostapd_constants.COUNTRY_CODE["NON_COUNTRY"]
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_hidden_ssid(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            hidden=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_vendor_ie_in_beacon_correct_length(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_vendor_ie_in_beacon_zero_length(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_vendor_ie_in_assoc_correct_length(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_association_response"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11a_only_with_vendor_ie_in_assoc_zero_length(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_association_" "response_without_data"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_5g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_long_preamble(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=False,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_short_preamble(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=True,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_minimal_beacon_interval(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=15,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_maximum_beacon_interval(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=1024,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_frag_threshold_430(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            frag_threshold=430,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_rts_threshold_256(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_rts_256_frag_430(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+            frag_threshold=430,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_high_dtim_low_beacon_interval(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=3,
+            beacon_interval=100,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_low_dtim_high_beacon_interval(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=1,
+            beacon_interval=300,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_WMM_with_default_values(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES
+            | hostapd_constants.OFDM_ONLY_BASIC_RATES
+            | hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_WMM_with_non_default_values(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES
+            | hostapd_constants.OFDM_ONLY_BASIC_RATES
+            | hostapd_constants.WMM_NON_DEFAULT_PARAMS
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_WMM_ACM_on_BK(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | data_rates
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_WMM_ACM_on_BE(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BE
+            | data_rates
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_WMM_ACM_on_VI(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_VI
+            | data_rates
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_WMM_ACM_on_VO(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_VO
+            | data_rates
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_WMM_ACM_on_BK_BE_VI(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VI
+            | data_rates
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_WMM_ACM_on_BK_BE_VO(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VO
+            | data_rates
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_WMM_ACM_on_BK_VI_VO(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_VI
+            | hostapd_constants.WMM_ACM_VO
+            | data_rates
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_WMM_ACM_on_BE_VI_VO(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VI
+            | hostapd_constants.WMM_ACM_VO
+            | data_rates
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_country_code(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        country_info = (
+            hostapd_constants.ENABLE_IEEE80211D
+            | hostapd_constants.COUNTRY_STRING["ALL"]
+            | hostapd_constants.COUNTRY_CODE["UNITED_STATES"]
+            | data_rates
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_non_country_code(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        country_info = (
+            hostapd_constants.ENABLE_IEEE80211D
+            | hostapd_constants.COUNTRY_STRING["ALL"]
+            | hostapd_constants.COUNTRY_CODE["NON_COUNTRY"]
+            | data_rates
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_hidden_ssid(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES | hostapd_constants.OFDM_ONLY_BASIC_RATES
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            hidden=True,
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_vendor_ie_in_beacon_correct_length(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES
+            | hostapd_constants.OFDM_ONLY_BASIC_RATES
+            | hostapd_constants.VENDOR_IE["correct_length_beacon"]
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_vendor_ie_in_beacon_zero_length(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES
+            | hostapd_constants.OFDM_ONLY_BASIC_RATES
+            | hostapd_constants.VENDOR_IE["zero_length_beacon_without_data"]
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_vendor_ie_in_assoc_correct_length(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES
+            | hostapd_constants.OFDM_ONLY_BASIC_RATES
+            | hostapd_constants.VENDOR_IE["correct_length_association_response"]
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11g_only_with_vendor_ie_in_assoc_zero_length(self):
+        data_rates = (
+            hostapd_constants.OFDM_DATA_RATES
+            | hostapd_constants.OFDM_ONLY_BASIC_RATES
+            | hostapd_constants.VENDOR_IE["correct_length_association_response"]
+            | hostapd_constants.VENDOR_IE[
+                "zero_length_association_" "response_without_data"
+            ]
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=data_rates,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_only_long_preamble(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=False,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_short_preamble(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            preamble=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_minimal_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=15,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_maximum_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            beacon_interval=1024,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_frag_threshold_430(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_rts_threshold_256(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_rts_256_frag_430(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            rts_threshold=256,
+            frag_threshold=430,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_high_dtim_low_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=3,
+            beacon_interval=100,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_low_dtim_high_beacon_interval(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            dtim_period=1,
+            beacon_interval=300,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_WMM_with_default_values(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_WMM_with_non_default_values(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_NON_DEFAULT_PARAMS,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_WMM_ACM_on_BK(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_WMM_ACM_on_BE(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BE
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_WMM_ACM_on_VI(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_VI
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_WMM_ACM_on_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_WMM_ACM_on_BK_BE_VI(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VI
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_WMM_ACM_on_BK_BE_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_WMM_ACM_on_BK_VI_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BK
+            | hostapd_constants.WMM_ACM_VI
+            | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_WMM_ACM_on_BE_VI_VO(self):
+        wmm_acm_bits_enabled = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_BE
+            | hostapd_constants.WMM_ACM_VI
+            | hostapd_constants.WMM_ACM_VO
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            force_wmm=True,
+            additional_ap_parameters=wmm_acm_bits_enabled,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_country_code(self):
+        country_info = (
+            hostapd_constants.ENABLE_IEEE80211D
+            | hostapd_constants.COUNTRY_STRING["ALL"]
+            | hostapd_constants.COUNTRY_CODE["UNITED_STATES"]
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_non_country_code(self):
+        country_info = (
+            hostapd_constants.ENABLE_IEEE80211D
+            | hostapd_constants.COUNTRY_STRING["ALL"]
+            | hostapd_constants.COUNTRY_CODE["NON_COUNTRY"]
+        )
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=country_info,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_only_with_hidden_ssid(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            hidden=True,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ag_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_2g["SSID"]), "Failed to associate."
+        )
+
+    def test_minimum_ssid_length_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_min_len_2g["SSID"],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_min_len_2g["SSID"]),
+            "Failed to associate.",
+        )
+
+    def test_minimum_ssid_length_5g_11ac_80mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_min_len_5g["SSID"],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_min_len_5g["SSID"]),
+            "Failed to associate.",
+        )
+
+    def test_maximum_ssid_length_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_max_len_2g["SSID"],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_max_len_2g["SSID"]),
+            "Failed to associate.",
+        )
+
+    def test_maximum_ssid_length_5g_11ac_80mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_max_len_5g["SSID"],
+        )
+        asserts.assert_true(
+            self.dut.associate(self.open_network_max_len_5g["SSID"]),
+            "Failed to associate.",
+        )
+
+    def test_ssid_with_UTF8_characters_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g), "Failed to associate."
+        )
+
+    def test_ssid_with_UTF8_characters_5g_11ac_80mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.utf8_ssid_5g,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_5g), "Failed to associate."
+        )
+
+    def test_ssid_with_UTF8_characters_french_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_french,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_french), "Failed to associate."
+        )
+
+    def test_ssid_with_UTF8_characters_german_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_german,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_german), "Failed to associate."
+        )
+
+    def test_ssid_with_UTF8_characters_dutch_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_dutch,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_dutch), "Failed to associate."
+        )
+
+    def test_ssid_with_UTF8_characters_swedish_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_swedish,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_swedish), "Failed to associate."
+        )
+
+    def test_ssid_with_UTF8_characters_norwegian_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_norwegian,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_norwegian), "Failed to associate."
+        )
+
+    def test_ssid_with_UTF8_characters_danish_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_danish,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_danish), "Failed to associate."
+        )
+
+    def test_ssid_with_UTF8_characters_japanese_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_japanese,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_japanese), "Failed to associate."
+        )
+
+    def test_ssid_with_UTF8_characters_spanish_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_spanish,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_spanish), "Failed to associate."
+        )
+
+    def test_ssid_with_UTF8_characters_italian_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_italian,
+        )
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_italian), "Failed to associate."
+        )
+
+    def test_ssid_with_UTF8_characters_korean_2g_11n_20mhz(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind_11ab_legacy",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.utf8_ssid_2g_korean,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(self.utf8_ssid_2g_korean), "Failed to associate."
+        )
+
+
+# Allow running this test class directly via Mobly's test runner.
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/compliance/WlanSecurityComplianceABGTest.py b/tests/wlan/compliance/WlanSecurityComplianceABGTest.py
new file mode 100644
index 0000000..77865af
--- /dev/null
+++ b/tests/wlan/compliance/WlanSecurityComplianceABGTest.py
@@ -0,0 +1,7961 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import re
+from functools import wraps
+
+from mobly import asserts, signals, test_runner
+from mobly.records import TestResultRecord
+
+from antlion import utils
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+# hostapd profile used by every 11a/b/g security-compliance test below.
+AP_11ABG_PROFILE_NAME = "whirlwind_11ag_legacy"
+# Length of the randomly generated ASCII SSID created for each test.
+SSID_LENGTH_DEFAULT = 15
+
+
+def create_security_profile(test_func):
+    """Decorator for generating hostapd security profile object based on the
+    test name.
+    Args:
+        test_func: The test function
+    Returns:
+        security_profile_generator: The function that generates the security
+            profile object
+    """
+
+    @wraps(test_func)
+    def security_profile_generator(self, *args, **kwargs):
+        """Function that looks at the name of the function and determines what
+        the security profile should be based on what items are in the name
+
+        Example: A function with the name sec_wpa_wpa2_ptk_ccmp_tkip would
+            return a security profile that has wpa and wpa2 configure with a
+            ptk cipher of ccmp or tkip. Removing one of those options would
+            drop it from the config.
+
+        Args:
+            self: The object of the WlanSecurityComplianceABGTest class.
+            *args: args that were sent to the original test function
+            **kwargs: kwargs that were sent to the original test function
+        Returns:
+            The original function that was called
+        """
+        utf8_password_2g = "2𝔤_𝔊𝔬𝔬𝔤𝔩𝔢"
+        utf8_password_2g_french = "du Feÿ Château"
+        utf8_password_2g_german = "für Straßenatlas Rat"
+        utf8_password_2g_dutch = "niet óúd, is níéuw! Die"
+        utf8_password_2g_swedish = "femtioåtta Det är"
+        utf8_password_2g_norwegian = "ØÆ Curaçao æ å å å"
+        # Danish and Norwegian has the same alphabet
+        utf8_password_2g_danish = utf8_password_2g_norwegian
+        utf8_password_2g_japanese = "そっくりね。あな"
+        utf8_password_2g_spanish = "á,é,í,ó,ú,ü,ñ,¿,¡ ¡No"
+        utf8_password_2g_italian = "Pinocchio è italiano? caffè"
+        utf8_password_2g_korean = "ㅜㅝㅞㅟㅠㅘㅙㅚㅛ"
+
+        security = re.search(r"sec(.*?)ptk_(.*)", test_func.__name__)
+        if security is None:
+            raise TypeError(
+                f'Test name does not match expected pattern: "{test_func.__name__}"'
+            )
+
+        security_mode_raw = security.group(1)
+        ptk_type = security.group(2)
+        wpa_cipher: str | None = None
+        wpa2_cipher: str | None = None
+
+        if "_wpa_wpa2_wpa3_" in security_mode_raw:
+            security_mode = SecurityMode.WPA_WPA2_WPA3
+        elif "_wpa_wpa2_" in security_mode_raw:
+            security_mode = SecurityMode.WPA_WPA2
+        elif "_wpa2_wpa3_" in security_mode_raw:
+            security_mode = SecurityMode.WPA2_WPA3
+        elif "_wep_" in security_mode_raw:
+            security_mode = SecurityMode.WEP
+        elif "_wpa_" in security_mode_raw:
+            security_mode = SecurityMode.WPA
+        elif "_wpa2_" in security_mode_raw:
+            security_mode = SecurityMode.WPA2
+        elif "_wpa3_" in security_mode_raw:
+            security_mode = SecurityMode.WPA3
+        else:
+            raise TypeError(f'Security mode "{security_mode_raw}" not supported')
+
+        if "tkip" in ptk_type and "ccmp" in ptk_type:
+            wpa_cipher = "TKIP CCMP"
+            wpa2_cipher = "TKIP CCMP"
+        elif "tkip" in ptk_type:
+            wpa_cipher = "TKIP"
+            wpa2_cipher = "TKIP"
+        elif "ccmp" in ptk_type:
+            wpa_cipher = "CCMP"
+            wpa2_cipher = "CCMP"
+        if "max_length_password" in test_func.__name__:
+            password = generate_random_password(
+                length=hostapd_constants.MAX_WPA_PASSWORD_LENGTH
+            )
+        elif "max_length_psk" in test_func.__name__:
+            password = str(
+                generate_random_password(
+                    length=hostapd_constants.MAX_WPA_PSK_LENGTH, hex=True
+                )
+            ).lower()
+        elif "wep_5_chars" in test_func.__name__:
+            password = generate_random_password(length=5)
+        elif "wep_13_chars" in test_func.__name__:
+            password = generate_random_password(length=13)
+        elif "wep_10_hex" in test_func.__name__:
+            password = str(generate_random_password(length=10, hex=True)).lower()
+        elif "wep_26_hex" in test_func.__name__:
+            password = str(generate_random_password(length=26, hex=True)).lower()
+        elif "utf8" in test_func.__name__:
+            if "french" in test_func.__name__:
+                password = utf8_password_2g_french
+            elif "german" in test_func.__name__:
+                password = utf8_password_2g_german
+            elif "dutch" in test_func.__name__:
+                password = utf8_password_2g_dutch
+            elif "swedish" in test_func.__name__:
+                password = utf8_password_2g_swedish
+            elif "norwegian" in test_func.__name__:
+                password = utf8_password_2g_norwegian
+            elif "danish" in test_func.__name__:
+                password = utf8_password_2g_danish
+            elif "japanese" in test_func.__name__:
+                password = utf8_password_2g_japanese
+            elif "spanish" in test_func.__name__:
+                password = utf8_password_2g_spanish
+            elif "italian" in test_func.__name__:
+                password = utf8_password_2g_italian
+            elif "korean" in test_func.__name__:
+                password = utf8_password_2g_korean
+            else:
+                password = utf8_password_2g
+        else:
+            password = generate_random_password()
+
+        self.security_profile = Security(
+            security_mode=security_mode,
+            password=password,
+            wpa_cipher=wpa_cipher,
+            wpa2_cipher=wpa2_cipher,
+        )
+        self.client_password = password
+        self.target_security = security_mode
+        self.ssid = utils.rand_ascii_str(SSID_LENGTH_DEFAULT)
+        return test_func(self, *args, *kwargs)
+
+    return security_profile_generator
+
+
+class WlanSecurityComplianceABGTest(base_test.WifiBaseTest):
+    """Tests for validating 11a, 11b, and 11g PHYS.
+
+    Test Bed Requirement:
+    * One Android device or Fuchsia device
+    * One Access Point
+    """
+
+    def setup_class(self):
+        super().setup_class()
+
+        self.dut = self.get_dut(AssociationMode.POLICY)
+
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("Requires at least one access point")
+        self.access_point = self.access_points[0]
+
+        self.ssid: str
+        self.target_security: SecurityMode
+        self.security_profile: Security
+        self.client_password: str
+
+        self.access_point.stop_all_aps()
+
+    def setup_test(self):
+        super().setup_test()
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockAcquireBright()
+                ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+
+    def teardown_test(self):
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockRelease()
+                ad.droid.goToSleepNow()
+        self.dut.turn_location_off_and_scan_toggle_off()
+        self.dut.disconnect()
+        self.dut.reset_wifi()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+        super().teardown_test()
+
+    def on_fail(self, record: TestResultRecord):
+        """Ensure the AP is left in a clean state after a test failure."""
+        super().on_fail(record)
+        self.access_point.stop_all_aps()
+
+    @create_security_profile
+    def test_associate_11a_sec_open_wep_5_chars_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_open_wep_13_chars_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_open_wep_10_hex_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_open_wep_26_hex_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_shared_wep_5_chars_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_shared_wep_13_chars_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_shared_wep_10_hex_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_shared_wep_26_hex_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_psk_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_WMM_with_default_values_sec_wpa_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_false(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_false(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_false(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_false(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_false(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_WMM_with_default_values_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa3_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa3_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa3_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa3_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_WMM_with_default_values_sec_wpa3_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa3_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        """5 GHz 11abg AP, PMF required, frag threshold 430, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        """5 GHz 11abg AP, PMF required, frag threshold 430, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        """5 GHz 11abg AP, PMF required, RTS threshold 256, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        """5 GHz 11abg AP, PMF required, RTS threshold 256, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, RTS 256 + frag 430, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, high DTIM + low beacon interval, WMM off."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, low DTIM + high beacon interval, WMM off."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, WMM on with 11a/g/n/ac default params."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, correct-length vendor IE in beacon."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, zero-length vendor IE in beacon."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, WPA-lookalike vendor IE in beacon."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            # NOTE(review): "simliar_to_wpa" (sic) must match the key spelling
+            # in hostapd_constants.VENDOR_IE — do not "correct" it here alone.
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        """5 GHz 11abg-profile AP, WMM off; expect DUT association to succeed."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        """5 GHz 11abg-profile AP, WMM off; expect DUT association to succeed."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        """5 GHz 11abg AP, WMM off; max-length password variant (per test name)."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, WMM off; max-length password variant (per test name)."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        """5 GHz 11abg AP, frag threshold 430, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        """5 GHz 11abg AP, frag threshold 430, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        """5 GHz 11abg AP, RTS threshold 256, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        """5 GHz 11abg AP, RTS threshold 256, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, RTS 256 + frag 430, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, high DTIM + low beacon interval, WMM off."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, low DTIM + high beacon interval, WMM off."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, WMM on with 11a/g/n/ac default params; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP with correct-length vendor IE in beacon; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP with zero-length vendor IE in beacon; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP with WPA-lookalike vendor IE in beacon; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            # NOTE(review): "simliar_to_wpa" (sic) must match the key spelling
+            # in hostapd_constants.VENDOR_IE — do not "correct" it here alone.
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        """5 GHz 11abg AP, PMF required, WMM off; expect DUT association to succeed."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        """5 GHz 11abg AP, PMF required, WMM off; expect DUT association to succeed."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, WMM off; max-length password (per test name)."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, WMM off; max-length password (per test name)."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        """5 GHz 11abg AP, PMF required, frag threshold 430, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, frag threshold 430, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        """5 GHz 11abg AP, PMF required, RTS threshold 256, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        """5 GHz 11abg AP, PMF required, RTS threshold 256, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, RTS 256 + frag 430, WMM off; expect association."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, high DTIM + low beacon interval, WMM off."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, low DTIM + high beacon interval, WMM off."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, WMM on with 11a/g/n/ac default params."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, correct-length vendor IE in beacon."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, zero-length vendor IE in beacon."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11a_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        """5 GHz 11abg AP, PMF required, WPA-lookalike vendor IE in beacon."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.ssid,
+            # NOTE(review): "simliar_to_wpa" (sic) must match the key spelling
+            # in hostapd_constants.VENDOR_IE — do not "correct" it here alone.
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_open_wep_5_chars_ptk_none(self):
+        """2.4 GHz 11abg-profile AP, WEP open auth, WMM off; 5-char key (per test name)."""
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_open_wep_13_chars_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_open_wep_10_hex_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_open_wep_26_hex_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["open"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_shared_wep_5_chars_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_shared_wep_13_chars_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_shared_wep_10_hex_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_shared_wep_26_hex_ptk_none(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+            additional_ap_parameters=hostapd_constants.WEP_AUTH["shared"],
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_psk_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_frag_430_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_WMM_with_default_values_sec_wpa_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_false(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_false(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_max_length_password_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_false(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_max_length_psk_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_false(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_false(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Expected failure to associate. This device must support TKIP and "
+            "PMF, which is not supported on Fuchsia. If this device is a "
+            "mainstream device, we need to reconsider adding support for TKIP "
+            "and PMF on Fuchsia.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_rts_256_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_psk_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_tkip(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_frag_430_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_WMM_with_default_values_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_psk_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa3_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa3_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa3_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa3_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_frag_430_sec_wpa3_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa3_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_WMM_with_default_values_sec_wpa3_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa3_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa3_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_max_length_password_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_rts_256_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_max_length_password_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_rts_256_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_rts_256_frag_430_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            rts_threshold=256,
+            frag_threshold=430,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_high_dtim_low_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.HIGH_DTIM,
+            beacon_interval=hostapd_constants.LOW_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_low_dtim_high_beacon_int_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            dtim_period=hostapd_constants.LOW_DTIM,
+            beacon_interval=hostapd_constants.HIGH_BEACON_INTERVAL,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_WMM_with_default_values_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            force_wmm=True,
+            additional_ap_parameters=hostapd_constants.WMM_11B_DEFAULT_PARAMS,
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_correct_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "correct_length_beacon"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_zero_length_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE[
+                "zero_length_beacon_without_data"
+            ],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_11bg_pmf_with_vendor_ie_in_beacon_similar_to_wpa_ie_sec_wpa_wpa2_wpa3_psk_sae_ptk_tkip_or_ccmp(
+        self,
+    ):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            additional_ap_parameters=hostapd_constants.VENDOR_IE["simliar_to_wpa"],
+            security=self.security_profile,
+            pmf_support=hostapd_constants.PMF_SUPPORT_REQUIRED,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_utf8_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_utf8_french_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_utf8_german_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_utf8_dutch_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_utf8_swedish_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_utf8_norwegian_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_utf8_danish_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_utf8_japanese_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_utf8_spanish_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_utf8_italian_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+    @create_security_profile
+    def test_associate_utf8_korean_password_11bg_sec_wpa2_psk_ptk_ccmp(self):
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=AP_11ABG_PROFILE_NAME,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+            security=self.security_profile,
+            force_wmm=False,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                self.ssid,
+                target_security=self.target_security,
+                target_pwd=self.client_password,
+            ),
+            "Failed to associate.",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/facade/BUILD.gn b/tests/wlan/facade/BUILD.gn
new file mode 100644
index 0000000..c62ce70
--- /dev/null
+++ b/tests/wlan/facade/BUILD.gn
@@ -0,0 +1,32 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("wlan_deprecated_configuration_test") {
+  main_source = "WlanDeprecatedConfigurationTest.py"
+  environments = display_envs
+}
+
+antlion_host_test("wlan_facade_test") {
+  main_source = "WlanFacadeTest.py"
+  environments = display_envs
+}
+
+antlion_host_test("wlan_status_test") {
+  main_source = "WlanStatusTest.py"
+  environments = display_envs
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":wlan_deprecated_configuration_test($host_toolchain)",
+    ":wlan_facade_test($host_toolchain)",
+    ":wlan_status_test($host_toolchain)",
+  ]
+}
diff --git a/tests/wlan/facade/WlanDeprecatedConfigurationTest.py b/tests/wlan/facade/WlanDeprecatedConfigurationTest.py
new file mode 100644
index 0000000..1b1b8f3
--- /dev/null
+++ b/tests/wlan/facade/WlanDeprecatedConfigurationTest.py
@@ -0,0 +1,189 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from mobly import asserts, test_runner
+from mobly.config_parser import TestRunConfig
+
+from antlion import utils
+from antlion.controllers.ap_lib.hostapd_security import FuchsiaSecurityType
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.fuchsia_lib.wlan_ap_policy_lib import (
+    ConnectivityMode,
+    OperatingBand,
+)
+from antlion.controllers.fuchsia_lib.wlan_lib import WlanMacRole
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+AP_ROLE = "Ap"
+DEFAULT_SSID = "testssid"
+TEST_MAC_ADDR = "12:34:56:78:9a:bc"
+TEST_MAC_ADDR_SECONDARY = "bc:9a:78:56:34:12"
+
+
+class WlanDeprecatedConfigurationTest(base_test.WifiBaseTest):
+    """Tests for WlanDeprecatedConfigurationFacade"""
+
+    def __init__(self, configs: TestRunConfig) -> None:
+        super().__init__(configs)
+        self.log = logging.getLogger()
+        self.fuchsia_device, self.dut = self.get_dut_type(
+            FuchsiaDevice, AssociationMode.POLICY
+        )
+
+    def setup_test(self):
+        super().setup_test()
+        self._stop_soft_aps()
+
+    def teardown_test(self):
+        self._stop_soft_aps()
+        super().teardown_test()
+
+    def _get_ap_interface_mac_address(self):
+        """Retrieves mac address from wlan interface with role ap
+
+        Returns:
+            string, the mac address of the AP interface
+
+        Raises:
+            ConnectionError, if SL4F calls fail
+            AttributeError, if no interface has role 'Ap'
+        """
+        for wlan_iface in self.dut.get_wlan_interface_id_list():
+            result = self.fuchsia_device.sl4f.wlan_lib.query_iface(wlan_iface)
+            if result.role is WlanMacRole.AP:
+                return utils.mac_address_list_to_str(bytes(result.sta_addr))
+        raise AttributeError(
+            "Failed to get ap interface mac address. No AP interface found."
+        )
+
+    def _start_soft_ap(self):
+        """Starts SoftAP on DUT.
+
+        Raises:
+            ConnectionError, if SL4F call fails.
+        """
+        self.log.info(f"Starting SoftAP on device {self.dut.identifier}")
+        response = self.fuchsia_device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
+            DEFAULT_SSID,
+            FuchsiaSecurityType.NONE,
+            None,
+            ConnectivityMode.LOCAL_ONLY,
+            OperatingBand.ANY,
+        )
+        if response.get("error"):
+            raise ConnectionError(f"Failed to setup SoftAP: {response['error']}")
+
+    def _stop_soft_aps(self):
+        """Stops SoftAP on DUT.
+
+        Raises:
+            ConnectionError, if SL4F call fails.
+        """
+        self.log.info("Stopping SoftAP.")
+        response = self.fuchsia_device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint()
+        if response.get("error"):
+            raise ConnectionError(f"Failed to stop SoftAP: {response['error']}")
+
+    def _suggest_ap_mac_addr(self, mac_addr):
+        """Suggests mac address for AP interface.
+        Args:
+            mac_addr: string, mac address to suggest.
+
+        Raises:
+            TestFailure, if SL4F call fails.
+        """
+        self.log.info(
+            f"Suggesting AP mac addr ({mac_addr}) via wlan_deprecated_configuration_lib."
+        )
+        response = self.fuchsia_device.sl4f.wlan_deprecated_configuration_lib.wlanSuggestAccessPointMacAddress(
+            mac_addr
+        )
+        if response.get("error"):
+            asserts.fail(
+                f"Failed to suggest AP mac address ({mac_addr}): {response['error']}"
+            )
+
+    def _verify_mac_addr(self, expected_addr):
+        """Verifies mac address of ap interface is set to expected mac address.
+
+        Args:
+            expected_addr: string, expected mac address
+
+        Raises:
+            TestFailure, if actual mac address is not the expected mac
+                address.
+        """
+        set_mac_addr = self._get_ap_interface_mac_address()
+        if set_mac_addr != expected_addr:
+            asserts.fail(
+                f"Failed to set AP mac address via wlan_deprecated_configuration_lib. "
+                f"Expected mac addr: {expected_addr}, Actual mac addr: {set_mac_addr}"
+            )
+        else:
+            self.log.info(f"AP mac address successfully set to {expected_addr}")
+
+    def test_suggest_ap_mac_address(self):
+        """Tests suggest ap mac address SL4F call
+
+        1. Get initial mac address
+        2. Suggest new mac address
+        3. Verify new mac address is set successfully
+        4. Reset to initial mac address
+        5. Verify initial mac address is reset successfully
+
+
+        Raises:
+            TestFailure, if wlanSuggestAccessPointMacAddress call fails or
+                if mac address is not the suggested value
+            ConnectionError, if other SL4F calls fail
+        """
+        # Retrieve initial ap mac address
+        self._start_soft_ap()
+
+        self.log.info("Getting initial mac address.")
+        initial_mac_addr = self._get_ap_interface_mac_address()
+        self.log.info(f"Initial mac address: {initial_mac_addr}")
+
+        if initial_mac_addr != TEST_MAC_ADDR:
+            suggested_mac_addr = TEST_MAC_ADDR
+        else:
+            suggested_mac_addr = TEST_MAC_ADDR_SECONDARY
+
+        self._stop_soft_aps()
+
+        # Suggest and verify new mac address
+        self._suggest_ap_mac_addr(suggested_mac_addr)
+
+        self._start_soft_ap()
+
+        self._verify_mac_addr(suggested_mac_addr)
+
+        self._stop_soft_aps()
+
+        # Reset to initial mac address and verify
+        self.log.info(f"Resetting to initial mac address ({initial_mac_addr}).")
+        self._suggest_ap_mac_addr(initial_mac_addr)
+
+        self._start_soft_ap()
+
+        self._verify_mac_addr(initial_mac_addr)
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/facade/WlanFacadeTest.py b/tests/wlan/facade/WlanFacadeTest.py
new file mode 100644
index 0000000..bb03a36
--- /dev/null
+++ b/tests/wlan/facade/WlanFacadeTest.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Script for verifying that we can invoke methods of the WlanFacade.
+
+"""
+import array
+import logging
+
+from mobly import signals, test_runner
+
+from antlion.test_utils.wifi import base_test
+
+
+class WlanFacadeTest(base_test.WifiBaseTest):
+    def setup_class(self):
+        super().setup_class()
+        self.log = logging.getLogger()
+        if len(self.fuchsia_devices) == 0:
+            raise signals.TestAbortClass("Requires at least one Fuchsia device")
+        self.fuchsia_device = self.fuchsia_devices[0]
+
+    def test_get_phy_id_list(self) -> None:
+        result = self.fuchsia_device.sl4f.wlan_lib.get_phy_id_list()
+        self.log.info(f"Got Phy IDs {result}")
+
+    def test_get_country(self) -> None:
+        wlan_lib = self.fuchsia_device.sl4f.wlan_lib
+        phy_id_list = wlan_lib.get_phy_id_list()
+        country_bytes = wlan_lib.get_country(phy_id_list[0])
+
+        country_string = str(array.array("b", country_bytes), encoding="us-ascii")
+        self.log.info(f"Got country {country_string} ({country_bytes})")
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/facade/WlanStatusTest.py b/tests/wlan/facade/WlanStatusTest.py
new file mode 100644
index 0000000..c312c1b
--- /dev/null
+++ b/tests/wlan/facade/WlanStatusTest.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Test to verify that a DUT's client interface's status can be queried.
+"""
+
+import logging
+
+from mobly import signals, test_runner
+from mobly.records import TestResultRecord
+
+from antlion.controllers.fuchsia_lib.wlan_lib import WlanFailure
+from antlion.test_utils.wifi import base_test
+
+
+class WlanStatusTest(base_test.WifiBaseTest):
+    """WLAN status test class.
+
+    Test Bed Requirements:
+    * One or more Fuchsia devices with WLAN client capabilities.
+    """
+
+    def setup_class(self):
+        super().setup_class()
+        self.log = logging.getLogger()
+        for fd in self.fuchsia_devices:
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
+
+    def on_fail(self, record: TestResultRecord):
+        for fd in self.fuchsia_devices:
+            super().on_device_fail(fd, record)
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
+
+    def test_wlan_stopped_client_status(self):
+        """Queries WLAN status on DUTs with no WLAN ifaces.
+
+        Tests that DUTs without WLAN interfaces have empty results and return
+        an error when queried for status.
+        """
+        for fd in self.fuchsia_devices:
+            fd.deconfigure_wlan()
+            try:
+                _ = fd.sl4f.wlan_lib.status()
+                raise signals.TestFailure(
+                    "Calling WLAN status with no WLAN interfaces should throw WlanError"
+                )
+            except WlanFailure:
+                raise signals.TestPass("Success")
+
+    def test_wlan_started_client_status(self):
+        """Queries WLAN status on DUTs with WLAN ifaces.
+
+        Tests that, once WLAN client interfaces have been created, each one
+        returns a result and that none of them return errors when queried for
+        status.
+        """
+        for fd in self.fuchsia_devices:
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
+            _ = fd.sl4f.wlan_lib.status()
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/functional/BUILD.gn b/tests/wlan/functional/BUILD.gn
new file mode 100644
index 0000000..5423302
--- /dev/null
+++ b/tests/wlan/functional/BUILD.gn
@@ -0,0 +1,143 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("beacon_loss_test") {
+  main_source = "BeaconLossTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("channel_switch_test") {
+  main_source = "ChannelSwitchTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("connection_stress_test") {
+  main_source = "ConnectionStressTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("download_stress_test") {
+  main_source = "DownloadStressTest.py"
+
+  # Requires external internet access. This is considered bad practice for an
+  # automated test due to reliance on external services. Will remain an at-desk
+  # test until rewritten to remove dependence on external services.
+  # environments = display_ap_envs
+  environments = []
+}
+
+antlion_host_test("ping_stress_test") {
+  main_source = "PingStressTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("soft_ap_test") {
+  main_source = "SoftApTest.py"
+
+  # Requires one Fuchsia device and one Android device. There are no
+  # infra-hosted environments to run this test on. Will likely remain an at-desk
+  # test for as long as it requires an Android device.
+  environments = []
+}
+
+antlion_host_test("wlan_driver_restart_test") {
+  main_source = "WlanDriverRestartTest.py"
+  environments = [
+    nuc7_env,
+    nuc11_env,
+  ]
+  test_data_deps =
+      [ "//src/developer/ffx/plugins/driver:ffx_driver_tool_test_data" ]
+}
+
+antlion_host_test("wlan_reboot_ap_test") {
+  main_source = "WlanRebootTest.py"
+  environments = display_ap_iperf_envs
+  test_cases = [ "test_.+_ap_.+" ]
+}
+
+antlion_host_test("wlan_reboot_ap_test_quick") {
+  main_source = "WlanRebootTest.py"
+  environments = display_ap_iperf_envs
+  test_cases = [ "test_soft_reboot_dut_5g_open_ipv4" ]
+}
+
+antlion_host_test("wlan_reboot_dut_test") {
+  main_source = "WlanRebootTest.py"
+  environments = display_ap_iperf_envs
+  test_cases = [ "test_.+_dut_.+" ]
+}
+
+antlion_host_test("wlan_reboot_dut_test_quick") {
+  main_source = "WlanRebootTest.py"
+  environments = display_ap_iperf_envs
+  test_cases = [ "test_soft_reboot_ap_5g_open_ipv4" ]
+}
+
+antlion_host_test("wlan_scan_test") {
+  main_source = "WlanScanTest.py"
+  environments = display_ap_envs
+}
+
+# iwlwifi AX201 does not support WPA2 yet.
+# TODO(b/328494216): Remove this target and add nuc11_env to wlan_scan_test.
+antlion_host_test("wlan_scan_test_without_wpa2") {
+  main_source = "WlanScanTest.py"
+  environments = [ nuc11_ap_env ]
+  test_cases = [
+    "test_basic_scan_request",
+    "test_scan_while_connected_open_network_2g",
+    "test_scan_while_connected_open_network_5g",
+  ]
+}
+
+antlion_host_test("wlan_target_security_test") {
+  main_source = "WlanTargetSecurityTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("wlan_wireless_network_management_test") {
+  main_source = "WlanWirelessNetworkManagementTest.py"
+  environments = display_ap_envs
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":beacon_loss_test($host_toolchain)",
+    ":channel_switch_test($host_toolchain)",
+    ":ping_stress_test($host_toolchain)",
+    ":wlan_reboot_ap_test($host_toolchain)",
+    ":wlan_reboot_dut_test($host_toolchain)",
+    ":wlan_scan_test($host_toolchain)",
+    ":wlan_scan_test_without_wpa2($host_toolchain)",
+    ":wlan_target_security_test($host_toolchain)",
+    ":wlan_wireless_network_management_test($host_toolchain)",
+  ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    ":ping_stress_test($host_toolchain)",
+    ":wlan_driver_restart_test($host_toolchain)",
+    ":wlan_reboot_ap_test_quick($host_toolchain)",
+    ":wlan_reboot_dut_test_quick($host_toolchain)",
+    ":wlan_scan_test_without_wpa2($host_toolchain)",
+  ]
+}
+
+# Tests that are disabled in automation
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [
+    ":download_stress_test($host_toolchain)",
+    ":soft_ap_test($host_toolchain)",
+  ]
+}
diff --git a/tests/wlan/functional/BeaconLossTest.py b/tests/wlan/functional/BeaconLossTest.py
new file mode 100644
index 0000000..4104fb1
--- /dev/null
+++ b/tests/wlan/functional/BeaconLossTest.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Script for testing WiFi recovery after rebooting the AP.
+
+Override default number of iterations using the following
+parameter in the test config file.
+
+"beacon_loss_test_iterations": "5"
+"""
+
+import logging
+import time
+
+from mobly import asserts, signals, test_runner
+from mobly.records import TestResultRecord
+
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+from antlion.utils import rand_ascii_str
+
+
+class BeaconLossTest(base_test.WifiBaseTest):
+    # Default number of test iterations here.
+    # Override using parameter in config file.
+    # Eg: "beacon_loss_test_iterations": "10"
+    num_of_iterations = 5
+
+    # Time to wait for AP to startup
+    wait_ap_startup_s = 15
+
+    # Default wait time in seconds for the AP radio to turn back on
+    wait_to_connect_after_ap_txon_s = 5
+
+    # Time to wait for device to disconnect after AP radio off
+    wait_after_ap_txoff_s = 15
+
+    # Time to wait for device to complete connection setup after
+    # given an associate command
+    wait_client_connection_setup_s = 15
+
+    def setup_class(self) -> None:
+        super().setup_class()
+        self.log = logging.getLogger()
+        self.ssid = rand_ascii_str(10)
+
+        self.dut = self.get_dut(AssociationMode.POLICY)
+
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("Requires at least one access point")
+        self.access_point = self.access_points[0]
+
+        self.num_of_iterations = int(
+            self.user_params.get("beacon_loss_test_iterations", self.num_of_iterations)
+        )
+        self.in_use_interface: str | None = None
+
+    def teardown_test(self) -> None:
+        self.dut.disconnect()
+        self.dut.reset_wifi()
+        # ensure radio is on, in case the test failed while the radio was off
+        if self.in_use_interface:
+            self.access_point.iwconfig.ap_iwconfig(self.in_use_interface, "txpower on")
+        self.download_logs()
+        self.access_point.stop_all_aps()
+
+    def on_fail(self, record: TestResultRecord) -> None:
+        super().on_fail(record)
+        self.access_point.stop_all_aps()
+
+    def beacon_loss(self, channel) -> None:
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=channel,
+            ssid=self.ssid,
+        )
+        time.sleep(self.wait_ap_startup_s)
+        if channel > 14:
+            self.in_use_interface = self.access_point.wlan_5g
+        else:
+            self.in_use_interface = self.access_point.wlan_2g
+
+        # TODO(b/144505723): [ACTS] update BeaconLossTest.py to handle client
+        # roaming, saved networks, etc.
+        self.log.info("sending associate command for ssid %s", self.ssid)
+        self.dut.associate(target_ssid=self.ssid)
+
+        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
+
+        time.sleep(self.wait_client_connection_setup_s)
+
+        for _ in range(0, self.num_of_iterations):
+            # Turn off AP radio
+            self.log.info("turning off radio")
+            self.access_point.iwconfig.ap_iwconfig(self.in_use_interface, "txpower off")
+            time.sleep(self.wait_after_ap_txoff_s)
+
+            # Did we disconnect from AP?
+            asserts.assert_false(self.dut.is_connected(), "Failed to disconnect.")
+
+            # Turn on AP radio
+            self.log.info("turning on radio")
+            self.access_point.iwconfig.ap_iwconfig(self.in_use_interface, "txpower on")
+            time.sleep(self.wait_to_connect_after_ap_txon_s)
+
+            # Tell the client to connect
+            self.log.info(f"sending associate command for ssid {self.ssid}")
+            self.dut.associate(target_ssid=self.ssid)
+            time.sleep(self.wait_client_connection_setup_s)
+
+            # Did we connect back to WiFi?
+            asserts.assert_true(self.dut.is_connected(), "Failed to connect back.")
+
+    def test_beacon_loss_2g(self) -> None:
+        self.beacon_loss(channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G)
+
+    def test_beacon_loss_5g(self) -> None:
+        self.beacon_loss(channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G)
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/functional/ChannelSwitchTest.py b/tests/wlan/functional/ChannelSwitchTest.py
new file mode 100644
index 0000000..27597f9
--- /dev/null
+++ b/tests/wlan/functional/ChannelSwitchTest.py
@@ -0,0 +1,396 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Tests STA handling of channel switch announcements.
+"""
+
+import logging
+import random
+import time
+from typing import Sequence
+
+from honeydew.typing.wlan import ClientStatusConnected
+from mobly import asserts, signals, test_runner
+
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import FuchsiaSecurityType
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.fuchsia_lib.wlan_ap_policy_lib import (
+    ConnectivityMode,
+    OperatingBand,
+)
+from antlion.controllers.fuchsia_lib.wlan_lib import WlanFailure, WlanMacRole
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+from antlion.utils import rand_ascii_str
+
+
+class ChannelSwitchTest(base_test.WifiBaseTest):
+    """Tests STA (and optionally SoftAP) handling of AP channel switches."""
+
+    # Time to wait between issuing channel switches
+    WAIT_BETWEEN_CHANNEL_SWITCHES_S = 15
+
+    # For operating class 115 tests.
+    GLOBAL_OPERATING_CLASS_115_CHANNELS = [36, 40, 44, 48]
+    # A channel outside the operating class.
+    NON_GLOBAL_OPERATING_CLASS_115_CHANNEL = 52
+
+    # For operating class 124 tests.
+    GLOBAL_OPERATING_CLASS_124_CHANNELS = [149, 153, 157, 161]
+    # A channel outside the operating class.
+    NON_GLOBAL_OPERATING_CLASS_124_CHANNEL = 52
+
+    def setup_class(self) -> None:
+        """Acquire a Fuchsia DUT and one AP; stop any pre-existing SoftAPs."""
+        super().setup_class()
+        self.log = logging.getLogger()
+        self.ssid = rand_ascii_str(10)
+
+        self.fuchsia_device, self.dut = self.get_dut_type(
+            FuchsiaDevice, AssociationMode.POLICY
+        )
+
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("Requires at least one access point")
+        self.access_point = self.access_points[0]
+        # Ensure no SoftAP left over from a previous run interferes here.
+        self._stop_all_soft_aps()
+        # AP interface used by the current test; assigned in channel_switch().
+        self.in_use_interface: str | None = None
+
+    def teardown_test(self) -> None:
+        """Disconnect the DUT, collect logs, and tear down all APs."""
+        self.dut.disconnect()
+        self.dut.reset_wifi()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+        super().teardown_test()
+
+    # TODO(fxbug.dev/42166670): Change band type to an enum.
+    def channel_switch(
+        self,
+        band: str,
+        starting_channel: int,
+        channel_switches: Sequence[int],
+        test_with_soft_ap: bool = False,
+    ) -> None:
+        """Setup and run a channel switch test with the given parameters.
+
+        Creates an AP, associates to it, and then issues channel switches
+        through the provided channels. After each channel switch, the test
+        checks that the DUT is connected for a period of time before considering
+        the channel switch successful. If directed to start a SoftAP, the test
+        will also check that the SoftAP is on the expected channel after each
+        channel switch.
+
+        Args:
+            band: band that AP will use, must be a valid band (e.g.
+                hostapd_constants.BAND_2G)
+            starting_channel: channel number that AP will use at startup
+            channel_switches: ordered list of channels that the test will
+                attempt to switch to
+            test_with_soft_ap: whether to start a SoftAP before beginning the
+                channel switches (default is False); note that if a SoftAP is
+                started, the test will also check that the SoftAP handles
+                channel switches correctly
+        """
+        asserts.assert_true(
+            band in [hostapd_constants.BAND_2G, hostapd_constants.BAND_5G],
+            f"Failed to setup AP, invalid band {band}",
+        )
+
+        self.current_channel_num = starting_channel
+        if band == hostapd_constants.BAND_5G:
+            self.in_use_interface = self.access_point.wlan_5g
+        elif band == hostapd_constants.BAND_2G:
+            self.in_use_interface = self.access_point.wlan_2g
+        else:
+            raise TypeError(f'Unknown band "{band}"')
+
+        asserts.assert_true(
+            self._channels_valid_for_band([self.current_channel_num], band),
+            (
+                f"starting channel {self.current_channel_num} not a valid channel "
+                f"for band {band}"
+            ),
+        )
+
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=self.current_channel_num,
+            ssid=self.ssid,
+        )
+        if test_with_soft_ap:
+            self._start_soft_ap()
+        self.log.info("sending associate command for ssid %s", self.ssid)
+        self.dut.associate(target_ssid=self.ssid)
+        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
+
+        asserts.assert_true(
+            channel_switches, "Cannot run test, no channels to switch to"
+        )
+        asserts.assert_true(
+            self._channels_valid_for_band(channel_switches, band),
+            (
+                f"channel_switches {channel_switches} includes invalid channels "
+                f"for band {band}"
+            ),
+        )
+
+        for channel_num in channel_switches:
+            # Skip no-op switches; the AP is already on this channel.
+            if channel_num == self.current_channel_num:
+                continue
+            self.log.info(
+                f"channel switch: {self.current_channel_num} -> {channel_num}"
+            )
+            self.access_point.channel_switch(self.in_use_interface, channel_num)
+            channel_num_after_switch = self.access_point.get_current_channel(
+                self.in_use_interface
+            )
+            asserts.assert_equal(
+                channel_num_after_switch, channel_num, "AP failed to channel switch"
+            )
+            self.current_channel_num = channel_num
+
+            # Check periodically to see if DUT stays connected. Sometimes
+            # CSA-induced disconnects occur seconds after last channel switch.
+            for _ in range(self.WAIT_BETWEEN_CHANNEL_SWITCHES_S):
+                asserts.assert_true(
+                    self.dut.is_connected(),
+                    "Failed to stay connected after channel switch.",
+                )
+                status = self.fuchsia_device.sl4f.wlan_lib.status()
+                if isinstance(status, ClientStatusConnected):
+                    client_channel = status.channel.primary
+                    asserts.assert_equal(
+                        client_channel,
+                        channel_num,
+                        f"Client interface on wrong channel ({client_channel})",
+                    )
+                    if test_with_soft_ap:
+                        soft_ap_channel = self._soft_ap_channel()
+                        asserts.assert_equal(
+                            soft_ap_channel,
+                            channel_num,
+                            f"SoftAP interface on wrong channel ({soft_ap_channel})",
+                        )
+                # BUG FIX: sleep on every pass. Previously the sleep was inside
+                # the ClientStatusConnected branch, so when status() returned a
+                # different type this loop busy-spun instead of monitoring for
+                # WAIT_BETWEEN_CHANNEL_SWITCHES_S seconds.
+                time.sleep(1)
+
+    def test_channel_switch_2g(self) -> None:
+        """Channel switch through all (US only) channels in the 2 GHz band.
+
+        Asserts continuous DUT connectivity across every switch.
+        """
+        self.channel_switch(
+            band=hostapd_constants.BAND_2G,
+            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            channel_switches=hostapd_constants.US_CHANNELS_2G,
+        )
+
+    def test_channel_switch_2g_with_soft_ap(self) -> None:
+        """Channel switch through (US only) 2 Ghz channels with SoftAP up.
+
+        Also asserts the SoftAP follows each channel switch.
+        """
+        self.channel_switch(
+            band=hostapd_constants.BAND_2G,
+            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            channel_switches=hostapd_constants.US_CHANNELS_2G,
+            test_with_soft_ap=True,
+        )
+
+    def test_channel_switch_2g_shuffled_with_soft_ap(self) -> None:
+        """Switch through shuffled (US only) 2 Ghz channels with SoftAP up."""
+        # Copy before shuffling: random.shuffle() works in place, and mutating
+        # hostapd_constants.US_CHANNELS_2G would corrupt the shared module
+        # constant for every other test in this run.
+        channels = list(hostapd_constants.US_CHANNELS_2G)
+        random.shuffle(channels)
+        self.log.info(f"Shuffled channel switch sequence: {channels}")
+        self.channel_switch(
+            band=hostapd_constants.BAND_2G,
+            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            channel_switches=channels,
+            test_with_soft_ap=True,
+        )
+
+    # TODO(fxbug.dev/42165602): This test fails.
+    def test_channel_switch_5g(self) -> None:
+        """Channel switch through all (US only) channels in the 5 GHz band.
+
+        Asserts continuous DUT connectivity across every switch.
+        """
+        self.channel_switch(
+            band=hostapd_constants.BAND_5G,
+            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            channel_switches=hostapd_constants.US_CHANNELS_5G,
+        )
+
+    # TODO(fxbug.dev/42165602): This test fails.
+    def test_channel_switch_5g_with_soft_ap(self) -> None:
+        """Channel switch through (US only) 5 GHz channels with SoftAP up.
+
+        Also asserts the SoftAP follows each channel switch.
+        """
+        self.channel_switch(
+            band=hostapd_constants.BAND_5G,
+            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            channel_switches=hostapd_constants.US_CHANNELS_5G,
+            test_with_soft_ap=True,
+        )
+
+    def test_channel_switch_5g_shuffled_with_soft_ap(self) -> None:
+        """Switch through shuffled (US only) 5 Ghz channels with SoftAP up."""
+        # Copy before shuffling: random.shuffle() works in place, and mutating
+        # hostapd_constants.US_CHANNELS_5G would corrupt the shared module
+        # constant for every other test in this run.
+        channels = list(hostapd_constants.US_CHANNELS_5G)
+        random.shuffle(channels)
+        self.log.info(f"Shuffled channel switch sequence: {channels}")
+        self.channel_switch(
+            band=hostapd_constants.BAND_5G,
+            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            channel_switches=channels,
+            test_with_soft_ap=True,
+        )
+
+    # TODO(fxbug.dev/42165602): This test fails.
+    def test_channel_switch_regression_global_operating_class_115(self) -> None:
+        """Channel switch into, through, and out of global op. class 115 channels.
+
+        Global operating class 115 is described in IEEE 802.11-2016 Table E-4.
+        Regression test for fxbug.dev/42165602.
+        """
+        # End on a channel outside the class to exercise the exit transition.
+        channels = self.GLOBAL_OPERATING_CLASS_115_CHANNELS + [
+            self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL
+        ]
+        self.channel_switch(
+            band=hostapd_constants.BAND_5G,
+            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL,
+            channel_switches=channels,
+        )
+
+    # TODO(fxbug.dev/42165602): This test fails.
+    def test_channel_switch_regression_global_operating_class_115_with_soft_ap(
+        self,
+    ) -> None:
+        """Test global operating class 115 channel switches, with SoftAP.
+
+        Regression test for fxbug.dev/42165602.
+        """
+        channels = self.GLOBAL_OPERATING_CLASS_115_CHANNELS + [
+            self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL
+        ]
+        self.channel_switch(
+            band=hostapd_constants.BAND_5G,
+            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL,
+            channel_switches=channels,
+            test_with_soft_ap=True,
+        )
+
+    # TODO(fxbug.dev/42165602): This test fails.
+    def test_channel_switch_regression_global_operating_class_124(self) -> None:
+        """Switch into, through, and out of global op. class 124 channels.
+
+        Global operating class 124 is described in IEEE 802.11-2016 Table E-4.
+        Regression test for fxbug.dev/42142868.
+        """
+        # End on a channel outside the class to exercise the exit transition.
+        channels = self.GLOBAL_OPERATING_CLASS_124_CHANNELS + [
+            self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL
+        ]
+        self.channel_switch(
+            band=hostapd_constants.BAND_5G,
+            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL,
+            channel_switches=channels,
+        )
+
+    # TODO(fxbug.dev/42165602): This test fails.
+    def test_channel_switch_regression_global_operating_class_124_with_soft_ap(
+        self,
+    ) -> None:
+        """Test global operating class 124 channel switches, with SoftAP.
+
+        Regression test for fxbug.dev/42142868.
+        """
+        channels = self.GLOBAL_OPERATING_CLASS_124_CHANNELS + [
+            self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL
+        ]
+        self.channel_switch(
+            band=hostapd_constants.BAND_5G,
+            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL,
+            channel_switches=channels,
+            test_with_soft_ap=True,
+        )
+
+    def _channels_valid_for_band(self, channels: Sequence[int], band: str) -> bool:
+        """Determine if the channels are valid for the band (US only).
+
+        Args:
+            channels: channel numbers
+            band: a valid band (e.g. hostapd_constants.BAND_2G)
+
+        Returns:
+            True if every channel is a valid US channel for the band.
+        """
+        if band == hostapd_constants.BAND_2G:
+            band_channels = frozenset(hostapd_constants.US_CHANNELS_2G)
+        elif band == hostapd_constants.BAND_5G:
+            band_channels = frozenset(hostapd_constants.US_CHANNELS_5G)
+        else:
+            # asserts.fail raises, so band_channels is always bound below.
+            asserts.fail(f"Invalid band {band}")
+        # Subset test replaces the redundant `if x: return True / return False`.
+        return frozenset(channels) <= band_channels
+
+    def _start_soft_ap(self) -> None:
+        """Bring up a SoftAP on the DUT with a random open-security SSID.
+
+        Raises:
+            EnvironmentError: if the SoftAP does not start
+        """
+        soft_ap_ssid = rand_ascii_str(10)
+        self.log.info(f'Starting SoftAP on DUT with ssid "{soft_ap_ssid}"')
+
+        ap_policy = self.fuchsia_device.sl4f.wlan_ap_policy_lib
+        response = ap_policy.wlanStartAccessPoint(
+            soft_ap_ssid,
+            FuchsiaSecurityType.NONE,
+            None,
+            ConnectivityMode.LOCAL_ONLY,
+            OperatingBand.ANY,
+        )
+        error = response.get("error")
+        if error:
+            raise EnvironmentError(f"SL4F: Failed to setup SoftAP. Err: {error}")
+        self.log.info(f"SoftAp network ({soft_ap_ssid}) is up.")
+
+    def _stop_all_soft_aps(self) -> None:
+        """Tear down every SoftAP currently running on the Fuchsia device.
+
+        Raises:
+            EnvironmentError: if SoftAP stop call fails
+        """
+        ap_policy = self.fuchsia_device.sl4f.wlan_ap_policy_lib
+        response = ap_policy.wlanStopAllAccessPoint()
+        error = response.get("error")
+        if error:
+            raise EnvironmentError(f"SL4F: Failed to stop all SoftAPs. Err: {error}")
+
+    def _soft_ap_channel(self) -> int:
+        """Determine the channel of the DUT SoftAP interface.
+
+        If the interface is not connected, the method will assert a test
+        failure.
+
+        Returns: channel number
+
+        Raises:
+            EnvironmentError: if SoftAP interface channel cannot be determined.
+        """
+        iface_ids = self.dut.get_wlan_interface_id_list()
+        for iface_id in iface_ids:
+            try:
+                result = self.fuchsia_device.sl4f.wlan_lib.query_iface(iface_id)
+            except WlanFailure as e:
+                # Logger.warn() is a deprecated alias; use warning().
+                self.log.warning(f"Query iface {iface_id} failed: {e}")
+                continue
+            if result.role is WlanMacRole.AP:
+                # NOTE(review): this reads the client status() even though the
+                # iface role is AP — presumably status() reports the shared phy
+                # channel; confirm against the SL4F wlan_lib API.
+                status = self.fuchsia_device.sl4f.wlan_lib.status()
+                if not isinstance(status, ClientStatusConnected):
+                    raise EnvironmentError("Client not connected")
+                return status.channel.primary
+        raise EnvironmentError("Could not determine SoftAP channel")
+
+
+# Allow direct invocation via Mobly's test runner.
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/functional/ConnectionStressTest.py b/tests/wlan/functional/ConnectionStressTest.py
new file mode 100644
index 0000000..8879dd7
--- /dev/null
+++ b/tests/wlan/functional/ConnectionStressTest.py
@@ -0,0 +1,189 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Script for testing WiFi connection and disconnection in a loop
+
+"""
+
+import logging
+import time
+from dataclasses import dataclass
+
+from mobly import asserts, signals, test_runner
+from mobly.records import TestResultRecord
+
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib.hostapd_constants import (
+    AP_DEFAULT_CHANNEL_2G,
+    AP_DEFAULT_CHANNEL_5G,
+)
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+from antlion.utils import rand_ascii_str
+
+
+@dataclass
+class TestParams:
+    """Parameters for one generated connect/disconnect stress case."""
+
+    profile: str  # hostapd profile name, e.g. "whirlwind"
+    channel: int
+    security_mode: SecurityMode
+    ap_ssid: str
+    ap_password: str | None
+    dut_ssid: str  # SSID the DUT attempts; may differ from ap_ssid (wrong-SSID case)
+    dut_password: str | None  # may differ from ap_password (wrong-password case)
+    expect_associated: bool  # whether association is expected to succeed
+
+
+class ConnectionStressTest(base_test.WifiBaseTest):
+    """Repeatedly connects/disconnects a DUT against good and bad AP configs."""
+
+    # Default number of test iterations here.
+    # Override using parameter in config file.
+    # Eg: "connection_stress_test_iterations": "50"
+    num_of_iterations = 10
+
+    def pre_run(self) -> None:
+        """Generate one connect/disconnect test per profile/channel/failure mode."""
+        tests: list[TestParams] = []
+
+        # Successful associate
+        for profile in ["whirlwind", "whirlwind_11ab_legacy", "whirlwind_11ag_legacy"]:
+            for channel in [AP_DEFAULT_CHANNEL_2G, AP_DEFAULT_CHANNEL_5G]:
+                ssid = rand_ascii_str(10)
+                tests.append(
+                    TestParams(
+                        profile=profile,
+                        channel=channel,
+                        security_mode=SecurityMode.OPEN,
+                        ap_ssid=ssid,
+                        ap_password=None,
+                        dut_ssid=ssid,
+                        dut_password=None,
+                        expect_associated=True,
+                    )
+                )
+
+        # Wrong SSID
+        for channel in [AP_DEFAULT_CHANNEL_2G, AP_DEFAULT_CHANNEL_5G]:
+            ssid = rand_ascii_str(10)
+            tests.append(
+                TestParams(
+                    profile="whirlwind",
+                    channel=channel,
+                    security_mode=SecurityMode.OPEN,
+                    ap_ssid=ssid,
+                    ap_password=None,
+                    dut_ssid=f"wrong_{ssid}",
+                    dut_password=None,
+                    expect_associated=False,
+                )
+            )
+
+        # Wrong password
+        for channel in [AP_DEFAULT_CHANNEL_2G, AP_DEFAULT_CHANNEL_5G]:
+            ssid = rand_ascii_str(10)
+            password = rand_ascii_str(20)
+            tests.append(
+                TestParams(
+                    profile="whirlwind",
+                    channel=channel,
+                    security_mode=SecurityMode.WPA2,
+                    ap_ssid=ssid,
+                    ap_password=password,
+                    dut_ssid=ssid,
+                    dut_password=f"wrong_{password}",
+                    expect_associated=False,
+                )
+            )
+
+        def test_name(test: TestParams) -> str:
+            # Derive a stable, readable test name from the parameters.
+            channel = "2g" if test.channel == AP_DEFAULT_CHANNEL_2G else "5g"
+            if test.expect_associated:
+                return f"test_{test.profile}_{channel}"
+            if test.ap_ssid != test.dut_ssid:
+                return f"test_{test.profile}_{channel}_wrong_ssid"
+            if test.ap_password != test.dut_password:
+                return f"test_{test.profile}_{channel}_wrong_password"
+            raise TypeError(f"Unknown name for {test}")
+
+        self.generate_tests(self.connect_disconnect, test_name, [(t,) for t in tests])
+
+    def setup_class(self) -> None:
+        """Acquire a DUT and one AP; read the iteration count from user params."""
+        super().setup_class()
+        self.log = logging.getLogger()
+        self.ssid = rand_ascii_str(10)
+
+        self.dut = self.get_dut(AssociationMode.POLICY)
+
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("Requires at least one access point")
+        self.access_point = self.access_points[0]
+
+        # Config override, e.g. "connection_stress_test_iterations": "50".
+        self.num_of_iterations = int(
+            self.user_params.get(
+                "connection_stress_test_iterations", self.num_of_iterations
+            )
+        )
+        self.log.info(f"iterations: {self.num_of_iterations}")
+
+    def teardown_test(self) -> None:
+        """Reset DUT WiFi state, collect logs, and tear down all APs.
+
+        Also chains to super().teardown_test(), matching the sibling test
+        classes in this suite (previously omitted here).
+        """
+        self.dut.reset_wifi()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+        super().teardown_test()
+
+    def on_fail(self, record: TestResultRecord) -> None:
+        """Stop all APs after a failed test so the next test starts clean."""
+        super().on_fail(record)
+        self.access_point.stop_all_aps()
+
+    def connect_disconnect(self, test: TestParams) -> None:
+        """Start an AP, then repeatedly associate and disconnect the DUT.
+
+        Runs num_of_iterations associate/disconnect cycles and asserts that
+        each association attempt matches test.expect_associated.
+
+        Args:
+            test: AP/DUT configuration and the expected association outcome
+        """
+        setup_ap(
+            access_point=self.access_point,
+            profile_name=test.profile,
+            channel=test.channel,
+            ssid=test.ap_ssid,
+            security=Security(
+                security_mode=test.security_mode, password=test.ap_password
+            ),
+        )
+
+        for iteration in range(self.num_of_iterations):
+            associated = self.dut.associate(
+                test.dut_ssid,
+                target_pwd=test.dut_password,
+                target_security=test.security_mode,
+            )
+            asserts.assert_equal(
+                associated,
+                test.expect_associated,
+                (
+                    # 1-based so the final attempt reads "10/10", not "9/10".
+                    f"Attempt {iteration + 1}/{self.num_of_iterations}: "
+                    f"associated={associated}, want {test.expect_associated}"
+                ),
+            )
+
+            self.dut.disconnect()
+
+            # Wait a second before trying again
+            time.sleep(1)
+
+
+# Allow direct invocation via Mobly's test runner.
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/functional/DownloadStressTest.py b/tests/wlan/functional/DownloadStressTest.py
new file mode 100644
index 0000000..f11f5da
--- /dev/null
+++ b/tests/wlan/functional/DownloadStressTest.py
@@ -0,0 +1,203 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Script for testing various download stress scenarios.
+
+"""
+import logging
+import threading
+
+from mobly import signals, test_runner
+
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.fuchsia import utils
+from antlion.test_utils.wifi import base_test
+from antlion.utils import rand_ascii_str
+
+
+class DownloadStressTest(base_test.WifiBaseTest):
+    """Stresses the DUT with single, sequential, and concurrent downloads."""
+
+    # Default number of test iterations here.
+    # Override using parameter in config file.
+    # Eg: "download_stress_test_iterations": "10"
+    num_of_iterations = 3
+
+    # Timeout for download thread in seconds
+    download_timeout_s = 60 * 5
+
+    # Download urls
+    url_20MB = "http://ipv4.download.thinkbroadband.com/20MB.zip"
+    url_40MB = "http://ipv4.download.thinkbroadband.com/40MB.zip"
+    url_60MB = "http://ipv4.download.thinkbroadband.com/60MB.zip"
+    url_512MB = "http://ipv4.download.thinkbroadband.com/512MB.zip"
+
+    # Constants used in test_one_large_multiple_small_downloads
+    download_small_url = url_20MB
+    download_large_url = url_512MB
+    num_of_small_downloads = 5
+    # NOTE(review): class-level mutable list is shared across instances; it is
+    # cleared in teardown_test, but an instance attribute would be safer.
+    download_threads_result: list[bool] = []
+
+    def setup_class(self) -> None:
+        """Acquire a Fuchsia DUT and one AP, start a 2.4 GHz AP, and associate."""
+        super().setup_class()
+        self.log = logging.getLogger()
+        self.ssid = rand_ascii_str(10)
+
+        self.fuchsia_device, self.dut = self.get_dut_type(
+            FuchsiaDevice, AssociationMode.POLICY
+        )
+
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("Requires at least one access point")
+        self.access_point = self.access_points[0]
+
+        # Config override, e.g. "download_stress_test_iterations": "10".
+        self.num_of_iterations = int(
+            self.user_params.get(
+                "download_stress_test_iterations", self.num_of_iterations
+            )
+        )
+
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.ssid,
+        )
+        self.dut.associate(self.ssid)
+
+    def teardown_test(self) -> None:
+        """Clear per-test results, disconnect the DUT, and tear down all APs."""
+        self.download_threads_result.clear()
+        self.dut.disconnect()
+        self.dut.reset_wifi()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+        super().teardown_test()
+
+    def test_download_small(self) -> bool:
+        """Download a single 20 MB file; returns the download success flag."""
+        self.log.info("Downloading small file")
+        return self.download_file(self.url_20MB)
+
+    def test_download_large(self) -> bool:
+        """Download a single 512 MB file; returns the download success flag."""
+        return self.download_file(self.url_512MB)
+
+    def test_continuous_download(self) -> bool:
+        """Download the 512 MB file num_of_iterations times in a row.
+
+        Stops at the first failed download (all() short-circuits, matching the
+        original early return) and reports overall success.
+        """
+        return all(
+            self.download_file(self.url_512MB)
+            for _ in range(self.num_of_iterations)
+        )
+
+    def download_file(self, url: str) -> bool:
+        """Download url on the DUT via curl; True on success.
+
+        --max-time bounds the transfer so a stalled download cannot hang past
+        download_timeout_s.
+        """
+        self.log.info(f"Start downloading: {url}")
+        return utils.http_file_download_by_curl(
+            self.fuchsia_device,
+            url,
+            additional_args=f"--max-time {self.download_timeout_s} --silent",
+        )
+
+    def download_thread(self, url: str) -> bool:
+        """Download url, log the outcome, and record it for the caller."""
+        succeeded = self.download_file(url)
+        outcome = "Success" if succeeded else "Failure"
+        self.log.info(f"{outcome} downloading: {url}")
+
+        self.download_threads_result.append(succeeded)
+        return succeeded
+
+    def test_multi_downloads(self) -> bool:
+        """Download three files concurrently; fail if any thread stalls or errors.
+
+        Raises:
+            signals.TestFailure: if a download thread times out or reports
+                a failed download
+        """
+        download_urls = [self.url_20MB, self.url_40MB, self.url_60MB]
+        download_threads: list[threading.Thread] = []
+
+        try:
+            # Start multiple downloads at the same time
+            for index, url in enumerate(download_urls):
+                self.log.info(f"Create and start thread {index}.")
+                t = threading.Thread(target=self.download_thread, args=(url,))
+                download_threads.append(t)
+                t.start()
+
+            # Wait for all threads to complete or timeout
+            for t in download_threads:
+                t.join(self.download_timeout_s)
+        finally:
+            # Thread.join() returns silently on timeout, so check liveness
+            # explicitly. BUG FIX: Thread.isAlive() was removed in Python 3.9;
+            # use is_alive(). Also report the stalled thread's own index
+            # instead of whatever index the loop ended on.
+            stalled = [i for i, t in enumerate(download_threads) if t.is_alive()]
+            if stalled:
+                raise signals.TestFailure(f"Thread {stalled[0]} timedout")
+
+        for index, result in enumerate(self.download_threads_result):
+            if not result:
+                self.log.info(f"Download failed for {index}")
+                # (unreachable `return False` after this raise removed)
+                raise signals.TestFailure(f"Thread {index} failed to download")
+
+        return True
+
+    def test_one_large_multiple_small_downloads(self) -> bool:
+        """Repeatedly run one large download alongside sequential small ones.
+
+        Raises:
+            signals.TestFailure: if a download thread times out or reports
+                a failed download
+        """
+        # BUG FIX: the outer loop previously reused `index`, which the inner
+        # enumerations shadowed; use `_` since the value is not needed.
+        for _ in range(self.num_of_iterations):
+            download_threads: list[threading.Thread] = []
+            try:
+                large_thread = threading.Thread(
+                    target=self.download_thread, args=(self.download_large_url,)
+                )
+                download_threads.append(large_thread)
+                large_thread.start()
+
+                for _ in range(self.num_of_small_downloads):
+                    # Start small file download
+                    t = threading.Thread(
+                        target=self.download_thread, args=(self.download_small_url,)
+                    )
+                    download_threads.append(t)
+                    t.start()
+                    # Wait for thread to exit before starting the next iteration
+                    t.join(self.download_timeout_s)
+
+                # Wait for the large file download thread to complete
+                large_thread.join(self.download_timeout_s)
+            finally:
+                # BUG FIX: Thread.isAlive() was removed in Python 3.9; use
+                # is_alive(). Report the stalled thread's own index.
+                stalled = [i for i, t in enumerate(download_threads) if t.is_alive()]
+                if stalled:
+                    raise signals.TestFailure(f"Thread {stalled[0]} timedout")
+
+            for index, result in enumerate(self.download_threads_result):
+                if not result:
+                    self.log.info(f"Download failed for {index}")
+                    # (unreachable `return False` after this raise removed)
+                    raise signals.TestFailure(f"Thread {index} failed to download")
+
+            # Clear results before looping again
+            self.download_threads_result.clear()
+
+        return True
+
+
+# Allow direct invocation via Mobly's test runner.
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/functional/PingStressTest.py b/tests/wlan/functional/PingStressTest.py
new file mode 100644
index 0000000..5c04c4a
--- /dev/null
+++ b/tests/wlan/functional/PingStressTest.py
@@ -0,0 +1,285 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+PingStressTest exercises sending ICMP and ICMPv6 pings to a wireless access
+router and another device behind the AP. Note, this does not reach out to the
+internet. The DUT is only responsible for sending a routable packet; any
+communication past the first-hop is not the responsibility of the DUT.
+"""
+
+import logging
+import multiprocessing
+from typing import Callable, NamedTuple
+
+from mobly import asserts, signals, test_runner
+
+from antlion import utils
+from antlion.controllers.access_point import AccessPoint, setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.iperf_server import IPerfServerOverSsh
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+from antlion.utils import PingResult, rand_ascii_str
+
# Loopback targets used to sanity-check the DUT's own network stack.
LOOPBACK_IPV4 = "127.0.0.1"
LOOPBACK_IPV6 = "::1"
# Upper bound (seconds) on waiting for a concurrent ping worker to finish.
PING_RESULT_TIMEOUT_SEC = 60 * 5
+
+
class Addrs(NamedTuple):
    """Resolved test-network addresses passed to per-test address selectors."""

    gateway_ipv4: str  # AP bridge IPv4 address
    gateway_ipv6: str  # AP bridge IPv6 link-local address (scope ID appended by caller)
    remote_ipv4: str  # iPerf server IPv4 address behind the AP
    remote_ipv6: str  # iPerf server IPv6 address behind the AP
+
+
class Test(NamedTuple):
    """Parameters for one generated ping test case."""

    name: str  # suffix of the generated test name ("test_<name>")
    dest_ip: str | Callable[[Addrs], str]  # literal IP, or selector over Addrs
    packet_count: int = 3  # number of echo requests to send
    interval: int = 1000  # spacing between requests (presumably ms — confirm dut.ping)
    timeout: int = 1000  # per-reply wait (presumably ms — confirm dut.ping)
    size: int = 25  # payload size in bytes
+
+
class PingStressTest(base_test.WifiBaseTest):
    """Sends ICMP and ICMPv6 pings from the DUT to first-hop targets.

    Targets are loopback, the AP (gateway), and a host behind the AP. The DUT
    is only responsible for emitting a routable packet; anything past the
    first hop is out of scope for these tests.
    """

    def setup_generated_tests(self) -> None:
        """Register one ping test per `Test` tuple; names become test_<name>."""
        self.generate_tests(
            self.send_ping,
            lambda test_name, *_: f"test_{test_name}",
            [
                Test("loopback_ipv4", LOOPBACK_IPV4),
                Test("loopback_ipv6", LOOPBACK_IPV6),
                Test("gateway_ipv4", lambda addrs: addrs.gateway_ipv4),
                Test("gateway_ipv6", lambda addrs: addrs.gateway_ipv6),
                Test("remote_ipv4_small_packet", lambda addrs: addrs.remote_ipv4),
                Test("remote_ipv6_small_packet", lambda addrs: addrs.remote_ipv6),
                Test(
                    "remote_ipv4_small_packet_long",
                    lambda addrs: addrs.remote_ipv4,
                    packet_count=50,
                ),
                Test(
                    "remote_ipv6_small_packet_long",
                    lambda addrs: addrs.remote_ipv6,
                    packet_count=50,
                ),
                Test(
                    "remote_ipv4_medium_packet",
                    lambda addrs: addrs.remote_ipv4,
                    size=64,
                ),
                Test(
                    "remote_ipv6_medium_packet",
                    lambda addrs: addrs.remote_ipv6,
                    size=64,
                ),
                Test(
                    "remote_ipv4_medium_packet_long",
                    lambda addrs: addrs.remote_ipv4,
                    packet_count=50,
                    timeout=1500,
                    size=64,
                ),
                Test(
                    "remote_ipv6_medium_packet_long",
                    lambda addrs: addrs.remote_ipv6,
                    packet_count=50,
                    timeout=1500,
                    size=64,
                ),
                Test(
                    "remote_ipv4_large_packet",
                    lambda addrs: addrs.remote_ipv4,
                    size=500,
                ),
                Test(
                    "remote_ipv6_large_packet",
                    lambda addrs: addrs.remote_ipv6,
                    size=500,
                ),
                Test(
                    "remote_ipv4_large_packet_long",
                    lambda addrs: addrs.remote_ipv4,
                    packet_count=50,
                    timeout=5000,
                    size=500,
                ),
                Test(
                    "remote_ipv6_large_packet_long",
                    lambda addrs: addrs.remote_ipv6,
                    packet_count=50,
                    timeout=5000,
                    size=500,
                ),
            ],
        )

    def setup_class(self) -> None:
        """Bring up the AP and iPerf server, associate the DUT, and wait for IPs.

        Raises:
            signals.TestAbortClass: if the testbed lacks an AP or iPerf server.
        """
        super().setup_class()
        self.log = logging.getLogger()
        self.ssid = rand_ascii_str(10)

        self.fuchsia_device, self.dut = self.get_dut_type(
            FuchsiaDevice, AssociationMode.POLICY
        )

        if len(self.access_points) < 1:
            raise signals.TestAbortClass("At least one access point is required")
        self.access_point: AccessPoint = self.access_points[0]

        if len(self.iperf_servers) < 1:
            raise signals.TestAbortClass("At least one iPerf3 server is required")
        self.iperf_server: IPerfServerOverSsh = self.iperf_servers[0]

        # Bridged, dual-stack AP with NAT off so the remote host is directly
        # routable from the DUT.
        setup_ap(
            access_point=self.access_point,
            profile_name="whirlwind",
            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
            ssid=self.ssid,
            setup_bridge=True,
            is_ipv6_enabled=True,
            is_nat_enabled=False,
        )

        ap_bridges = self.access_point.interfaces.get_bridge_interface()
        if ap_bridges and len(ap_bridges) > 0:
            ap_bridge = ap_bridges[0]
        else:
            asserts.abort_class(
                f"Expected one bridge interface on the AP, got {ap_bridges}"
            )
        self.ap_ipv4 = utils.get_addr(self.access_point.ssh, ap_bridge)
        self.ap_ipv6 = utils.get_addr(
            self.access_point.ssh, ap_bridge, addr_type="ipv6_link_local"
        )
        self.log.info(f"Gateway finished setup ({self.ap_ipv4} | {self.ap_ipv6})")

        self.iperf_server.renew_test_interface_ip_address()
        self.iperf_server_ipv4 = self.iperf_server.get_addr()
        self.iperf_server_ipv6 = self.iperf_server.get_addr(
            addr_type="ipv6_private_local"
        )
        self.log.info(
            f"Remote finished setup ({self.iperf_server_ipv4} | {self.iperf_server_ipv6})"
        )

        self.dut.associate(self.ssid)

        # Wait till the DUT has valid IP addresses after connecting; a failed
        # association surfaces here as a timeout.
        self.fuchsia_device.wait_for_ipv4_addr(
            self.dut.get_default_wlan_test_interface()
        )
        self.fuchsia_device.wait_for_ipv6_addr(
            self.dut.get_default_wlan_test_interface()
        )
        self.log.info("DUT has valid IP addresses on test network")

    def teardown_class(self) -> None:
        """Disconnect the DUT, collect logs, and stop all APs."""
        if hasattr(self, "dut"):
            self.dut.disconnect()
            self.dut.reset_wifi()
        self.download_logs()
        self.access_point.stop_all_aps()
        super().teardown_class()

    def send_ping(
        self,
        _: str,
        get_addr_fn: str | Callable[[Addrs], str],
        count: int = 3,
        interval: int = 1000,
        timeout: int = 1000,
        size: int = 25,
    ) -> None:
        """Ping a single destination from the DUT and fail the test on loss.

        Args:
            _: unused test name (positional slot filled by generate_tests).
            get_addr_fn: literal destination IP, or a selector called with the
                resolved testbed addresses.
            count: number of echo requests.
            interval: spacing between requests.
            timeout: per-reply wait.
            size: payload size in bytes.

        Raises:
            signals.TestFailure: if the ping reports failure.
        """
        dest_ip = (
            get_addr_fn(
                Addrs(
                    gateway_ipv4=self.ap_ipv4,
                    # IPv6 link-local addresses require specification of the
                    # outgoing interface as the scope ID when sending packets.
                    gateway_ipv6=f"{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}",
                    remote_ipv4=self.iperf_server_ipv4,
                    # IPv6 global addresses do not require scope IDs.
                    remote_ipv6=self.iperf_server_ipv6,
                )
            )
            if callable(get_addr_fn)
            else get_addr_fn
        )

        self.log.info(f"Attempting to ping {dest_ip}...")
        ping_result = self.dut.ping(dest_ip, count, interval, timeout, size)
        if ping_result.success:
            self.log.info("Ping was successful.")
        else:
            raise signals.TestFailure(f"Ping was unsuccessful: {ping_result}")

    def test_simultaneous_pings(self) -> None:
        """Ping all four first-hop targets concurrently; every ping must succeed.

        Raises:
            signals.TestFailure: if any worker times out or any ping fails.
        """
        ping_urls = [
            self.iperf_server_ipv4,
            self.ap_ipv4,
            self.iperf_server_ipv6,
            f"{self.ap_ipv6}%{self.dut.get_default_wlan_test_interface()}",
        ]
        ping_processes: list[multiprocessing.Process] = []

        def ping_from_dut(
            self: PingStressTest, dest_ip: str, ping_results: Any
        ) -> None:
            # Runs in a child process; records (dest_ip, result) so the parent
            # can attribute failures regardless of completion order.
            self.log.info(f"Attempting to ping {dest_ip}...")
            ping_result = self.dut.ping(dest_ip, count=10, size=50)
            if ping_result.success:
                self.log.info(f"Success pinging: {dest_ip}")
            else:
                self.log.info(f"Failure pinging: {dest_ip}")
            ping_results.append((dest_ip, ping_result))

        # BUG FIX: a plain list is not shared across process boundaries, so
        # child appends were silently lost and the success check below never
        # saw any results. A Manager-backed list propagates appends to this
        # process. (PingResult must be picklable for the proxy transfer.)
        with multiprocessing.Manager() as manager:
            ping_results = manager.list()

            try:
                # Start all pings at the same time.
                for url in ping_urls:
                    p = multiprocessing.Process(
                        target=ping_from_dut, args=(self, url, ping_results)
                    )
                    ping_processes.append(p)
                    p.start()

                # Wait for all processes to complete or time out.
                for p in ping_processes:
                    p.join(PING_RESULT_TIMEOUT_SEC)

            finally:
                timed_out = []
                for url, p in zip(ping_urls, ping_processes):
                    if p.is_alive():
                        p.terminate()
                        timed_out.append(url)

                if timed_out:
                    raise signals.TestFailure(
                        f"Timed out while pinging {timed_out}"
                    )

            # Copy out of the proxy before the manager shuts down.
            results = list(ping_results)

        for url, ping_result in results:
            if not ping_result.success:
                raise signals.TestFailure(f"Failed to ping {url}: {ping_result}")
+
+
# Entry point for direct invocation; Mobly's runner discovers and runs the tests.
if __name__ == "__main__":
    test_runner.main()
diff --git a/tests/wlan/functional/SoftApTest.py b/tests/wlan/functional/SoftApTest.py
new file mode 100644
index 0000000..018197b
--- /dev/null
+++ b/tests/wlan/functional/SoftApTest.py
@@ -0,0 +1,1534 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import multiprocessing as mp
+import random
+import time
+from dataclasses import dataclass
+from enum import Enum, StrEnum, auto, unique
+from typing import Any, Mapping, Type, TypeAlias, TypeVar
+
+from mobly import asserts, signals, test_runner
+from mobly.config_parser import TestRunConfig
+
+from antlion import utils
+from antlion.controllers import iperf_client, iperf_server
+from antlion.controllers.access_point import AccessPoint, setup_ap
+from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.fuchsia_lib.wlan_ap_policy_lib import (
+    ConnectivityMode,
+    OperatingBand,
+)
+from antlion.controllers.utils_lib.ssh import settings
+from antlion.controllers.utils_lib.ssh.connection import SshConnection
+from antlion.test_utils.abstract_devices.wlan_device import (
+    AndroidWlanDevice,
+    AssociationMode,
+    FuchsiaWlanDevice,
+    SupportsWLAN,
+    create_wlan_device,
+)
+from antlion.test_utils.wifi import base_test
+
# Defaults shared across the SoftAP test cases.
DEFAULT_AP_PROFILE = "whirlwind"
DEFAULT_IPERF_PORT = 5201
DEFAULT_TIMEOUT = 30  # seconds (used as the setup_ap address-wait bound)
DEFAULT_IPERF_TIMEOUT = 60
DEFAULT_NO_ADDR_EXPECTED_TIMEOUT = 5
STATE_UP = True
STATE_DOWN = False

# JSON-like configuration tree as parsed from the Mobly user params.
ConfigValue: TypeAlias = str | int | bool | list["ConfigValue"] | "Config"
Config: TypeAlias = dict[str, ConfigValue]

# Generic return type for get_typed().
T = TypeVar("T")
+
+
def get_typed(map: Mapping[str, Any], key: str, value_type: Type[T], default: T) -> T:
    """Fetch `key` from `map` (falling back to `default`) and enforce its type.

    Args:
        map: mapping to read from.
        key: key to look up.
        value_type: type the value must be an instance of.
        default: value used when `key` is absent (also type-checked).

    Returns:
        The looked-up (or default) value.

    Raises:
        TypeError: if the resulting value is not an instance of `value_type`.
    """
    value = map.get(key, default)
    if isinstance(value, value_type):
        return value
    raise TypeError(f'"{key}" must be a {value_type.__name__}, got {type(value)}')
+
+
@unique
class DeviceRole(Enum):
    """Role a device plays when selecting its WLAN test interface."""

    AP = auto()
    CLIENT = auto()
+
+
@unique
class TestType(StrEnum):
    """How far each stress iteration goes: associate, ping, or pass traffic."""

    ASSOCIATE_ONLY = auto()
    ASSOCIATE_AND_PING = auto()
    ASSOCIATE_AND_PASS_TRAFFIC = auto()
+
+
@dataclass
class TestParams:
    """Flat bundle of SoftAP test parameters (built from user config)."""

    test_type: TestType
    security_type: SecurityMode
    connectivity_mode: ConnectivityMode
    operating_band: OperatingBand
    ssid: str
    password: str
    iterations: int
+
+
@dataclass
class APParams:
    """Configuration for a physical access point used by client-mode tests."""

    profile: str  # AP profile passed to setup_ap (e.g. "whirlwind")
    ssid: str
    channel: int
    security: Security
    password: str

    @staticmethod
    def from_dict(d: dict[str, Any]) -> "APParams":
        """Build APParams from a config dict, randomizing any missing fields."""
        # NOTE(review): assumes config "security_mode" strings match
        # SecurityMode member names — confirm against SecurityMode.
        security_mode_str = get_typed(d, "security_mode", str, SecurityMode.OPEN.value)
        security_mode = SecurityMode[security_mode_str]
        password = get_typed(
            d, "password", str, generate_random_password(security_mode=security_mode)
        )

        return APParams(
            profile=get_typed(d, "profile", str, DEFAULT_AP_PROFILE),
            ssid=get_typed(
                d,
                "ssid",
                str,
                utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
            ),
            channel=get_typed(
                d, "channel", int, hostapd_constants.AP_DEFAULT_CHANNEL_2G
            ),
            security=Security(security_mode, password),
            password=password,
        )

    def setup_ap(
        self, access_point: AccessPoint, timeout_sec: int = DEFAULT_TIMEOUT
    ) -> str:
        """Setup access_point and return the IPv4 address of its test interface.

        Raises:
            ConnectionError: if no IPv4 address appears within timeout_sec.
        """
        setup_ap(
            access_point=access_point,
            profile_name=self.profile,
            channel=self.channel,
            ssid=self.ssid,
            security=self.security,
        )

        # Channels below 36 are 2.4 GHz; 36 and above are 5 GHz.
        interface = access_point.wlan_2g if self.channel < 36 else access_point.wlan_5g

        # Poll once per second until the interface has a private IPv4 address.
        end_time = time.time() + timeout_sec
        while time.time() < end_time:
            ips = utils.get_interface_ip_addresses(access_point.ssh, interface)
            if len(ips["ipv4_private"]) > 0:
                return ips["ipv4_private"][0]
            time.sleep(1)
        raise ConnectionError(
            f"After {timeout_sec}s, device {access_point.identifier} still does not have "
            f"an ipv4 address on interface {interface}."
        )
+
+
@dataclass
class SoftAPParams:
    """Configuration for a SoftAP hosted on the Fuchsia DUT."""

    ssid: str
    security_type: SecurityMode
    password: str | None  # None iff security_type is OPEN
    connectivity_mode: ConnectivityMode
    operating_band: OperatingBand

    def __str__(self) -> str:
        """Short label used in generated test names (band_security_mode)."""
        if self.operating_band is OperatingBand.ANY:
            band = "any"
        elif self.operating_band is OperatingBand.ONLY_2G:
            band = "2g"
        elif self.operating_band is OperatingBand.ONLY_5G:
            band = "5g"
        else:
            raise TypeError(f'Unknown OperatingBand "{self.operating_band}"')
        return f'{band}_{self.security_type.replace("/", "_")}_{self.connectivity_mode}'

    @staticmethod
    def from_dict(d: dict[str, Any]) -> "SoftAPParams":
        """Build SoftAPParams from a config dict, randomizing missing fields.

        Raises:
            TypeError: if "password" is non-str, or supplied for an open network.
        """
        security_type = get_typed(d, "security_type", str, SecurityMode.OPEN.value)
        security_mode = SecurityMode[security_type]

        # A password is generated for secured networks and forbidden for open ones.
        password = d.get("password")
        if password is None and security_mode is not SecurityMode.OPEN:
            password = generate_random_password(security_mode=security_mode)
        if password is not None and not isinstance(password, str):
            raise TypeError(f'"password" must be a str or None, got {type(password)}')
        if password is not None and security_mode is SecurityMode.OPEN:
            raise TypeError(
                f'"password" must be None if "security_type" is "{SecurityMode.OPEN}"'
            )

        connectivity_mode = get_typed(
            d, "connectivity_mode", str, str(ConnectivityMode.LOCAL_ONLY)
        )
        operating_band = get_typed(d, "operating_band", str, str(OperatingBand.ONLY_2G))

        return SoftAPParams(
            ssid=get_typed(
                d,
                "ssid",
                str,
                utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
            ),
            security_type=security_mode,
            password=password,
            connectivity_mode=ConnectivityMode[connectivity_mode],
            operating_band=OperatingBand[operating_band],
        )
+
+
@dataclass
class AssociationStressTestParams:
    """Parameters for repeated client association against a DUT SoftAP."""

    test_type: TestType
    soft_ap_params: SoftAPParams
    iterations: int

    def __str__(self) -> str:
        """Label used in generated test names."""
        return f"{self.soft_ap_params}_{self.test_type}_{self.iterations}_iterations"

    @staticmethod
    def from_dict(d: dict[str, Any]) -> "AssociationStressTestParams":
        """Build from a config dict; defaults to 10 associate-and-pass-traffic runs."""
        test_type = get_typed(
            d, "test_type", str, TestType.ASSOCIATE_AND_PASS_TRAFFIC.value
        )
        return AssociationStressTestParams(
            test_type=TestType[test_type],
            soft_ap_params=SoftAPParams.from_dict(d.get("soft_ap_params", {})),
            iterations=get_typed(d, "iterations", int, 10),
        )
+
+
@dataclass
class ClientModeAlternatingTestParams:
    """Parameters for alternating between client mode and SoftAP mode."""

    ap_params: APParams
    soft_ap_params: SoftAPParams
    iterations: int

    def __str__(self) -> str:
        """Label used in generated test names."""
        return (
            f"ap_{self.ap_params.security.security_mode}_"
            f"soft_ap_{self.soft_ap_params.security_type}_"
            f"{self.iterations}_iterations"
        )

    @staticmethod
    def from_dict(d: dict[str, Any]) -> "ClientModeAlternatingTestParams":
        """Build from a config dict; defaults to 10 iterations."""
        return ClientModeAlternatingTestParams(
            ap_params=APParams.from_dict(d.get("ap_params", {})),
            soft_ap_params=SoftAPParams.from_dict(d.get("soft_ap_params", {})),
            iterations=get_typed(d, "iterations", int, 10),
        )
+
+
@dataclass
class ToggleTestParams:
    """Parameters for repeatedly toggling a DUT SoftAP up and down."""

    soft_ap_params: SoftAPParams
    iterations: int

    def __str__(self) -> str:
        """Label used in generated test names."""
        return f"{self.soft_ap_params}_{self.iterations}_iterations"

    @staticmethod
    def from_dict(d: dict[str, Any]) -> "ToggleTestParams":
        """Build from a config dict; defaults to 10 iterations."""
        return ToggleTestParams(
            soft_ap_params=SoftAPParams.from_dict(d.get("soft_ap_params", {})),
            iterations=get_typed(d, "iterations", int, 10),
        )
+
+
@dataclass
class ClientModeToggleTestParams:
    """Parameters for repeatedly toggling DUT client mode against a real AP."""

    ap_params: APParams
    iterations: int

    def __str__(self) -> str:
        # NOTE(review): APParams defines no __str__, so this embeds the full
        # dataclass repr in the test name — confirm this is intended (the
        # sibling param classes interpolate short labels instead).
        return f"{self.ap_params}_{self.iterations}_iterations"

    @staticmethod
    def from_dict(d: dict[str, Any]) -> "ClientModeToggleTestParams":
        """Build from a config dict; defaults to 10 iterations."""
        return ClientModeToggleTestParams(
            ap_params=APParams.from_dict(d.get("ap_params", {})),
            iterations=get_typed(d, "iterations", int, 10),
        )
+
+
class StressTestIterationFailure(Exception):
    """Used to differentiate a subtest failure from an actual exception.

    Raised by a single stress-test iteration so the driver can record the
    failed iteration and continue, rather than aborting the whole test.
    """
+
+
+class SoftApTest(base_test.WifiBaseTest):
+    """Tests for Fuchsia SoftAP
+
+    Testbed requirement:
+    * One Fuchsia device
+    * At least one client (Android) device
+        * For multi-client tests, at least two client (Android) devices are
+          required. Test will be skipped if less than two client devices are
+          present.
+    * For any tests that exercise client-mode (e.g. toggle tests, simultaneous
+        tests), a physical AP (whirlwind) is also required. Those tests will be
+        skipped if physical AP is not present.
+    """
+
    def __init__(self, configs: TestRunConfig) -> None:
        """Initialize the test class and capture user-supplied parameters."""
        super().__init__(configs)
        self.log = logging.getLogger()
        # Optional overrides for the stress tests, from the Mobly config file.
        self.soft_ap_test_params = configs.user_params.get("soft_ap_test_params", {})
+
+    def pre_run(self):
+        self.generate_soft_ap_tests()
+        self.generate_association_stress_tests()
+        self.generate_soft_ap_and_client_mode_alternating_stress_tests()
+        self.generate_soft_ap_toggle_stress_tests()
+        self.generate_client_mode_toggle_stress_tests()
+        self.generate_soft_ap_toggle_stress_with_client_mode_tests()
+        self.generate_client_mode_toggle_stress_with_soft_ap_tests()
+        self.generate_soft_ap_and_client_mode_random_toggle_stress_tests()
+
+    def generate_soft_ap_tests(self):
+        tests: list[SoftAPParams] = []
+
+        for operating_band in OperatingBand:
+            for security_mode in [
+                SecurityMode.OPEN,
+                SecurityMode.WEP,
+                SecurityMode.WPA,
+                SecurityMode.WPA2,
+                SecurityMode.WPA3,
+            ]:
+                for connectivity_mode in ConnectivityMode:
+                    if security_mode is SecurityMode.OPEN:
+                        ssid_length = hostapd_constants.AP_SSID_LENGTH_2G
+                        password = None
+                    else:
+                        ssid_length = hostapd_constants.AP_SSID_LENGTH_5G
+                        password = generate_random_password()
+
+                    tests.append(
+                        SoftAPParams(
+                            ssid=utils.rand_ascii_str(ssid_length),
+                            security_type=security_mode,
+                            password=password,
+                            connectivity_mode=connectivity_mode,
+                            operating_band=operating_band,
+                        )
+                    )
+
+        def generate_name(test: SoftAPParams) -> str:
+            return f"test_soft_ap_{test}"
+
+        self.generate_tests(
+            self.associate_with_soft_ap_test,
+            generate_name,
+            tests,
+        )
+
    def associate_with_soft_ap_test(self, soft_ap_params: SoftAPParams):
        """Start a SoftAP, associate the primary client, and verify traffic."""
        self.start_soft_ap(soft_ap_params)
        self.associate_with_soft_ap(self.primary_client, soft_ap_params)
        self.assert_connected_to_ap(self.primary_client, self.dut, check_traffic=True)
+
    def setup_class(self):
        """Resolve the DUT, Android clients, and iPerf endpoints for the suite."""
        super().setup_class()
        self.fuchsia_device, self.dut = self.get_dut_type(
            FuchsiaDevice, AssociationMode.POLICY
        )

        # TODO(fxb/51313): Add in device agnosticity for clients
        # Create a wlan device and iperf client for each Android client
        self.clients: list[SupportsWLAN] = []
        self.iperf_clients_map: dict[Any, Any] = {}
        for device in self.android_devices:
            client_wlan_device = create_wlan_device(device, AssociationMode.POLICY)
            self.clients.append(client_wlan_device)
            self.iperf_clients_map[client_wlan_device] = (
                client_wlan_device.create_iperf_client()
            )
        self.primary_client = self.clients[0]

        # Create an iperf server on the DUT, which will be used for any streaming.
        self.iperf_server_settings = settings.from_config(
            {
                "user": self.fuchsia_device.ssh_username,
                "host": self.fuchsia_device.ip,
                "ssh_config": self.fuchsia_device.ssh_config,
            }
        )
        self.iperf_server = iperf_server.IPerfServerOverSsh(
            self.iperf_server_settings, DEFAULT_IPERF_PORT, use_killall=True
        )
        self.iperf_server.start()

        # Attempt to create an ap iperf server. AP is only required for tests
        # that use client mode.
        self.access_point: AccessPoint | None = None
        self.ap_iperf_client: iperf_client.IPerfClientOverSsh | None = None

        # NOTE(review): a missing AP is detected via AttributeError on
        # self.access_points and left as None — client-mode tests then skip.
        try:
            self.access_point = self.access_points[0]
            self.ap_iperf_client = iperf_client.IPerfClientOverSsh(
                self.access_point.ssh_provider,
            )
            self.iperf_clients_map[self.access_point] = self.ap_iperf_client
        except AttributeError:
            pass
+
    def teardown_class(self):
        """Stop the DUT-side iperf server, then run base-class teardown."""
        # Because this is using killall, it will stop all iperf processes
        self.iperf_server.stop()
        super().teardown_class()
+
    def setup_test(self):
        """Reset every device to a known idle state before each test."""
        super().setup_test()
        # Keep Android clients awake so they respond promptly during the test.
        for ad in self.android_devices:
            ad.droid.wakeLockAcquireBright()
            ad.droid.wakeUpNow()
        for client in self.clients:
            client.disconnect()
            client.reset_wifi()
            client.wifi_toggle_state(True)
        self.stop_all_soft_aps()
        if self.access_point:
            self.access_point.stop_all_aps()
        self.dut.disconnect()
+
    def teardown_test(self):
        """Disconnect everything, collect logs, and release Android wake locks."""
        for client in self.clients:
            client.disconnect()
        for ad in self.android_devices:
            ad.droid.wakeLockRelease()
            ad.droid.goToSleepNow()
        self.stop_all_soft_aps()
        self.download_logs()
        if self.access_point:
            self.access_point.stop_all_aps()
        self.dut.disconnect()
        super().teardown_test()
+
    def start_soft_ap(self, params: SoftAPParams) -> None:
        """Starts a SoftAP on the Fuchsia device.

        Args:
            params: SoftAP configuration (SSID, security type, password,
                connectivity mode, and operating band).

        Raises:
            EnvironmentError: if the SL4F wlanStartAccessPoint call fails.
        """
        self.log.info(f"Starting SoftAP on DUT with settings: {params}")
        response = self.fuchsia_device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
            params.ssid,
            params.security_type.fuchsia_security_type(),
            params.password,
            params.connectivity_mode,
            params.operating_band,
        )
        if response.get("error"):
            raise EnvironmentError(
                f"SL4F: Failed to setup SoftAP. Err: {response['error']}"
            )
        self.log.info(f"SoftAp network ({params.ssid}) is up.")
+
    def stop_soft_ap(self, params: SoftAPParams) -> None:
        """Stops a specific SoftAP on the Fuchsia device.

        Args:
            params: configuration of the SoftAP to stop (matched by SSID,
                security type, and password).

        Raises:
            EnvironmentError: if the SL4F wlanStopAccessPoint call fails.
        """
        response = self.fuchsia_device.sl4f.wlan_ap_policy_lib.wlanStopAccessPoint(
            params.ssid, params.security_type.fuchsia_security_type(), params.password
        )
        if response.get("error"):
            raise EnvironmentError(
                f"SL4F: Failed to stop SoftAP. Err: {response['error']}"
            )
+
+    def stop_all_soft_aps(self) -> None:
+        """Stops all SoftAPs on Fuchsia Device.
+
+        Raises:
+            EnvironmentError, if StopAllAps call fails.
+        """
+        response = self.fuchsia_device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint()
+        if response.get("error"):
+            raise EnvironmentError(
+                f"SL4F: Failed to stop all SoftAPs. Err: {response['error']}"
+            )
+
+    def associate_with_soft_ap(self, device: SupportsWLAN, params: SoftAPParams):
+        """Associates client device with softAP on Fuchsia device.
+
+        Args:
+            device: wlan_device to associate with the softAP
+            params: soft AP configuration
+
+        Raises:
+            TestFailure if association fails
+        """
+        self.log.info(
+            f'Associating {device.identifier} to SoftAP on {self.dut.identifier} called "{params.ssid}'
+        )
+
+        associated = device.associate(
+            params.ssid,
+            target_pwd=params.password,
+            target_security=params.security_type,
+            check_connectivity=params.connectivity_mode
+            is ConnectivityMode.UNRESTRICTED,
+        )
+
+        asserts.assert_true(
+            associated,
+            f'Failed to associate "{device.identifier}" to SoftAP "{params.ssid}"',
+        )
+
    def disconnect_from_soft_ap(self, device: SupportsWLAN) -> None:
        """Disconnects client device from SoftAP.

        Args:
            device: wlan_device to disconnect from SoftAP
        """
        self.log.info(f"Disconnecting device {device.identifier} from SoftAP.")
        # Delegates to the device's own disconnect; no verification here.
        device.disconnect()
+
+    def get_ap_test_interface(self, ap: AccessPoint, channel: int) -> str:
+        if channel < 36:
+            return ap.wlan_2g
+        else:
+            return ap.wlan_5g
+
    def get_device_test_interface(
        self, device: SupportsWLAN | FuchsiaDevice, role: DeviceRole
    ) -> str:
        """Retrieves test interface from a provided device, which can be the
        FuchsiaDevice DUT or a WLAN client device.

        Args:
            device: the device to get the test interface from. Either
                FuchsiaDevice (DUT) or a WLAN client device.
            role: DeviceRole selecting the client or AP interface; only
                consulted for FuchsiaDevice (DUT), ignored otherwise.

        Returns:
            String, name of test interface on given device.

        Raises:
            TypeError: if the selected Fuchsia interface name is unset.
            ValueError: if role is not a known DeviceRole.
        """

        if isinstance(device, FuchsiaDevice):
            # Refresh cached interface names before reading them.
            device.update_wlan_interfaces()
            if role is DeviceRole.CLIENT:
                if device.wlan_client_test_interface_name is None:
                    raise TypeError(
                        "Expected wlan_client_test_interface_name to be str"
                    )
                return device.wlan_client_test_interface_name
            if role is DeviceRole.AP:
                if device.wlan_ap_test_interface_name is None:
                    raise TypeError("Expected wlan_ap_test_interface_name to be str")
                return device.wlan_ap_test_interface_name
            raise ValueError(f"Unsupported interface role: {role}")
        else:
            return device.get_default_wlan_test_interface()
+
    def wait_for_ipv4_address(
        self,
        device: SupportsWLAN | AccessPoint,
        interface_name: str,
        timeout: int = DEFAULT_TIMEOUT,
    ):
        """Waits for interface on a wlan_device to get an ipv4 address.

        Args:
            device: wlan_device or AccessPoint to check interface
            interface_name: name of the interface to check
            timeout: seconds to wait before raising an error

        Returns:
            The first private IPv4 address found on the interface.

        Raises:
            ConnectionError, if interface does not have an ipv4 address after timeout
            TypeError, if device is not an AccessPoint or a known wlan_device type
        """
        # Select the transport used to query addresses on this device type.
        comm_channel: SshConnection | FuchsiaDevice | AndroidDevice
        if isinstance(device, AccessPoint):
            comm_channel = device.ssh
        elif isinstance(device, FuchsiaWlanDevice):
            comm_channel = device.device
        elif isinstance(device, AndroidWlanDevice):
            comm_channel = device.device
        else:
            raise TypeError(f"Invalid device type {type(device)}")

        # Poll once per second until an address appears or the deadline passes.
        end_time = time.time() + timeout
        while time.time() < end_time:
            ips = utils.get_interface_ip_addresses(comm_channel, interface_name)
            if len(ips["ipv4_private"]) > 0:
                self.log.info(
                    f"Device {device.identifier} interface {interface_name} has "
                    f"ipv4 address {ips['ipv4_private'][0]}"
                )
                return ips["ipv4_private"][0]
            else:
                time.sleep(1)
        raise ConnectionError(
            f"After {timeout} seconds, device {device.identifier} still does not have "
            f"an ipv4 address on interface {interface_name}."
        )
+
+    def run_iperf_traffic(
+        self,
+        ip_client: iperf_client.IPerfClientOverAdb | iperf_client.IPerfClientOverSsh,
+        server_address: str,
+        server_port: int = 5201,
+    ) -> None:
+        """Runs traffic between client and ap an verifies throughput.
+
+        Args:
+            ip_client: iperf client to use
+            server_address: ipv4 address of the iperf server to use
+            server_port: port of the iperf server
+
+        Raises:
+            ConnectionError if no traffic passes in either direction
+        """
+        ip_client_identifier = self.get_iperf_client_identifier(ip_client)
+
+        self.log.info(
+            f"Running traffic from iperf client {ip_client_identifier} to "
+            f"iperf server {server_address}."
+        )
+        client_to_ap_path = ip_client.start(
+            server_address, f"-i 1 -t 10 -J -p {server_port}", "client_to_soft_ap"
+        )
+
+        client_to_ap_result = iperf_server.IPerfResult(client_to_ap_path)
+        if not client_to_ap_result.avg_receive_rate:
+            raise ConnectionError(
+                f"Failed to pass traffic from iperf client {ip_client_identifier} to "
+                f"iperf server {server_address}."
+            )
+
+        self.log.info(
+            f"Passed traffic from iperf client {ip_client_identifier} to "
+            f"iperf server {server_address} with avg rate of "
+            f"{client_to_ap_result.avg_receive_rate} MB/s."
+        )
+
+        self.log.info(
+            f"Running traffic from iperf server {server_address} to "
+            f"iperf client {ip_client_identifier}."
+        )
+        ap_to_client_path = ip_client.start(
+            server_address, f"-i 1 -t 10 -R -J -p {server_port}", "soft_ap_to_client"
+        )
+
+        ap_to_client_result = iperf_server.IPerfResult(ap_to_client_path)
+        if not ap_to_client_result.avg_receive_rate:
+            raise ConnectionError(
+                f"Failed to pass traffic from iperf server {server_address} to "
+                f"iperf client {ip_client_identifier}."
+            )
+
+        self.log.info(
+            f"Passed traffic from iperf server {server_address} to "
+            f"iperf client {ip_client_identifier} with avg rate of "
+            f"{ap_to_client_result.avg_receive_rate} MB/s."
+        )
+
+    def run_iperf_traffic_parallel_process(
+        self,
+        ip_client: iperf_client.IPerfClientOverAdb | iperf_client.IPerfClientOverSsh,
+        server_address: str,
+        error_queue: mp.Queue,
+        server_port: int = 5201,
+    ) -> None:
+        """Executes run_iperf_traffic using a queue to capture errors. Used
+        when running iperf in a parallel process.
+
+        Args:
+            ip_client: iperf client to use
+            server_address: ipv4 address of the iperf server to use
+            error_queue: multiprocessing queue to capture errors
+            server_port: port of the iperf server
+        """
+        try:
+            self.run_iperf_traffic(ip_client, server_address, server_port=server_port)
+        except ConnectionError as err:
+            # Report the failure through the queue instead of raising, since
+            # exceptions do not propagate out of a child process to the parent.
+            error_queue.put(
+                f"In iperf process from {self.get_iperf_client_identifier(ip_client)} to {server_address}: {err}"
+            )
+
+    def get_iperf_client_identifier(
+        self,
+        ip_client: iperf_client.IPerfClientOverAdb | iperf_client.IPerfClientOverSsh,
+    ) -> str:
+        """Retrieves an identifier string from iperf client, for logging.
+
+        Args:
+            ip_client: iperf client to grab identifier from
+        """
+        if type(ip_client) == iperf_client.IPerfClientOverAdb:
+            assert hasattr(ip_client._android_device, "serial")
+            assert isinstance(ip_client._android_device.serial, str)
+            return ip_client._android_device.serial
+        if type(ip_client) == iperf_client.IPerfClientOverSsh:
+            return ip_client._ssh_provider.config.host_name
+        raise TypeError(f'Unknown "ip_client" type {type(ip_client)}')
+
+    def assert_connected_to_ap(
+        self,
+        client: SupportsWLAN,
+        ap: SupportsWLAN | AccessPoint,
+        channel: int | None = None,
+        check_traffic: bool = False,
+        timeout_sec: int = DEFAULT_TIMEOUT,
+    ) -> None:
+        """Assert the client device has L3 connectivity to the AP."""
+        device_interface = self.get_device_test_interface(client, DeviceRole.CLIENT)
+
+        if isinstance(ap, AccessPoint):
+            if channel is None:
+                raise TypeError("channel must not be None when ap is an AccessPoint")
+            ap_interface = self.get_ap_test_interface(ap, channel)
+        else:
+            ap_interface = self.get_device_test_interface(ap, DeviceRole.AP)
+
+        client_ipv4 = self.wait_for_ipv4_address(
+            client, device_interface, timeout=timeout_sec
+        )
+        ap_ipv4 = self.wait_for_ipv4_address(ap, ap_interface, timeout=timeout_sec)
+
+        client_ping = client.ping(ap_ipv4, timeout=DEFAULT_TIMEOUT * 1000)
+        asserts.assert_true(
+            client_ping.success,
+            f"Failed to ping from client to ap: {client_ping}",
+        )
+
+        ap_ping = ap.ping(client_ipv4, timeout=DEFAULT_TIMEOUT * 1000)
+        asserts.assert_true(
+            ap_ping.success,
+            f"Failed to ping from ap to client: {ap_ping}",
+        )
+
+        if not check_traffic:
+            return
+
+        if client is self.dut:
+            self.run_iperf_traffic(self.iperf_clients_map[ap], client_ipv4)
+        else:
+            self.run_iperf_traffic(self.iperf_clients_map[client], ap_ipv4)
+
+    def assert_disconnected_to_ap(
+        self,
+        client: SupportsWLAN,
+        ap: SupportsWLAN | AccessPoint,
+        channel: int | None = None,
+        timeout_sec: int = DEFAULT_NO_ADDR_EXPECTED_TIMEOUT,
+    ) -> None:
+        """Assert the client device does not have ping connectivity to the AP."""
+        device_interface = self.get_device_test_interface(client, DeviceRole.CLIENT)
+
+        if isinstance(ap, AccessPoint):
+            if channel is None:
+                raise TypeError("channel must not be None when ap is an AccessPoint")
+            ap_interface = self.get_ap_test_interface(ap, channel)
+        else:
+            ap_interface = self.get_device_test_interface(ap, DeviceRole.AP)
+
+        try:
+            client_ipv4 = self.wait_for_ipv4_address(
+                client, device_interface, timeout=timeout_sec
+            )
+            ap_ipv4 = self.wait_for_ipv4_address(ap, ap_interface, timeout=timeout_sec)
+        except ConnectionError:
+            # When disconnected, IP addresses aren't always available.
+            return
+
+        asserts.assert_false(
+            client.ping(ap_ipv4, timeout=DEFAULT_TIMEOUT * 1000).success,
+            "Unexpectedly succeeded to ping from client to ap",
+        )
+        asserts.assert_false(
+            ap.ping(client_ipv4, timeout=DEFAULT_TIMEOUT * 1000).success,
+            "Unexpectedly succeeded to ping from ap to client",
+        )
+
+    # Runners for Generated Test Cases
+
+    def run_soft_ap_association_stress_test(self, test: AssociationStressTestParams):
+        """Sets up a SoftAP, and repeatedly associates and disassociates a client."""
+        self.log.info(
+            f"Running association stress test type {test.test_type} in "
+            f"iteration {test.iterations} times"
+        )
+
+        self.start_soft_ap(test.soft_ap_params)
+
+        passed_count = 0
+        for run in range(test.iterations):
+            try:
+                self.log.info(f"Starting SoftAp association run {str(run + 1)}")
+
+                if test.test_type == TestType.ASSOCIATE_ONLY:
+                    self.associate_with_soft_ap(
+                        self.primary_client, test.soft_ap_params
+                    )
+
+                elif test.test_type == TestType.ASSOCIATE_AND_PING:
+                    self.associate_with_soft_ap(
+                        self.primary_client, test.soft_ap_params
+                    )
+                    self.assert_connected_to_ap(self.primary_client, self.dut)
+
+                elif test.test_type == TestType.ASSOCIATE_AND_PASS_TRAFFIC:
+                    self.associate_with_soft_ap(
+                        self.primary_client, test.soft_ap_params
+                    )
+                    self.assert_connected_to_ap(
+                        self.primary_client, self.dut, check_traffic=True
+                    )
+
+                else:
+                    raise AttributeError(f"Invalid test type: {test.test_type}")
+
+            except signals.TestFailure as err:
+                self.log.error(
+                    f"SoftAp association stress run {str(run + 1)} failed. "
+                    f"Err: {err.details}"
+                )
+            else:
+                self.log.info(
+                    f"SoftAp association stress run {str(run + 1)} successful."
+                )
+                passed_count += 1
+
+        if passed_count < test.iterations:
+            asserts.fail(
+                "SoftAp association stress test failed after "
+                f"{passed_count}/{test.iterations} runs."
+            )
+
+        asserts.explicit_pass(
+            f"SoftAp association stress test passed after {passed_count}/{test.iterations} "
+            "runs."
+        )
+
+    # Alternate SoftAP and Client mode test
+
+    def run_soft_ap_and_client_mode_alternating_test(
+        self, test: ClientModeAlternatingTestParams
+    ) -> None:
+        """Runs a single soft_ap and client alternating stress test.
+
+        Each iteration brings SoftAP up then down, followed by client mode up
+        then down, verifying connectivity at each step via the toggle helpers.
+
+        See test_soft_ap_and_client_mode_alternating_stress for details.
+
+        Args:
+            test: parameters for the alternating stress test
+
+        Raises:
+            signals.TestSkip: if no access point is provided
+        """
+        if self.access_point is None:
+            raise signals.TestSkip("No access point provided")
+
+        test.ap_params.setup_ap(self.access_point)
+
+        for _ in range(test.iterations):
+            # Toggle SoftAP on then off.
+            self.toggle_soft_ap(test.soft_ap_params, STATE_DOWN)
+            self.toggle_soft_ap(test.soft_ap_params, STATE_UP)
+
+            # Toggle client mode on then off.
+            self.toggle_client_mode(self.access_point, test.ap_params, STATE_DOWN)
+            self.toggle_client_mode(self.access_point, test.ap_params, STATE_UP)
+
+    # Toggle Stress Test Helper Functions
+
+    # Stress Test Toggle Functions
+
+    def start_soft_ap_and_verify_connected(
+        self, client: SupportsWLAN, soft_ap_params: SoftAPParams
+    ) -> None:
+        """Sets up SoftAP, associates a client, then verifies connection.
+
+        Args:
+            client: client to use to verify SoftAP
+            soft_ap_params: parameters to set up the SoftAP with
+
+        Raises:
+            StressTestIterationFailure, if toggle occurs, but connection
+            is not functioning as expected
+        """
+        # Change SSID every time, to avoid client connection issues.
+        soft_ap_params.ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        self.start_soft_ap(soft_ap_params)
+        self.associate_with_soft_ap(client, soft_ap_params)
+        self.assert_connected_to_ap(client, self.dut)
+
+    def stop_soft_ap_and_verify_disconnected(
+        self, client: SupportsWLAN, soft_ap_params: SoftAPParams
+    ) -> None:
+        """Tears down SoftAP, and verifies connection is down.
+
+        Args:
+            client: client to use to verify SoftAP is down
+            soft_ap_params: parameters of the SoftAP to tear down
+
+        Raise:
+            EnvironmentError, if client and AP can still communicate
+        """
+        self.log.info("Stopping SoftAP on DUT.")
+        self.stop_soft_ap(soft_ap_params)
+        self.assert_disconnected_to_ap(client, self.dut)
+
+    def start_client_mode_and_verify_connected(
+        self, access_point: AccessPoint, ap_params: APParams
+    ):
+        """Connects DUT to AP in client mode and verifies connection
+
+        Args:
+            ap_params: dict, containing parameters of the AP network
+
+        Raises:
+            EnvironmentError, if DUT fails to associate altogether
+            StressTestIterationFailure, if DUT associates but connection is not
+                functioning as expected.
+        """
+        self.log.info(f"Associating DUT with AP network: {ap_params.ssid}")
+        associated = self.dut.associate(
+            target_ssid=ap_params.ssid,
+            target_pwd=ap_params.password,
+            target_security=ap_params.security.security_mode,
+        )
+        if not associated:
+            raise EnvironmentError("Failed to associate DUT in client mode.")
+        else:
+            self.log.info("Association successful.")
+
+        self.assert_connected_to_ap(self.dut, access_point, channel=ap_params.channel)
+
+    def stop_client_mode_and_verify_disconnected(
+        self, access_point: AccessPoint, ap_params: APParams
+    ) -> None:
+        """Disconnects DUT from AP and verifies connection is down.
+
+        Args:
+            access_point: AP to verify disconnection from
+            ap_params: parameters of the AP network
+
+        Raises:
+            EnvironmentError, if DUT and AP can still communicate
+        """
+        self.log.info("Disconnecting DUT from AP.")
+        self.dut.disconnect()
+        self.assert_disconnected_to_ap(
+            self.dut, access_point, channel=ap_params.channel
+        )
+
+    # Toggle Stress Test Iteration and Pre-Test Functions
+
+    # SoftAP Toggle Stress Test Helper Functions
+
+    def soft_ap_toggle_test(self, test: ToggleTestParams) -> None:
+        current_state = STATE_DOWN
+        for i in range(test.iterations):
+            self.toggle_soft_ap(test.soft_ap_params, current_state)
+            current_state = not current_state
+
+    def toggle_soft_ap(
+        self, soft_ap_params: SoftAPParams, current_state: bool
+    ) -> None:
+        """Runs a single iteration of SoftAP toggle stress test
+
+        Args:
+            soft_ap_params: parameters of the SoftAP network to toggle
+            current_state: current state of SoftAP (True if up,
+                else False)
+
+        Raises:
+            StressTestIterationFailure, if toggle occurs but mode isn't
+                functioning correctly.
+            EnvironmentError, if toggle fails to occur at all
+        """
+        self.log.info(f"Toggling SoftAP {'down' if current_state else 'up'}.")
+        if current_state == STATE_DOWN:
+            self.start_soft_ap_and_verify_connected(self.primary_client, soft_ap_params)
+        else:
+            self.stop_soft_ap_and_verify_disconnected(
+                self.primary_client, soft_ap_params
+            )
+
+    # Client Mode Toggle Stress Test Helper Functions
+
+    def client_mode_toggle_test(self, test: ClientModeToggleTestParams) -> None:
+        if self.access_point is None:
+            raise signals.TestSkip("No access point provided")
+
+        test.ap_params.setup_ap(self.access_point)
+
+        current_state = STATE_DOWN
+        for i in range(test.iterations):
+            self.log.info(
+                f"Iteration {i}: toggling client mode {'off' if current_state else 'on'}."
+            )
+            self.toggle_client_mode(self.access_point, test.ap_params, current_state)
+            current_state = not current_state
+
+    def toggle_client_mode(
+        self, access_point: AccessPoint, ap_params: APParams, current_state: bool
+    ) -> None:
+        """Toggles DUT client mode: connects to the AP if currently down,
+        otherwise disconnects from it. Verifies the resulting state.
+
+        Args:
+            access_point: AP for the DUT to connect to or disconnect from
+            ap_params: parameters of the AP network
+            current_state: current state of client mode (True if up,
+                else False)
+        """
+        if current_state == STATE_DOWN:
+            self.start_client_mode_and_verify_connected(access_point, ap_params)
+        else:
+            self.stop_client_mode_and_verify_disconnected(access_point, ap_params)
+
+    # TODO: Remove
+    def client_mode_toggle_test_iteration(
+        self,
+        test: ClientModeToggleTestParams,
+        access_point: AccessPoint,
+        current_state: bool,
+    ):
+        """Runs a single iteration of client mode toggle stress test
+
+        Args:
+            test: parameters for the client mode toggle stress test
+            access_point: AP for the DUT to connect to or disconnect from
+            current_state: current state of client mode (True if up,
+                else False)
+
+        Raises:
+            StressTestIterationFailure, if toggle occurs but mode isn't
+                functioning correctly.
+            EnvironmentError, if toggle fails to occur at all
+        """
+        self.log.info(f"Toggling client mode {'off' if current_state else 'on'}")
+        if current_state == STATE_DOWN:
+            self.start_client_mode_and_verify_connected(access_point, test.ap_params)
+        else:
+            self.stop_client_mode_and_verify_disconnected(access_point, test.ap_params)
+
+    # Toggle SoftAP with Client Mode Up Test Helper Functions
+
+    def soft_ap_toggle_with_client_mode_test(
+        self, test: ClientModeAlternatingTestParams
+    ) -> None:
+        if self.access_point is None:
+            raise signals.TestSkip("No access point provided")
+
+        test.ap_params.setup_ap(self.access_point)
+        self.start_client_mode_and_verify_connected(self.access_point, test.ap_params)
+
+        current_state = STATE_DOWN
+        for i in range(test.iterations):
+            self.toggle_soft_ap(test.soft_ap_params, current_state)
+            self.assert_connected_to_ap(
+                self.dut, self.access_point, channel=test.ap_params.channel
+            )
+            current_state = not current_state
+
+    # Toggle Client Mode with SoftAP Up Test Helper Functions
+
+    def client_mode_toggle_with_soft_ap_test(
+        self, test: ClientModeAlternatingTestParams
+    ) -> None:
+        if self.access_point is None:
+            raise signals.TestSkip("No access point provided")
+
+        test.ap_params.setup_ap(self.access_point)
+        self.start_soft_ap_and_verify_connected(
+            self.primary_client, test.soft_ap_params
+        )
+
+        current_state = STATE_DOWN
+        for i in range(test.iterations):
+            self.toggle_client_mode(self.access_point, test.ap_params, current_state)
+            self.assert_connected_to_ap(self.primary_client, self.dut)
+            current_state = not current_state
+
+    # Toggle SoftAP and Client Mode Randomly
+
+    def soft_ap_and_client_mode_random_toggle_test(
+        self, test: ClientModeAlternatingTestParams
+    ) -> None:
+        if self.access_point is None:
+            raise signals.TestSkip("No access point provided")
+
+        test.ap_params.setup_ap(self.access_point)
+
+        current_soft_ap_state = STATE_DOWN
+        current_client_mode_state = STATE_DOWN
+        for i in range(test.iterations):
+            # Randomly determine if softap, client mode, or both should
+            # be toggled.
+            rand_toggle_choice = random.randrange(0, 3)
+            if rand_toggle_choice <= 1:
+                self.toggle_soft_ap(test.soft_ap_params, current_soft_ap_state)
+                current_soft_ap_state = not current_soft_ap_state
+            if rand_toggle_choice >= 1:
+                self.toggle_client_mode(
+                    self.access_point, test.ap_params, current_client_mode_state
+                )
+                current_client_mode_state = not current_client_mode_state
+
+            if current_soft_ap_state == STATE_UP:
+                self.assert_connected_to_ap(self.primary_client, self.dut)
+            else:
+                self.assert_disconnected_to_ap(self.primary_client, self.dut)
+
+            if current_client_mode_state == STATE_UP:
+                self.assert_connected_to_ap(
+                    self.dut, self.access_point, channel=test.ap_params.channel
+                )
+            else:
+                self.assert_disconnected_to_ap(
+                    self.dut, self.access_point, channel=test.ap_params.channel
+                )
+
+    # Test Cases
+
+    def test_multi_client(self):
+        """Tests multi-client association with a single soft AP network.
+
+        This tests associates a variable length list of clients, verfying it can
+        can ping the SoftAP and pass traffic, and then verfies all previously
+        associated clients can still ping and pass traffic.
+
+        The same occurs in reverse for disassocations.
+
+        SoftAP parameters can be changed from default via ACTS config:
+        Example Config
+        "soft_ap_test_params" : {
+            "multi_client_test_params": {
+                "ssid": "testssid",
+                "security_type": "wpa2",
+                "password": "password",
+                "connectivity_mode": "local_only",
+                "operating_band": "only_2_4_ghz"
+            }
+        }
+        """
+        asserts.skip_if(len(self.clients) < 2, "Test requires at least 2 SoftAPClients")
+
+        test_params = self.soft_ap_test_params.get("multi_client_test_params", {})
+        soft_ap_params = SoftAPParams.from_dict(test_params.get("soft_ap_params", {}))
+
+        self.start_soft_ap(soft_ap_params)
+
+        associated: list[dict[str, Any]] = []
+
+        for client in self.clients:
+            # Associate new client
+            self.associate_with_soft_ap(client, soft_ap_params)
+            self.assert_connected_to_ap(client, self.dut)
+
+            # Verify previously associated clients still behave as expected
+            for associated_client in associated:
+                id = associated_client["device"].identifier
+                self.log.info(
+                    f"Verifying previously associated client {id} still "
+                    "functions correctly."
+                )
+                self.assert_connected_to_ap(
+                    associated_client["device"], self.dut, check_traffic=True
+                )
+
+            client_interface = self.get_device_test_interface(client, DeviceRole.CLIENT)
+            client_ipv4 = self.wait_for_ipv4_address(client, client_interface)
+            associated.append({"device": client, "address": client_ipv4})
+
+        self.log.info("All devices successfully associated.")
+
+        self.log.info("Verifying all associated clients can ping eachother.")
+        for transmitter in associated:
+            for receiver in associated:
+                if transmitter != receiver:
+                    if not transmitter["device"].can_ping(receiver["address"]):
+                        asserts.fail(
+                            "Could not ping from one associated client "
+                            f"({transmitter['address']}) to another "
+                            f"({receiver['address']})."
+                        )
+                    else:
+                        self.log.info(
+                            "Successfully pinged from associated client "
+                            f"({transmitter['address']}) to another "
+                            f"({receiver['address']})"
+                        )
+
+        self.log.info(
+            "All associated clients can ping each other. Beginning disassociations."
+        )
+
+        while len(associated) > 0:
+            # Disassociate client
+            client = associated.pop()["device"]
+            self.disconnect_from_soft_ap(client)
+
+            # Verify still connected clients still behave as expected
+            for associated_client in associated:
+                id = associated_client["device"].identifier
+                self.log.info(
+                    f"Verifying still associated client {id} still functions correctly."
+                )
+                self.assert_connected_to_ap(
+                    associated_client["device"], self.dut, check_traffic=True
+                )
+
+        self.log.info("All disassociations occurred smoothly.")
+
+    def test_simultaneous_soft_ap_and_client(self):
+        """Tests FuchsiaDevice DUT can act as a client and a SoftAP
+        simultaneously.
+
+        Raises:
+            ConnectionError: if DUT fails to connect as client
+            RuntimeError: if parallel processes fail to join
+            TestFailure: if DUT fails to pass traffic as either a client or an
+                AP
+        """
+        if self.access_point is None:
+            raise signals.TestSkip("No access point provided")
+
+        self.log.info("Setting up AP using hostapd.")
+        test_params = self.soft_ap_test_params.get("soft_ap_and_client_test_params", {})
+
+        # Configure AP
+        ap_params = APParams.from_dict(test_params.get("ap_params", {}))
+
+        # Setup AP and associate DUT
+        ap_params.setup_ap(self.access_point)
+        try:
+            self.start_client_mode_and_verify_connected(self.access_point, ap_params)
+        except Exception as err:
+            asserts.fail(f"Failed to set up client mode. Err: {err}")
+
+        # Setup SoftAP
+        soft_ap_params = SoftAPParams.from_dict(test_params.get("soft_ap_params", {}))
+        self.start_soft_ap_and_verify_connected(self.primary_client, soft_ap_params)
+
+        # Get FuchsiaDevice test interfaces
+        dut_ap_interface = self.get_device_test_interface(self.dut, role=DeviceRole.AP)
+        dut_client_interface = self.get_device_test_interface(
+            self.dut, role=DeviceRole.CLIENT
+        )
+
+        # Get FuchsiaDevice addresses
+        dut_ap_ipv4 = self.wait_for_ipv4_address(self.dut, dut_ap_interface)
+        dut_client_ipv4 = self.wait_for_ipv4_address(self.dut, dut_client_interface)
+
+        # Set up secondary iperf server of FuchsiaDevice
+        self.log.info("Setting up second iperf server on FuchsiaDevice DUT.")
+        secondary_iperf_server = iperf_server.IPerfServerOverSsh(
+            self.iperf_server_settings, DEFAULT_IPERF_PORT + 1, use_killall=True
+        )
+        secondary_iperf_server.start()
+
+        # Set up iperf client on AP
+        self.log.info("Setting up iperf client on AP.")
+        ap_iperf_client = iperf_client.IPerfClientOverSsh(
+            self.access_point.ssh_provider,
+        )
+
+        # Setup iperf processes:
+        #     Primary client <-> SoftAP interface on FuchsiaDevice
+        #     AP <-> Client interface on FuchsiaDevice
+        process_errors: mp.Queue = mp.Queue()
+        iperf_soft_ap = mp.Process(
+            target=self.run_iperf_traffic_parallel_process,
+            args=[
+                self.iperf_clients_map[self.primary_client],
+                dut_ap_ipv4,
+                process_errors,
+            ],
+        )
+
+        iperf_fuchsia_client = mp.Process(
+            target=self.run_iperf_traffic_parallel_process,
+            args=[ap_iperf_client, dut_client_ipv4, process_errors],
+            kwargs={"server_port": 5202},
+        )
+
+        # Run iperf processes simultaneously
+        self.log.info(
+            "Running simultaneous iperf traffic: between AP and DUT "
+            "client interface, and DUT AP interface and client."
+        )
+
+        iperf_soft_ap.start()
+        iperf_fuchsia_client.start()
+
+        # Block until processes can join or timeout
+        for proc in [iperf_soft_ap, iperf_fuchsia_client]:
+            proc.join(timeout=DEFAULT_IPERF_TIMEOUT)
+            if proc.is_alive():
+                proc.terminate()
+                proc.join()
+                raise RuntimeError(f"Failed to join process {proc}")
+
+        # Stop iperf server (also stopped in teardown class as failsafe)
+        secondary_iperf_server.stop()
+
+        # Check errors from parallel processes
+        if process_errors.empty():
+            asserts.explicit_pass(
+                "FuchsiaDevice was successfully able to pass traffic as a "
+                "client and an AP simultaneously."
+            )
+        else:
+            while not process_errors.empty():
+                self.log.error(f"Error in iperf process: {process_errors.get()}")
+            asserts.fail(
+                "FuchsiaDevice failed to pass traffic as a client and an AP "
+                "simultaneously."
+            )
+
+    def generate_association_stress_tests(self):
+        """Repeatedly associate and disassociate a client.
+
+        Creates one SoftAP and uses one client.
+
+        Example config:
+
+        soft_ap_test_params:
+          soft_ap_association_stress_tests:
+          - soft_ap_params:
+              ssid: "test_network"
+              security_type: "wpa2"
+              password: "password"
+              connectivity_mode: "local_only"
+              operating_band: "only_2_4_ghz"
+            iterations: 10
+        """
+        test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
+            "test_soft_ap_association_stress",
+            [],
+        )
+
+        tests = [AssociationStressTestParams.from_dict(spec) for spec in test_specs]
+
+        if len(tests) == 0:
+            # Add default test
+            tests.append(AssociationStressTestParams.from_dict({}))
+
+        def generate_name(test: AssociationStressTestParams) -> str:
+            return f"test_association_stress_{test}"
+
+        self.generate_tests(
+            self.run_soft_ap_association_stress_test,
+            generate_name,
+            tests,
+        )
+
+    def generate_soft_ap_and_client_mode_alternating_stress_tests(self):
+        """Alternate between SoftAP and Client modes.
+
+        Each tests sets up an AP. Then, for each iteration:
+            - DUT starts up SoftAP, client associates with SoftAP,
+                connection is verified, then disassociates
+            - DUT associates to the AP, connection is verified, then
+                disassociates
+
+        Example Config:
+
+        soft_ap_test_params:
+          toggle_soft_ap_and_client_tests:
+          - ap_params:
+              ssid: "test-ap-network"
+              security_mode: "wpa2"
+              password: "password"
+              channel: 6
+            soft_ap_params:
+              ssid: "test-soft-ap-network"
+              security_type: "wpa2"
+              password: "other-password"
+              connectivity_mode: "local_only"
+              operating_band: "only_2_4_ghz"
+            iterations: 5
+        """
+        test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
+            "toggle_soft_ap_and_client_tests",
+            [],
+        )
+
+        tests = [ClientModeAlternatingTestParams.from_dict(spec) for spec in test_specs]
+
+        if len(tests) == 0:
+            # Add default test
+            tests.append(ClientModeAlternatingTestParams.from_dict({}))
+
+        def generate_name(test: ClientModeAlternatingTestParams) -> str:
+            return f"test_soft_ap_and_client_mode_alternating_stress_{test}"
+
+        self.generate_tests(
+            self.run_soft_ap_and_client_mode_alternating_test,
+            generate_name,
+            tests,
+        )
+
+    def generate_soft_ap_toggle_stress_tests(self):
+        """Toggle SoftAP up and down.
+
+        If toggled up, a client is associated and connection is verified
+        If toggled down, test verifies client is not connected
+
+        Will run with default params, but custom tests can be provided in the
+        Mobly config.
+
+        Example Config
+
+        soft_ap_test_params:
+          test_soft_ap_toggle_stress:
+            soft_ap_params:
+              security_type: "wpa2"
+              password: "password"
+              connectivity_mode: "local_only"
+              operating_band: "only_2_4_ghz"
+            iterations: 5
+        """
+        test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
+            "test_soft_ap_toggle_stress",
+            [],
+        )
+
+        tests = [ToggleTestParams.from_dict(spec) for spec in test_specs]
+
+        if len(tests) == 0:
+            # Add default test
+            tests.append(ToggleTestParams.from_dict({}))
+
+        def generate_name(test: ToggleTestParams) -> str:
+            return f"test_soft_ap_toggle_stress_{test}"
+
+        self.generate_tests(
+            self.soft_ap_toggle_test,
+            generate_name,
+            tests,
+        )
+
    def generate_client_mode_toggle_stress_tests(self) -> None:
        """Toggles client mode up and down.

        If toggled up, DUT associates to AP, and connection is verified
        If toggled down, test verifies DUT is not connected to AP

        Will run with default params, but custom tests can be provided in the
        Mobly config.

        Example Config

        soft_ap_test_params:
          test_client_mode_toggle_stress:
            soft_ap_params:
              security_type: "wpa2"
              password: "password"
              connectivity_mode: "local_only"
              operating_band: "only_2_4_ghz"
            iterations: 10
        """
        test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
            "test_client_mode_toggle_stress",
            [],
        )

        tests = [ClientModeToggleTestParams.from_dict(spec) for spec in test_specs]

        if len(tests) == 0:
            # Add default test when the Mobly config supplies no specs.
            tests.append(ClientModeToggleTestParams.from_dict({}))

        def generate_name(test: ClientModeToggleTestParams) -> str:
            # The params object's str() encodes its settings into the test name.
            return f"test_client_mode_toggle_stress_{test}"

        self.generate_tests(
            self.client_mode_toggle_test,
            generate_name,
            tests,
        )
+
    def generate_soft_ap_toggle_stress_with_client_mode_tests(self) -> None:
        """Same as test_soft_ap_toggle_stress, but client mode is set up
        at test start and verified after every toggle."""

        test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
            "test_soft_ap_toggle_stress_with_client_mode",
            [],
        )

        tests = [ClientModeAlternatingTestParams.from_dict(spec) for spec in test_specs]

        if len(tests) == 0:
            # Add default test when the Mobly config supplies no specs.
            tests.append(ClientModeAlternatingTestParams.from_dict({}))

        def generate_name(test: ClientModeAlternatingTestParams) -> str:
            return f"test_soft_ap_toggle_stress_with_client_mode_{test}"

        self.generate_tests(
            self.soft_ap_toggle_with_client_mode_test,
            generate_name,
            tests,
        )
+
    def generate_client_mode_toggle_stress_with_soft_ap_tests(self) -> None:
        """Same as test_client_mode_toggle_stress, but softap is set up at
        test start and verified after every toggle."""
        test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
            "test_client_mode_toggle_stress_with_soft_ap",
            [],
        )

        tests = [ClientModeAlternatingTestParams.from_dict(spec) for spec in test_specs]

        if len(tests) == 0:
            # Add default test when the Mobly config supplies no specs.
            tests.append(ClientModeAlternatingTestParams.from_dict({}))

        def generate_name(test: ClientModeAlternatingTestParams) -> str:
            return f"test_client_mode_toggle_stress_with_soft_ap_{test}"

        # NOTE(review): this registers soft_ap_toggle_with_client_mode_test as
        # the test logic — the same method the soft-AP-toggle generator above
        # uses. Given this generator's name and docstring, a
        # client_mode_toggle_with_soft_ap_test method may have been intended;
        # confirm against the test-logic methods defined earlier in the file.
        self.generate_tests(
            self.soft_ap_toggle_with_client_mode_test,
            generate_name,
            tests,
        )
+
    def generate_soft_ap_and_client_mode_random_toggle_stress_tests(self) -> None:
        """Same as above toggle stress tests, but each iteration, either softap,
        client mode, or both are toggled, then states are verified."""
        test_specs: list[dict[str, Any]] = self.soft_ap_test_params.get(
            "test_soft_ap_and_client_mode_random_toggle_stress",
            [],
        )

        tests = [ClientModeAlternatingTestParams.from_dict(spec) for spec in test_specs]

        if len(tests) == 0:
            # Add default test when the Mobly config supplies no specs.
            tests.append(ClientModeAlternatingTestParams.from_dict({}))

        def generate_name(test: ClientModeAlternatingTestParams) -> str:
            return f"test_soft_ap_and_client_mode_random_toggle_stress_{test}"

        self.generate_tests(
            self.soft_ap_and_client_mode_random_toggle_test,
            generate_name,
            tests,
        )
+
+
if __name__ == "__main__":
    # Entry point when this file is invoked directly rather than via a suite.
    test_runner.main()
diff --git a/tests/wlan/functional/WlanDriverRestartTest.py b/tests/wlan/functional/WlanDriverRestartTest.py
new file mode 100644
index 0000000..8fcd12f
--- /dev/null
+++ b/tests/wlan/functional/WlanDriverRestartTest.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+
+from mobly import asserts, signals, test_runner
+from mobly.config_parser import TestRunConfig
+
+from antlion import base_test, controllers
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+
+# Time to wait until an interface is recreated after the softmac WLAN driver
+# restarts.
+DELAY_FOR_DRIVER_RESTART_SEC = 2.0
+
+
class WlanDriverRestartTest(base_test.AntlionBaseTest):
    """Verifies the softmac WLAN driver recreates its interface on restart.

    Testbed Requirement:
    * One Fuchsia device running the Intel (iwlwifi) softmac WLAN driver.
    """

    def __init__(self, configs: TestRunConfig) -> None:
        super().__init__(configs)
        self.log = logging.getLogger()

    def setup_class(self) -> None:
        """Register the Fuchsia DUT and skip unless iwlwifi is present."""
        super().setup_class()

        fuchsia_devices: list[FuchsiaDevice] = self.register_controller(
            controllers.fuchsia_device
        )
        self.fuchsia_device = fuchsia_devices[0]

        # Skip this test suite if the device isn't running a softmac WLAN driver.
        driver_list_resp = self.fuchsia_device.ffx.run(["driver", "list"])
        driver_list = str(driver_list_resp.stdout, "UTF-8")
        # Bug fix: str.find() returns -1 (truthy) when the substring is absent
        # and 0 (falsy) when it starts the string, so the previous
        # `not driver_list.find("iwlwifi")` skipped only when "iwlwifi" sat at
        # index 0. A membership test expresses the intent: skip when the
        # driver is NOT present.
        if "iwlwifi" not in driver_list:
            raise signals.TestSkip(
                "No intel WiFi driver found on this device, skipping test"
            )

    def test_driver_restart_recreates_interface(self) -> None:
        """Verify the WLAN interface gets recreated after its driver restarts."""
        # Store existing phy and interface identifiers.
        phys = self.fuchsia_device.sl4f.wlan_lib.get_phy_id_list()
        asserts.assert_equal(len(phys), 1, "Expected one phy_id")
        old_interfaces = self.fuchsia_device.sl4f.wlan_lib.get_iface_id_list()
        asserts.assert_not_equal(old_interfaces, [], "Iface not found.")

        # Restarting should replace the old interface with a new one.
        self.fuchsia_device.ffx.run(
            [
                "driver",
                "restart",
                "fuchsia-pkg://fuchsia.com/iwlwifi#meta/iwlwifi.cm",
            ]
        )

        # Poll until the old interface disappears and exactly one new one
        # shows up, failing after DELAY_FOR_DRIVER_RESTART_SEC.
        timeout = time.time() + DELAY_FOR_DRIVER_RESTART_SEC
        while time.time() < timeout:
            new_interfaces = self.fuchsia_device.sl4f.wlan_lib.get_iface_id_list()

            if new_interfaces == old_interfaces:
                # Interface has not been deleted yet. Keep waiting.
                time.sleep(0.1)
                continue
            if len(new_interfaces) == 0:
                # Interface has not come back up yet. Keep waiting.
                time.sleep(0.1)
                continue
            if len(new_interfaces) == 1:
                # New interface has been added! All done here
                break

            asserts.fail(
                "More interfaces exist than before! \n"
                f"Old: {old_interfaces}\n"
                f"New: {new_interfaces}"
            )
        else:
            # while/else: the loop timed out without the break above firing.
            asserts.fail(
                f"New interface not created within {DELAY_FOR_DRIVER_RESTART_SEC}s"
            )

        phys = self.fuchsia_device.sl4f.wlan_lib.get_phy_id_list()
        asserts.assert_equal(len(phys), 1, "Expected one phy_id")
+
+
if __name__ == "__main__":
    # Entry point when this file is invoked directly rather than via a suite.
    test_runner.main()
diff --git a/tests/wlan/functional/WlanRebootTest.py b/tests/wlan/functional/WlanRebootTest.py
new file mode 100644
index 0000000..cf76f64
--- /dev/null
+++ b/tests/wlan/functional/WlanRebootTest.py
@@ -0,0 +1,657 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import logging
+import os
+import time
+from dataclasses import dataclass
+from enum import Enum, StrEnum, auto, unique
+from multiprocessing import Process
+
+from mobly import asserts, signals, test_runner
+
+from antlion import utils
+from antlion.controllers import iperf_client, iperf_server
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib.hostapd_constants import AP_SSID_LENGTH_2G, BandType
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.logger import LogLevel
+from antlion.net import wait_for_port
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
# Seconds allowed for each iperf traffic run before it is aborted.
DEFAULT_IPERF_TIMEOUT = 30
# Seconds to wait for the DUT to (re)connect to the network after a reboot.
DUT_NETWORK_CONNECTION_TIMEOUT = 60
+
+
+@unique
+class DeviceType(StrEnum):
+    AP = auto()
+    DUT = auto()
+
+
+@unique
+class RebootType(StrEnum):
+    SOFT = auto()
+    HARD = auto()
+
+
@unique
class IpVersionType(Enum):
    """IP version(s) a test exercises: v4, v6, or both (dual stack)."""

    IPV4 = auto()
    IPV6 = auto()
    DUAL_IPV4_IPV6 = auto()

    def ipv4(self) -> bool:
        """Return True when IPv4 traffic is exercised (IPV4 or dual stack)."""
        return self is not IpVersionType.IPV6

    def ipv6(self) -> bool:
        """Return True when IPv6 traffic is exercised (IPV6 or dual stack)."""
        return self is not IpVersionType.IPV4

    @staticmethod
    def all() -> list["IpVersionType"]:
        """Return every member in definition order."""
        return list(IpVersionType)
+
+
@dataclass
class TestParams:
    """Parameters for one generated reboot test case."""

    # Which device (AP or DUT) gets rebooted.
    reboot_device: DeviceType
    # Whether the reboot is clean (soft) or a power cycle (hard).
    reboot_type: RebootType
    # Wi-Fi band the AP is brought up on.
    band: BandType
    # AP security configuration (e.g. OPEN, WPA2, WPA3).
    security_mode: SecurityMode
    # IP version(s) exercised: IPv4, IPv6, or both.
    ip_version: IpVersionType
+
+
+class WlanRebootTest(base_test.WifiBaseTest):
+    """Tests wlan reconnects in different reboot scenarios.
+
+    Testbed Requirement:
+    * One ACTS compatible device (dut)
+    * One Whirlwind Access Point (will also serve as iperf server)
+    * One PduDevice
+    """
+
    def pre_run(self) -> None:
        """Generate one reboot test per combination of parameters.

        The cartesian product of device type, reboot type, band, security
        mode, and IP version defines the generated test matrix.
        """
        test_params: list[tuple[TestParams]] = []
        for (
            device_type,
            reboot_type,
            band,
            security_mode,
            ip_version,
        ) in itertools.product(
            # DeviceType,
            # RebootType,
            # BandType,
            # SecurityMode,
            # IpVersionType,
            #
            # TODO(https://github.com/python/mypy/issues/14688): Replace the code below
            # with the commented code above once the bug affecting StrEnum resolves.
            [e for e in DeviceType],
            [e for e in RebootType],
            [e for e in BandType],
            [e for e in SecurityMode],
            [e for e in IpVersionType],
        ):
            # Each arg set is a 1-tuple because run_reboot_test takes a single
            # TestParams argument.
            test_params.append(
                (
                    TestParams(
                        device_type,
                        reboot_type,
                        band,
                        security_mode,
                        ip_version,
                    ),
                )
            )

        def generate_test_name(t: TestParams) -> str:
            test_name = (
                "test"
                f"_{t.reboot_type}_reboot"
                f"_{t.reboot_device}"
                f"_{t.band}"
                f"_{t.security_mode}"
            )
            # Dual-stack tests carry both the _ipv4 and _ipv6 suffixes.
            if t.ip_version.ipv4():
                test_name += "_ipv4"
            if t.ip_version.ipv6():
                test_name += "_ipv6"
            return test_name

        self.generate_tests(
            test_logic=self.run_reboot_test,
            name_func=generate_test_name,
            arg_sets=test_params,
        )
+
    def setup_class(self) -> None:
        """Resolve testbed controllers: access point, Fuchsia DUT, iperf client."""
        super().setup_class()
        self.log = logging.getLogger()

        if len(self.access_points) == 0:
            raise signals.TestAbortClass("Requires at least one access point")
        self.access_point = self.access_points[0]

        self.fuchsia_device, self.dut = self.get_dut_type(
            FuchsiaDevice, AssociationMode.POLICY
        )

        # Prefer a pre-configured iperf client; otherwise create one on the DUT.
        if len(self.iperf_clients) > 0:
            self.iperf_client_on_dut = self.iperf_clients[0]
        else:
            self.iperf_client_on_dut = self.dut.create_iperf_client()
+
    def setup_test(self) -> None:
        """Reset the AP and DUT to a clean, disconnected state before each test."""
        super().setup_test()
        self.access_point.stop_all_aps()
        self.dut.wifi_toggle_state(True)
        # Keep Android companion devices awake for the duration of the test.
        for ad in self.android_devices:
            ad.droid.wakeLockAcquireBright()
            ad.droid.wakeUpNow()
        self.dut.disconnect()
        if self.fuchsia_device:
            self.fuchsia_device.configure_wlan()
+
    def teardown_test(self) -> None:
        """Collect diagnostics, then tear down AP state and disconnect the DUT."""
        # TODO(b/273923552): We take a snapshot here and before rebooting the
        # DUT for every test because the persistence component does not make the
        # inspect logs available for 120 seconds. This helps for debugging
        # issues where we need previous state.
        self.dut.take_bug_report(self.current_test_info.record)
        self.download_logs()
        self.access_point.stop_all_aps()
        self.dut.disconnect()
        # Release the wake locks acquired in setup_test.
        for ad in self.android_devices:
            ad.droid.wakeLockRelease()
            ad.droid.goToSleepNow()
        self.dut.turn_location_off_and_scan_toggle_off()
        self.dut.reset_wifi()
        super().teardown_test()
+
    def setup_ap(
        self,
        ssid: str,
        band: BandType,
        ip_version: IpVersionType,
        security_mode: SecurityMode,
        password: str | None = None,
    ) -> None:
        """Setup ap with basic config.

        Args:
            ssid: The ssid to setup on ap
            band: The type of band to set up the iperf server with ('2g' or '5g').
            ip_version: The type of ip to use (ipv4 or ipv6)
            security_mode: The type of security mode.
            password: The PSK or passphase.
        """
        # TODO(fxb/63719): Add varying AP parameters
        security_profile = Security(security_mode=security_mode, password=password)
        channel: int

        # Fixed channels per band: 11 (2.4 GHz) and 36 (5 GHz).
        match band:
            case BandType.BAND_2G:
                channel = 11
            case BandType.BAND_5G:
                channel = 36

        setup_ap(
            access_point=self.access_point,
            profile_name="whirlwind",
            channel=channel,
            ssid=ssid,
            security=security_profile,
            is_ipv6_enabled=ip_version.ipv6(),
        )

        # IPv6-only tests disable the AP's DHCP server so the DUT cannot
        # acquire an IPv4 address.
        if not ip_version.ipv4():
            self.access_point.stop_dhcp()

        self.log.info(f"Network (SSID: {ssid}) is up.")
+
+    def setup_iperf_server_on_ap(
+        self, band: BandType
+    ) -> iperf_server.IPerfServerOverSsh:
+        """Configures iperf server based on the tests band.
+
+        Args:
+            band: The type of band to set up the iperf server with ('2g' or '5g').
+        """
+        test_interface: str
+
+        if band is BandType.BAND_2G:
+            test_interface = self.access_point.wlan_2g
+        elif band is BandType.BAND_5G:
+            test_interface = self.access_point.wlan_5g
+        else:
+            raise TypeError(f'Unknown band type: "{band}"')
+
+        return iperf_server.IPerfServerOverSsh(
+            self.access_point.ssh_settings, 5201, test_interface=test_interface
+        )
+
    def get_iperf_server_address(
        self,
        iperf_server_on_ap: iperf_server.IPerfServerOverSsh,
        ip_version: IpVersionType,
    ) -> str:
        """Retrieves the ip address of the iperf server.

        Args:
            iperf_server_on_ap: IPerfServer object, linked to AP
            ip_version: The ip version (ipv4 or ipv6)

        Returns:
            The ip address of the iperf_server

        Raises:
            TypeError: if ip_version is not IPV4 or IPV6.
        """
        # TODO(http://b/286449352): Remove this check once iperf_client has been refactored.
        assert isinstance(
            self.iperf_client_on_dut,
            (iperf_client.IPerfClientOverSsh, iperf_client.IPerfClientOverAdb),
        )

        iperf_server_addresses = iperf_server_on_ap.get_interface_ip_addresses(
            iperf_server_on_ap.test_interface
        )
        if ip_version == IpVersionType.IPV4:
            iperf_server_ip_address = iperf_server_addresses["ipv4_private"][0]
        elif ip_version == IpVersionType.IPV6:
            if iperf_server_addresses["ipv6_private_local"]:
                iperf_server_ip_address = iperf_server_addresses["ipv6_private_local"][
                    0
                ]
            else:
                # A link-local address needs a zone index (%interface) naming
                # the client-side interface to be routable from the DUT.
                iperf_server_ip_address = (
                    f"{iperf_server_addresses['ipv6_link_local'][0]}%"
                    f"{self.iperf_client_on_dut.test_interface}"
                )
        else:
            # DUAL_IPV4_IPV6 is intentionally rejected: callers must resolve
            # one concrete address family at a time.
            raise TypeError(f"Invalid IP type: {ip_version}")

        return iperf_server_ip_address
+
+    def verify_traffic_between_dut_and_ap(
+        self,
+        iperf_server_on_ap: iperf_server.IPerfServerOverSsh,
+        iperf_client_on_dut: iperf_client.IPerfClientBase,
+        ip_version: IpVersionType,
+    ) -> None:
+        """Runs IPerf traffic from the iperf client (dut) and the iperf
+        server (and vice versa) and verifies traffic was able to pass
+        successfully.
+
+        Args:
+            iperf_server_on_ap: IPerfServer object, linked to AP
+            iperf_client_on_dut: IPerfClient object, linked to DUT
+            ip_version: The ip version (ipv4 or ipv6)
+
+        Raises:
+            ConnectionError, if traffic is not passed successfully in both
+                directions.
+        """
+        iperf_server_ip_address = self.get_iperf_server_address(
+            iperf_server_on_ap, ip_version
+        )
+
+        self.log.info(
+            f"Attempting to pass traffic from DUT to IPerf server ({iperf_server_ip_address})."
+        )
+        tx_file = iperf_client_on_dut.start(
+            iperf_server_ip_address,
+            "-i 1 -t 3 -J",
+            "reboot_tx",
+            timeout=DEFAULT_IPERF_TIMEOUT,
+        )
+        tx_results = iperf_server.IPerfResult(tx_file)
+        if not tx_results.avg_receive_rate or tx_results.avg_receive_rate == 0:
+            raise ConnectionError(
+                f"Failed to pass IPerf traffic from DUT to server ({iperf_server_ip_address}). "
+                f"TX average receive rate: {tx_results.avg_receive_rate}"
+            )
+        else:
+            self.log.info(
+                f"Success: Traffic passed from DUT to IPerf server ({iperf_server_ip_address})."
+            )
+        self.log.info(
+            f"Attempting to pass traffic from IPerf server ({iperf_server_ip_address}) to DUT."
+        )
+        rx_file = iperf_client_on_dut.start(
+            iperf_server_ip_address,
+            "-i 1 -t 3 -R -J",
+            "reboot_rx",
+            timeout=DEFAULT_IPERF_TIMEOUT,
+        )
+        rx_results = iperf_server.IPerfResult(rx_file)
+        if not rx_results.avg_receive_rate or rx_results.avg_receive_rate == 0:
+            raise ConnectionError(
+                f"Failed to pass IPerf traffic from server ({iperf_server_ip_address}) to DUT. "
+                f"RX average receive rate: {rx_results.avg_receive_rate}"
+            )
+        else:
+            self.log.info(
+                f"Success: Traffic passed from IPerf server ({iperf_server_ip_address}) to DUT."
+            )
+
    def start_dut_ping_process(
        self,
        iperf_server_on_ap: iperf_server.IPerfServerOverSsh,
        ip_version: IpVersionType,
    ) -> None:
        """Creates a process that pings the AP from the DUT.

        Runs in parallel for 15 seconds, so it can be interrupted by a reboot.
        Sleeps for a few seconds to ensure pings have started.

        Args:
            iperf_server_on_ap: IPerfServer object, linked to AP
            ip_version: The ip version (ipv4 or ipv6)

        Raises:
            ConnectionError: if the AP's iperf address cannot be retrieved.
        """
        ap_address = self.get_iperf_server_address(iperf_server_on_ap, ip_version)
        if ap_address:
            self.log.info(
                f"Starting ping process to {ap_address} in parallel. Logs from this "
                "process will be suppressed, since it will be intentionally "
                "interrupted."
            )
            ping_proc = Process(
                target=self.dut.ping, args=[ap_address], kwargs={"count": 15}
            )
            # Raise the log level so the doomed ping's noise is suppressed.
            with LogLevel(self.log, logging.WARNING):
                ping_proc.start()
            # Allow for a few seconds of pinging before allowing it to be
            # interrupted.
            time.sleep(3)
        else:
            raise ConnectionError("Failed to retrieve APs iperf address.")
+
    def prepare_dut_for_reconnection(self) -> None:
        """Perform any actions to ready DUT for reconnection.

        These actions will vary depending on the DUT. eg. android devices may
        need to be woken up, ambient devices should not require any interaction,
        etc.
        """
        self.dut.wifi_toggle_state(True)
        # Android companion devices may have dozed off during the reboot.
        for ad in self.android_devices:
            ad.droid.wakeUpNow()
+
+    def wait_for_dut_network_connection(self, ssid: str) -> None:
+        """Checks if device is connected to given network. Sleeps 1 second
+        between retries.
+
+        Args:
+            ssid: ssid to check connection to.
+        Raises:
+            ConnectionError, if DUT is not connected after all timeout.
+        """
+        self.log.info(
+            f"Checking if DUT is connected to {ssid} network. Will retry for "
+            f"{DUT_NETWORK_CONNECTION_TIMEOUT} seconds."
+        )
+        timeout = time.time() + DUT_NETWORK_CONNECTION_TIMEOUT
+        while time.time() < timeout:
+            try:
+                is_connected = self.dut.is_connected(ssid=ssid)
+            except Exception as err:
+                self.log.debug(f"SL4* call failed. Retrying in 1 second. Error: {err}")
+                is_connected = False
+            finally:
+                if is_connected:
+                    self.log.info("Success: DUT has connected.")
+                    break
+                else:
+                    self.log.debug(
+                        f"DUT not connected to network {ssid}...retrying in 1 second."
+                    )
+                    time.sleep(1)
+        else:
+            raise ConnectionError("DUT failed to connect to the network.")
+
+    def write_csv_time_to_reconnect(
+        self,
+        test_name: str,
+        reconnect_success: bool,
+        time_to_reconnect: float = 0.0,
+    ) -> None:
+        """Writes the time to reconnect to a csv file.
+        Args:
+            test_name: the name of the test case
+            reconnect_success: whether the test successfully reconnected or not
+            time_to_reconnect: the time from when the rebooted device came back
+                up to when it reassociated (or 'FAIL'), if it failed to
+                reconnect.
+        """
+        csv_file_name = os.path.join(self.log_path, "time_to_reconnect.csv")
+        self.log.info(f"Writing to {csv_file_name}")
+        with open(csv_file_name, "a") as csv_file:
+            if reconnect_success:
+                csv_file.write(f"{test_name},{time_to_reconnect}\n")
+            else:
+                csv_file.write(f"{test_name},'FAIL'\n")
+
    def log_and_continue(
        self, ssid: str, time_to_reconnect: float = 0.0, error: Exception | None = None
    ) -> None:
        """Writes the time to reconnect to the csv file before continuing, used
        in stress tests runs.

        Args:
            ssid: the network the DUT attempted to reconnect to.
            time_to_reconnect: the time from when the rebooted device came back
                up to when reassociation occurred.
            error: error message to log before continuing with the test; when
                set, the csv row records a failure instead of a time.
        """
        if error:
            self.log.info(
                f"Device failed to reconnect to network {ssid}. Error: {error}"
            )
            self.write_csv_time_to_reconnect(f"{self.current_test_info.name}", False)

        else:
            self.log.info(
                f"Device successfully reconnected to network {ssid} after "
                f"{time_to_reconnect} seconds."
            )
            self.write_csv_time_to_reconnect(
                f"{self.current_test_info.name}", True, time_to_reconnect
            )
+
    def run_reboot_test(self, settings: TestParams) -> None:
        """Runs a reboot test based on a given config.
            1. Sets up a network, associates the dut, and saves the network.
            2. Verifies the dut receives ip address(es).
            3. Verifies traffic between DUT and AP (IPerf client and server).
            4. Reboots (hard or soft) the device (dut or ap).
                - If the ap was rebooted, setup the same network again.
            5. Wait for reassociation or timeout.
            6. If reassociation occurs:
                - Verifies the dut receives ip address(es).
                - Verifies traffic between DUT and AP (IPerf client and server).
            7. Logs time to reconnect (or failure to reconnect)

        Args:
            settings: TestParams dataclass containing the following values:
                reboot_device: the device to reboot either DUT or AP.
                reboot_type: how to reboot the reboot_device either hard or soft.
                band: band to setup either 2g or 5g
                security_mode: security mode to set up either OPEN, WPA2, or WPA3.
                ip_version: the ip version (ipv4 or ipv6)

        Raises:
            signals.TestFailure: if the DUT fails to reconnect after the reboot.
        """
        # TODO(b/286443517): Properly support WLAN on android devices.
        assert (
            self.fuchsia_device is not None
        ), "Fuchsia device not found, test currently does not support android devices."

        # TODO(b/286449352): Remove this check once iperf_client has been refactored.
        assert isinstance(
            self.iperf_client_on_dut,
            (iperf_client.IPerfClientOverSsh, iperf_client.IPerfClientOverAdb),
        )
        assert isinstance(self.iperf_client_on_dut.test_interface, str)

        ssid = utils.rand_ascii_str(AP_SSID_LENGTH_2G)
        reboot_device: DeviceType = settings.reboot_device
        reboot_type: RebootType = settings.reboot_type
        band: BandType = settings.band
        ip_version: IpVersionType = settings.ip_version
        security_mode: SecurityMode = settings.security_mode
        password: str | None = None
        if security_mode is not SecurityMode.OPEN:
            password = generate_random_password(security_mode=security_mode)

        # Skip hard reboots if no PDU present
        asserts.skip_if(
            reboot_type is RebootType.HARD and len(self.pdu_devices) == 0,
            "Hard reboots require a PDU device.",
        )

        self.setup_ap(
            ssid,
            band,
            ip_version,
            security_mode,
            password,
        )

        if not self.dut.associate(
            ssid,
            target_security=security_mode,
            target_pwd=password,
        ):
            raise EnvironmentError("Initial network connection failed.")

        # Run iperf to verify traffic between DUT and AP
        if ip_version.ipv4():
            self.fuchsia_device.wait_for_ipv4_addr(
                self.iperf_client_on_dut.test_interface
            )
        if ip_version.ipv6():
            self.fuchsia_device.wait_for_ipv6_addr(
                self.iperf_client_on_dut.test_interface
            )

        self.iperf_server_on_ap = self.setup_iperf_server_on_ap(band)
        self.iperf_server_on_ap.start()
        wait_for_port(self.iperf_server_on_ap.hostname, 5201)

        # Dual-stack tests verify each address family separately.
        if ip_version.ipv4():
            self.verify_traffic_between_dut_and_ap(
                self.iperf_server_on_ap,
                self.iperf_client_on_dut,
                IpVersionType.IPV4,
            )
        if ip_version.ipv6():
            self.verify_traffic_between_dut_and_ap(
                self.iperf_server_on_ap,
                self.iperf_client_on_dut,
                IpVersionType.IPV6,
            )

        # Ping from DUT to AP during AP reboot. This is interrupt testing that we do not
        # do for DUT reboots because they are prone to threading issues and not
        # supported.
        if reboot_device is DeviceType.AP:
            if ip_version.ipv4():
                self.start_dut_ping_process(self.iperf_server_on_ap, IpVersionType.IPV4)
            if ip_version.ipv6():
                self.start_dut_ping_process(self.iperf_server_on_ap, IpVersionType.IPV6)

        # TODO(b/273923552): We take a snapshot here and during test
        # teardown for every test because the persistence component does not
        # make the inspect logs available for 120 seconds. This helps for
        # debugging issues where we need previous state.
        self.dut.take_bug_report(self.current_test_info.record)

        # DUT reboots
        if reboot_device is DeviceType.DUT:
            if reboot_type is RebootType.SOFT:
                self.fuchsia_device.reboot()
            elif reboot_type is RebootType.HARD:
                self.dut.hard_power_cycle(self.pdu_devices)
            # The old client handle died with the reboot; create a fresh one.
            self.iperf_client_on_dut = self.dut.create_iperf_client()

        # AP reboots
        elif reboot_device is DeviceType.AP:
            self.iperf_server_on_ap.close_ssh()
            if reboot_type is RebootType.SOFT:
                self.log.info("Cleanly stopping ap.")
                self.access_point.stop_all_aps()
            elif reboot_type is RebootType.HARD:
                self.access_point.hard_power_cycle(self.pdu_devices)
            # Bring the same network back up so the DUT can reassociate.
            self.setup_ap(ssid, band, ip_version, security_mode, password)
            self.iperf_server_on_ap = self.setup_iperf_server_on_ap(band)

        # TODO(b/286449352): Remove this check once iperf_client has been refactored.
        assert isinstance(
            self.iperf_client_on_dut,
            (iperf_client.IPerfClientOverSsh, iperf_client.IPerfClientOverAdb),
        )
        assert isinstance(self.iperf_client_on_dut.test_interface, str)

        self.prepare_dut_for_reconnection()
        uptime = time.time()
        try:
            self.wait_for_dut_network_connection(ssid)
            time_to_reconnect = time.time() - uptime

            if ip_version.ipv4():
                self.fuchsia_device.wait_for_ipv4_addr(
                    self.iperf_client_on_dut.test_interface
                )
            if ip_version.ipv6():
                self.fuchsia_device.wait_for_ipv6_addr(
                    self.iperf_client_on_dut.test_interface
                )

            self.iperf_server_on_ap.start()

            if ip_version.ipv4():
                self.verify_traffic_between_dut_and_ap(
                    self.iperf_server_on_ap,
                    self.iperf_client_on_dut,
                    IpVersionType.IPV4,
                )
            if ip_version.ipv6():
                self.verify_traffic_between_dut_and_ap(
                    self.iperf_server_on_ap,
                    self.iperf_client_on_dut,
                    IpVersionType.IPV6,
                )
        except ConnectionError as err:
            self.log_and_continue(ssid, error=err)
            raise signals.TestFailure(f"Failed to reconnect to {ssid} after reboot.")
        else:
            self.log_and_continue(ssid, time_to_reconnect=time_to_reconnect)
+
+
if __name__ == "__main__":
    # Entry point when this file is invoked directly rather than via a suite.
    test_runner.main()
diff --git a/tests/wlan/functional/WlanScanTest.py b/tests/wlan/functional/WlanScanTest.py
new file mode 100644
index 0000000..2dcc85c
--- /dev/null
+++ b/tests/wlan/functional/WlanScanTest.py
@@ -0,0 +1,259 @@
+#!/usr/bin/env python3.4
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""
+This test exercises basic scanning functionality to confirm expected behavior
+related to wlan scanning
+"""
+
+import logging
+from datetime import datetime
+
+from mobly import asserts, signals, test_runner
+from mobly.config_parser import TestRunConfig
+from mobly.records import TestResultRecord
+
+from antlion.controllers.ap_lib import (
+    hostapd_ap_preset,
+    hostapd_bss_settings,
+    hostapd_constants,
+    hostapd_security,
+)
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.test_utils.wifi import base_test
+
+
+class WlanScanTest(base_test.WifiBaseTest):
+    """WLAN scan test class.
+
+    Test Bed Requirement:
+    * One or more Fuchsia devices
+    * Several Wi-Fi networks visible to the device, including an open Wi-Fi
+      network or a onHub/GoogleWifi
+    """
+
+    def __init__(self, configs: TestRunConfig) -> None:
+        super().__init__(configs)
+        self.log = logging.getLogger()
+
+        if len(self.access_points) < 1:
+            raise signals.TestAbortClass("Requires at least one access point")
+        self.access_point = self.access_points[0]
+        self.start_access_point = False
+
+        # This section sets up the config that could be sent to the AP if
+        # the AP is needed. The reasoning is since ACTS already connects
+        # to the AP if it is in the config, generating the config in memory
+        # has no over head is used if need by the test if one of the ssids
+        # needed for the test is not included in the config.  The logic
+        # here creates 2 ssids on each radio, 5ghz and 2.4ghz, with an
+        # open, no security network and one that is wpa2, for a total of 4
+        # networks.  However, if all of the ssids are specified in the
+        # the config will never be written to the AP and the AP will not be
+        # brought up.  For more information about how to configure the
+        # hostapd config info, see the hostapd libraries, which have more
+        # documentation.
+        bss_settings_2g = []
+        bss_settings_5g = []
+        open_network = self.get_open_network(False, [])
+        self.open_network_2g = open_network["2g"]
+        self.open_network_5g = open_network["5g"]
+        wpa2_settings = self.get_psk_network(False, [])
+        self.wpa2_network_2g = wpa2_settings["2g"]
+        self.wpa2_network_5g = wpa2_settings["5g"]
+        bss_settings_2g.append(
+            hostapd_bss_settings.BssSettings(
+                name=self.wpa2_network_2g["SSID"],
+                ssid=self.wpa2_network_2g["SSID"],
+                security=hostapd_security.Security(
+                    security_mode=self.wpa2_network_2g["security"],
+                    password=self.wpa2_network_2g["password"],
+                ),
+            )
+        )
+        bss_settings_5g.append(
+            hostapd_bss_settings.BssSettings(
+                name=self.wpa2_network_5g["SSID"],
+                ssid=self.wpa2_network_5g["SSID"],
+                security=hostapd_security.Security(
+                    security_mode=self.wpa2_network_5g["security"],
+                    password=self.wpa2_network_5g["password"],
+                ),
+            )
+        )
+        self.ap_2g = hostapd_ap_preset.create_ap_preset(
+            iface_wlan_2g=self.access_point.wlan_2g,
+            iface_wlan_5g=self.access_point.wlan_5g,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            ssid=self.open_network_2g["SSID"],
+            bss_settings=bss_settings_2g,
+        )
+        self.ap_5g = hostapd_ap_preset.create_ap_preset(
+            iface_wlan_2g=self.access_point.wlan_2g,
+            iface_wlan_5g=self.access_point.wlan_5g,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=self.open_network_5g["SSID"],
+            bss_settings=bss_settings_5g,
+        )
+
+        if "wlan_open_network_2g" in self.user_params:
+            self.open_network_2g = self.user_params.get("wlan_open_network_2g")
+        elif self.access_point:
+            self.start_access_point_2g = True
+        else:
+            raise TypeError("Missing parameter in config (wlan_open_network_2g)")
+
+        if "wlan_open_network_5g" in self.user_params:
+            self.open_network_5g = self.user_params.get("wlan_open_network_5g")
+        elif self.access_point:
+            self.start_access_point_5g = True
+        else:
+            raise TypeError("Missing parameter in config (wlan_open_network_5g)")
+
+        if "wlan_wpa2_network_2g" in self.user_params:
+            self.wpa2_network_2g = self.user_params.get("wlan_wpa2_network_2g")
+        elif self.access_point:
+            self.start_access_point_2g = True
+        else:
+            raise TypeError("Missing parameter in config (wlan_wpa2_network_2g)")
+
+        if "wlan_wpa2_network_5g" in self.user_params:
+            self.wpa2_network_5g = self.user_params.get("wlan_wpa2_network_5g")
+        elif self.access_point:
+            self.start_access_point_5g = True
+        else:
+            raise TypeError("Missing parameter in config (wlan_wpa2_network_5g)")
+
    def setup_class(self) -> None:
        """Configure WLAN on each DUT and bring up only the APs required."""
        super().setup_class()

        for fd in self.fuchsia_devices:
            fd.configure_wlan(association_mechanism="drivers")

        # Only bring up the APs that are needed for the test. Each SSID is
        # randomly generated, so there is no chance of re-associating to a
        # previously saved SSID on the device.
        # NOTE(review): start_access_point_2g/5g are only assigned in some
        # branches of __init__ -- confirm they are always defined by the
        # time this runs.
        if self.start_access_point_2g:
            self.start_access_point = True
            self.access_point.start_ap(hostapd_config=self.ap_2g)
        if self.start_access_point_5g:
            self.start_access_point = True
            self.access_point.start_ap(hostapd_config=self.ap_5g)
+
    def teardown_test(self) -> None:
        """Disconnect every Fuchsia device from any WLAN after each test."""
        for fd in self.fuchsia_devices:
            fd.sl4f.wlan_lib.disconnect()
+
    def teardown_class(self) -> None:
        """Collect device logs and stop any APs this class started."""
        self.download_logs()
        # Only stop APs if setup_class actually started one.
        if self.start_access_point:
            self.access_point.stop_all_aps()
+
    def on_fail(self, record: TestResultRecord) -> None:
        """On failure, capture per-device diagnostics and restore WLAN config."""
        for fd in self.fuchsia_devices:
            super().on_device_fail(fd, record)
            # Re-apply the driver association mechanism in case the failure
            # (or the diagnostics above) left the device reconfigured.
            fd.configure_wlan(association_mechanism="drivers")
+
+    #
+    # Helper Functions
+    #
+
+    def scan_while_connected(
+        self, wlan_network_params: base_test.Network, fd: FuchsiaDevice
+    ) -> None:
+        """Connects to as specified network and initiates a scan
+        Args:
+            wlan_network_params: A dictionary containing wlan
+                infomation.
+            fd: The fuchsia device to connect to the wlan.
+        """
+        target_ssid = wlan_network_params["SSID"]
+        self.log.info("got the ssid! %s", target_ssid)
+        target_pwd = None
+        if "password" in wlan_network_params:
+            target_pwd = wlan_network_params["password"]
+
+        bss_info = fd.sl4f.wlan_lib.scan_for_bss_info()
+        connect_success = fd.sl4f.wlan_lib.connect(
+            target_ssid,
+            target_pwd,
+            bss_info[target_ssid][0],
+        )
+        asserts.assert_true(
+            connect_success, f"Expected connect to {target_ssid} to succeed"
+        )
+        self.basic_scan_request(fd)
+
+    def basic_scan_request(self, fd: FuchsiaDevice) -> None:
+        """Initiates a basic scan on a Fuchsia device
+        Args:
+            fd: A fuchsia device
+        """
+        start_time = datetime.now()
+
+        scan_response = fd.sl4f.wlan_lib.scan_for_bss_info()
+
+        # first check if we received an error
+        if scan_response.get("error") is None:
+            # the scan command did not get an error response - go ahead
+            # and check for scan results
+            scan_results = scan_response["result"]
+        else:
+            # the response indicates an error - log and raise failure
+            raise signals.TestFailure(
+                f"Aborting test - scan failed with error: {scan_response.get('error')}"
+            )
+
+        self.log.info("scan contained %d results", len(scan_results))
+
+        total_time_ms = (datetime.now() - start_time).total_seconds() * 1000
+        self.log.info(f"scan time: {total_time_ms:.2f} ms")
+
+        if len(scan_results) > 0:
+            raise signals.TestPass(
+                details="", extras={"Scan time": f"{total_time_ms:.2f}"}
+            )
+        else:
+            raise signals.TestFailure("Scan failed or did not " "find any networks")
+
+    #
+    # Test
+    #
+
+    def test_basic_scan_request(self) -> None:
+        """Verify a general scan trigger returns at least one result"""
+        for fd in self.fuchsia_devices:
+            self.basic_scan_request(fd)
+
+    def test_scan_while_connected_open_network_2g(self) -> None:
+        for fd in self.fuchsia_devices:
+            self.scan_while_connected(self.open_network_2g, fd)
+
+    def test_scan_while_connected_wpa2_network_2g(self) -> None:
+        for fd in self.fuchsia_devices:
+            self.scan_while_connected(self.wpa2_network_2g, fd)
+
+    def test_scan_while_connected_open_network_5g(self) -> None:
+        for fd in self.fuchsia_devices:
+            self.scan_while_connected(self.open_network_5g, fd)
+
+    def test_scan_while_connected_wpa2_network_5g(self) -> None:
+        for fd in self.fuchsia_devices:
+            self.scan_while_connected(self.wpa2_network_5g, fd)
+
+
if __name__ == "__main__":
    # Entry point: run this suite directly under Mobly's test runner.
    test_runner.main()
diff --git a/tests/wlan/functional/WlanTargetSecurityTest.py b/tests/wlan/functional/WlanTargetSecurityTest.py
new file mode 100644
index 0000000..7c369e4
--- /dev/null
+++ b/tests/wlan/functional/WlanTargetSecurityTest.py
@@ -0,0 +1,386 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mobly import asserts, signals, test_runner
+from mobly.records import TestResultRecord
+
+from antlion import utils
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib.hostapd_constants import (
+    AP_DEFAULT_CHANNEL_5G,
+    AP_SSID_LENGTH_5G,
+)
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+
+# TODO(fxb/68956): Add security protocol check to mixed mode tests when info is
+# available.
+class WlanTargetSecurityTest(base_test.WifiBaseTest):
+    """Tests Fuchsia's target security concept and security upgrading
+
+    Testbed Requirements:
+    * One Fuchsia device
+    * One Whirlwind Access Point
+    """
+
    def setup_class(self):
        """Acquire the policy-mode DUT and the access point for the suite."""
        super().setup_class()

        self.dut = self.get_dut(AssociationMode.POLICY)

        if len(self.access_points) == 0:
            raise signals.TestAbortClass("Requires at least one access point")
        self.access_point = self.access_points[0]
+
    def teardown_class(self):
        """Disconnect the DUT and stop all APs before class teardown."""
        self.dut.disconnect()
        self.access_point.stop_all_aps()
        super().teardown_class()
+
    def teardown_test(self):
        """Reset connectivity and collect logs after every test."""
        self.dut.disconnect()
        self.download_logs()
        self.access_point.stop_all_aps()
        super().teardown_test()
+
    def on_fail(self, record: TestResultRecord):
        """Best-effort cleanup when a test fails."""
        self.dut.disconnect()
        self.access_point.stop_all_aps()
        super().on_fail(record)
+
    def setup_ap(
        self, security_mode: SecurityMode = SecurityMode.OPEN
    ) -> tuple[str, str]:
        """Sets up an AP using the provided security mode.

        Args:
            security_mode: security mode for the AP (default: open).

        Returns:
            Tuple (ssid, password). Returns a password even for open
            security, since non-open target securities require a credential
            to attempt a connection.
        """
        ssid = utils.rand_ascii_str(AP_SSID_LENGTH_5G)
        # Length 13, so it can be used for WEP or WPA
        password = utils.rand_ascii_str(13)
        security_profile = Security(security_mode=security_mode, password=password)

        # NOTE: this resolves to the module-level antlion `setup_ap` helper
        # imported above, not to this method (methods are not in scope in a
        # method body).
        setup_ap(
            access_point=self.access_point,
            profile_name="whirlwind",
            channel=AP_DEFAULT_CHANNEL_5G,
            ssid=ssid,
            security=security_profile,
        )

        return (ssid, password)
+
    # Open Security on AP
    #
    # Expectation (per the assertions below): an open AP associates only
    # when the target security is also open; WEP/WPA/WPA2/WPA3 target
    # securities must all be rejected.
    def test_associate_open_ap_with_open_target_security(self):
        ssid, _ = self.setup_ap()
        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")

    def test_reject_open_ap_with_wep_target_security(self):
        ssid, password = self.setup_ap()
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WEP, target_pwd=password
            ),
            "Should not have associated.",
        )

    def test_reject_open_ap_with_wpa_target_security(self):
        ssid, password = self.setup_ap()
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA, target_pwd=password
            ),
            "Should not have associated.",
        )

    def test_reject_open_ap_with_wpa2_target_security(self):
        ssid, password = self.setup_ap()
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA2, target_pwd=password
            ),
            "Should not have associated.",
        )

    def test_reject_open_ap_with_wpa3_target_security(self):
        ssid, password = self.setup_ap()
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA3, target_pwd=password
            ),
            "Should not have associated.",
        )
+
    # WEP Security on AP
    #
    # Expectation: a WEP AP associates only with a WEP target security;
    # open, WPA, WPA2, and WPA3 targets must be rejected.
    def test_reject_wep_ap_with_open_target_security(self):
        ssid, _ = self.setup_ap(SecurityMode.WEP)
        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")

    def test_associate_wep_ap_with_wep_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WEP)
        asserts.assert_true(
            self.dut.associate(
                ssid, target_security=SecurityMode.WEP, target_pwd=password
            ),
            "Failed to associate.",
        )

    def test_reject_wep_ap_with_wpa_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WEP)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA, target_pwd=password
            ),
            "Should not have associated.",
        )

    def test_reject_wep_ap_with_wpa2_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WEP)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA2, target_pwd=password
            ),
            "Should not have associated.",
        )

    def test_reject_wep_ap_with_wpa3_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WEP)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA3, target_pwd=password
            ),
            "Should not have associated.",
        )
+
    # WPA Security on AP
    #
    # Expectation: a WPA AP associates only with a WPA target security;
    # open, WEP, WPA2, and WPA3 targets must be rejected.
    def test_reject_wpa_ap_with_open_target_security(self):
        ssid, _ = self.setup_ap(SecurityMode.WPA)
        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")

    def test_reject_wpa_ap_with_wep_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WEP, target_pwd=password
            ),
            "Should not have associated.",
        )

    def test_associate_wpa_ap_with_wpa_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA)
        asserts.assert_true(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA, target_pwd=password
            ),
            "Failed to associate.",
        )

    def test_reject_wpa_ap_with_wpa2_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA2, target_pwd=password
            ),
            "Should not have associated.",
        )

    def test_reject_wpa_ap_with_wpa3_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA3, target_pwd=password
            ),
            "Should not have associated.",
        )
+
    # WPA2 Security on AP
    #
    # Expectation: a WPA2 AP accepts WPA and WPA2 target securities (the
    # WPA credential upgrades); open, WEP, and WPA3 targets are rejected.
    def test_reject_wpa2_ap_with_open_target_security(self):
        ssid, _ = self.setup_ap(SecurityMode.WPA2)
        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")

    def test_reject_wpa2_ap_with_wep_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA2)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WEP, target_pwd=password
            ),
            "Should not have associated.",
        )

    def test_associate_wpa2_ap_with_wpa_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA2)
        asserts.assert_true(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA, target_pwd=password
            ),
            "Failed to associate.",
        )

    def test_associate_wpa2_ap_with_wpa2_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA2)
        asserts.assert_true(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA2, target_pwd=password
            ),
            "Failed to associate.",
        )

    def test_reject_wpa2_ap_with_wpa3_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA2)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA3, target_pwd=password
            ),
            "Should not have associated.",
        )
+
    # WPA/WPA2 Security on AP
    #
    # Expectation: a mixed WPA/WPA2 AP accepts WPA and WPA2 target
    # securities; open, WEP, and WPA3 targets are rejected.
    def test_reject_wpa_wpa2_ap_with_open_target_security(self):
        ssid, _ = self.setup_ap(SecurityMode.WPA_WPA2)
        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")

    def test_reject_wpa_wpa2_ap_with_wep_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA_WPA2)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WEP, target_pwd=password
            ),
            "Should not have associated.",
        )

    def test_associate_wpa_wpa2_ap_with_wpa_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA_WPA2)
        asserts.assert_true(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA, target_pwd=password
            ),
            "Failed to associate.",
        )

    def test_associate_wpa_wpa2_ap_with_wpa2_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA_WPA2)
        asserts.assert_true(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA2, target_pwd=password
            ),
            "Failed to associate.",
        )

    def test_reject_wpa_wpa2_ap_with_wpa3_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA_WPA2)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA3, target_pwd=password
            ),
            "Should not have associated.",
        )
+
    # WPA3 Security on AP
    #
    # Expectation: a WPA3 AP accepts WPA2 and WPA3 target securities.
    # A WPA target is currently expected to FAIL (despite the test name)
    # because WPA credentials for WPA3 were temporarily disabled -- see
    # https://fxbug.dev/42166758 and the assertion message below.
    def test_reject_wpa3_ap_with_open_target_security(self):
        ssid, _ = self.setup_ap(SecurityMode.WPA3)
        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")

    def test_reject_wpa3_ap_with_wep_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA3)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WEP, target_pwd=password
            ),
            "Should not have associated.",
        )

    def test_associate_wpa3_ap_with_wpa_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA3)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA, target_pwd=password
            ),
            "Expected failure to associate. WPA credentials for WPA3 was "
            "temporarily disabled, see https://fxbug.dev/42166758 for context. "
            "If this feature was reenabled, please update this test's "
            "expectation.",
        )

    def test_associate_wpa3_ap_with_wpa2_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA3)
        asserts.assert_true(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA2, target_pwd=password
            ),
            "Failed to associate.",
        )

    def test_associate_wpa3_ap_with_wpa3_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA3)
        asserts.assert_true(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA3, target_pwd=password
            ),
            "Failed to associate.",
        )
+
    # WPA2/WPA3 Security on AP
    #
    # Expectation: a mixed WPA2/WPA3 AP accepts WPA2 and WPA3 target
    # securities. A WPA target is currently expected to FAIL (despite the
    # test name) because WPA credentials for WPA3 were temporarily
    # disabled -- see https://fxbug.dev/42166758.
    def test_reject_wpa2_wpa3_ap_with_open_target_security(self):
        ssid, _ = self.setup_ap(SecurityMode.WPA2_WPA3)
        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")

    def test_reject_wpa2_wpa3_ap_with_wep_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA2_WPA3)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WEP, target_pwd=password
            ),
            "Should not have associated.",
        )

    def test_associate_wpa2_wpa3_ap_with_wpa_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA2_WPA3)
        asserts.assert_false(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA, target_pwd=password
            ),
            "Expected failure to associate. WPA credentials for WPA3 was "
            "temporarily disabled, see https://fxbug.dev/42166758 for context. "
            "If this feature was reenabled, please update this test's "
            "expectation.",
        )

    def test_associate_wpa2_wpa3_ap_with_wpa2_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA2_WPA3)
        asserts.assert_true(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA2, target_pwd=password
            ),
            "Failed to associate.",
        )

    def test_associate_wpa2_wpa3_ap_with_wpa3_target_security(self):
        ssid, password = self.setup_ap(SecurityMode.WPA2_WPA3)
        asserts.assert_true(
            self.dut.associate(
                ssid, target_security=SecurityMode.WPA3, target_pwd=password
            ),
            "Failed to associate.",
        )
+
+
if __name__ == "__main__":
    # Entry point: run this suite directly under Mobly's test runner.
    test_runner.main()
diff --git a/tests/wlan/functional/WlanWirelessNetworkManagementTest.py b/tests/wlan/functional/WlanWirelessNetworkManagementTest.py
new file mode 100644
index 0000000..fd8a965
--- /dev/null
+++ b/tests/wlan/functional/WlanWirelessNetworkManagementTest.py
@@ -0,0 +1,486 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+from dataclasses import dataclass
+from datetime import datetime, timedelta, timezone
+from typing import FrozenSet
+
+from mobly import asserts, signals, test_runner
+from mobly.records import TestResultRecord
+
+from antlion import utils
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.ap_lib.radio_measurement import (
+    BssidInformation,
+    BssidInformationCapabilities,
+    NeighborReportElement,
+    PhyType,
+)
+from antlion.controllers.ap_lib.wireless_network_management import (
+    BssTransitionManagementRequest,
+)
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.fuchsia_lib.wlan_lib import WlanMacRole
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+
@dataclass
class TestParams:
    """Parameters for one generated roam-on-BTM-request test case."""

    # Security mode applied to the APs involved in the roam.
    security_mode: SecurityMode
+
+
+# Antlion can see (via the wlan_features config directive) whether WNM features
+# are enabled, and runs or skips tests depending on presence of WNM features.
+class WlanWirelessNetworkManagementTest(base_test.WifiBaseTest):
+    """Tests Fuchsia's Wireless Network Management (AKA 802.11v) support.
+
+    Testbed Requirements:
+    * One Fuchsia device
+    * One Whirlwind access point
+
+    Existing Fuchsia drivers do not yet support WNM features out-of-the-box, so this
+    suite skips certain tests depending on whether specific WNM features are enabled.
+    """
+
    def pre_run(self):
        """Generate one roam-on-BTM-request test per security mode."""
        # Each entry is a 1-tuple of TestParams, matching the per-test
        # argument shape that generate_tests unpacks into test_logic.
        test_args: list[tuple[TestParams]] = []

        SECURITY_MODES = (
            SecurityMode.OPEN,
            SecurityMode.WEP,
            SecurityMode.WPA,
            SecurityMode.WPA2,
            SecurityMode.WPA3,
        )
        for security_mode in SECURITY_MODES:
            test_args.append(
                (
                    TestParams(
                        security_mode=security_mode,
                    ),
                )
            )

        def generate_roam_on_btm_req_test_name(test: TestParams):
            return f"test_roam_on_btm_req_from_{test.security_mode}_2g_to_{test.security_mode}_5g"

        self.generate_tests(
            # NOTE(review): setup_connect_roam_on_btm_req is defined outside
            # this view -- confirm its signature accepts a TestParams.
            test_logic=self.setup_connect_roam_on_btm_req,
            name_func=generate_roam_on_btm_req_test_name,
            arg_sets=test_args,
        )
+
    def setup_class(self):
        """Acquire the Fuchsia DUT (policy mode) and the access point."""
        super().setup_class()

        self.fuchsia_device, self.dut = self.get_dut_type(
            FuchsiaDevice, AssociationMode.POLICY
        )

        if len(self.access_points) == 0:
            raise signals.TestAbortClass("Requires at least one access point")
        self.access_point = self.access_points[0]
+
    def teardown_class(self):
        """Disconnect the DUT and stop all APs before class teardown."""
        self.dut.disconnect()
        self.access_point.stop_all_aps()
        super().teardown_class()
+
    def teardown_test(self):
        """Reset connectivity and collect logs after every test."""
        self.dut.disconnect()
        self.download_logs()
        self.access_point.stop_all_aps()
        super().teardown_test()
+
    def on_fail(self, record: TestResultRecord):
        """Best-effort cleanup when a test fails."""
        self.dut.disconnect()
        self.access_point.stop_all_aps()
        super().on_fail(record)
+
    def setup_ap(
        self,
        ssid: str,
        security: Security | None = None,
        additional_ap_parameters: dict | None = None,
        channel: int = hostapd_constants.AP_DEFAULT_CHANNEL_2G,
        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
    ) -> None:
        """Sets up an AP using the provided parameters.

        Args:
            ssid: SSID for the AP.
            security: security config for AP, defaults to None (open network
                with no password).
            additional_ap_parameters: A dictionary of parameters that can be set
                directly in the hostapd config file.
            channel: which channel number to set the AP to (default is
                AP_DEFAULT_CHANNEL_2G).
            wnm_features: Wireless Network Management features to enable
                (default is no WNM features).
        """
        # NOTE: this resolves to the module-level antlion `setup_ap` helper
        # imported above, not to this method.
        setup_ap(
            access_point=self.access_point,
            profile_name="whirlwind",
            channel=channel,
            ssid=ssid,
            security=security,
            additional_ap_parameters=additional_ap_parameters,
            wnm_features=wnm_features,
        )
+
+    def _get_client_mac(self) -> str:
+        """Get the MAC address of the DUT client interface.
+
+        Returns:
+            str, MAC address of the DUT client interface.
+        Raises:
+            ValueError if there is no DUT client interface.
+            WlanError if the DUT interface query fails.
+        """
+        for wlan_iface in self.dut.get_wlan_interface_id_list():
+            result = self.fuchsia_device.sl4f.wlan_lib.query_iface(wlan_iface)
+            if result.role is WlanMacRole.CLIENT:
+                return utils.mac_address_list_to_str(bytes(result.sta_addr))
+        raise ValueError(
+            "Failed to get client interface mac address. No client interface found."
+        )
+
    def test_bss_transition_is_not_advertised_when_ap_supported_dut_unsupported(self):
        """A DUT without the BTM feature must not advertise BTM support."""
        # Only meaningful when the DUT build lacks BTM support.
        if self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
            raise signals.TestSkip("skipping test because BTM feature is present")

        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
        wnm_features = frozenset(
            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
        )
        self.setup_ap(ssid, wnm_features=wnm_features)
        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
        client_mac = self._get_client_mac()
        # Verify that DUT is actually associated (as seen from AP).
        asserts.assert_true(
            self.access_point.sta_associated(self.access_point.wlan_2g, client_mac),
            "DUT is not associated on the 2.4GHz band",
        )

        # Read the station's Extended Capabilities as observed by the AP.
        ext_capabilities = self.access_point.get_sta_extended_capabilities(
            self.access_point.wlan_2g, client_mac
        )
        asserts.assert_false(
            ext_capabilities.bss_transition,
            "DUT is incorrectly advertising BSS Transition Management support",
        )
+
+    def test_bss_transition_is_advertised_when_ap_supported_dut_supported(self):
+        if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
+            raise signals.TestSkip("skipping test because BTM feature is not present")
+
+        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        wnm_features = frozenset(
+            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
+        )
+        self.setup_ap(ssid, wnm_features=wnm_features)
+        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
+        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
+        client_mac = self._get_client_mac()
+        # Verify that DUT is actually associated (as seen from AP).
+        asserts.assert_true(
+            self.access_point.sta_associated(self.access_point.wlan_2g, client_mac),
+            "DUT is not associated on the 2.4GHz band",
+        )
+
+        ext_capabilities = self.access_point.get_sta_extended_capabilities(
+            self.access_point.wlan_2g, client_mac
+        )
+        asserts.assert_true(
+            ext_capabilities.bss_transition,
+            "DUT is not advertising BSS Transition Management support",
+        )
+
+    def test_wnm_sleep_mode_is_not_advertised_when_ap_supported_dut_unsupported(self):
+        if self.dut.feature_is_present("WNM_SLEEP_MODE"):
+            raise signals.TestSkip("skipping test because WNM feature is present")
+
+        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        wnm_features = frozenset([hostapd_constants.WnmFeature.WNM_SLEEP_MODE])
+        self.setup_ap(ssid, wnm_features=wnm_features)
+        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
+        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
+        client_mac = self._get_client_mac()
+        # Verify that DUT is actually associated (as seen from AP).
+        asserts.assert_true(
+            self.access_point.sta_associated(self.access_point.wlan_2g, client_mac),
+            "DUT is not associated on the 2.4GHz band",
+        )
+
+        ext_capabilities = self.access_point.get_sta_extended_capabilities(
+            self.access_point.wlan_2g, client_mac
+        )
+        asserts.assert_false(
+            ext_capabilities.wnm_sleep_mode,
+            "DUT is incorrectly advertising WNM Sleep Mode support",
+        )
+
+    # This is called in generate_tests.
+    def setup_connect_roam_on_btm_req(self, test: TestParams):
+        """Setup the APs, associate a DUT, amd roam when BTM request is received.
+
+        Args:
+            test: Test parameters
+        """
+        if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
+            raise signals.TestSkip("skipping test because BTM feature is not present")
+
+        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        password = None
+        if test.security_mode is not SecurityMode.OPEN:
+            # Length 13, so it can be used for WEP or WPA
+            password = utils.rand_ascii_str(13)
+
+        wnm_features = frozenset(
+            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
+        )
+
+        # Setup 2.4 GHz AP.
+        security = Security(test.security_mode, password)
+        wnm_features = frozenset(
+            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
+        )
+        # Setup 2.4 GHz AP. (NOTE(review): this comment and the wnm_features assignment just above duplicate identical lines earlier in this method; the duplicates can be removed with no behavior change.)
+        self.setup_ap(
+            ssid,
+            security=security,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            wnm_features=wnm_features,
+        )
+
+        asserts.assert_true(
+            self.dut.associate(
+                ssid, target_pwd=password, target_security=test.security_mode
+            ),
+            "Failed to associate.",
+        )
+        # Verify that DUT is actually associated (as seen from AP).
+        client_mac = self._get_client_mac()
+        asserts.assert_true(
+            self.access_point.sta_associated(self.access_point.wlan_2g, client_mac),
+            "DUT is not associated on the 2.4GHz band",
+        )
+
+        # Setup 5 GHz AP with same SSID.
+        self.setup_ap(
+            ssid,
+            security=security,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            wnm_features=wnm_features,
+        )
+
+        # Construct a BTM request.
+        dest_bssid = self.access_point.get_bssid_from_ssid(
+            ssid, self.access_point.wlan_5g
+        )
+        dest_bssid_info = BssidInformation(
+            security=True, capabilities=BssidInformationCapabilities()
+        )
+        neighbor_5g_ap = NeighborReportElement(
+            dest_bssid,
+            dest_bssid_info,
+            operating_class=116,
+            channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            phy_type=PhyType.VHT,
+        )
+        btm_req = BssTransitionManagementRequest(
+            preferred_candidate_list_included=True,
+            disassociation_imminent=True,
+            candidate_list=[neighbor_5g_ap],
+        )
+
+        # Sleep to avoid concurrent scan during reassociation, necessary due to a firmware bug.
+        # TODO(fxbug.dev/42068735) Remove when fixed, or when non-firmware BTM support is merged.
+        time.sleep(5)
+
+        # Send BTM request from 2.4 GHz AP to DUT
+        self.access_point.send_bss_transition_management_req(
+            self.access_point.wlan_2g, client_mac, btm_req
+        )
+
+        # Give DUT time to roam.
+        ROAM_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
+        while datetime.now(timezone.utc) < ROAM_DEADLINE:
+            if self.access_point.sta_authorized(self.access_point.wlan_5g, client_mac):
+                break
+            else:
+                time.sleep(0.25)
+
+        # Verify that DUT roamed (as seen from AP).
+        asserts.assert_true(
+            self.access_point.sta_authenticated(self.access_point.wlan_5g, client_mac),
+            "DUT is not authenticated on the 5GHz band",
+        )
+        asserts.assert_true(
+            self.access_point.sta_associated(self.access_point.wlan_5g, client_mac),
+            "DUT is not associated on the 5GHz band",
+        )
+        asserts.assert_true(
+            self.access_point.sta_authorized(self.access_point.wlan_5g, client_mac),
+            "DUT is not 802.1X authorized on the 5GHz band",
+        )
+
+    def test_btm_req_ignored_dut_unsupported(self):
+        if self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
+            raise signals.TestSkip("skipping test because BTM feature is present")
+
+        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        wnm_features = frozenset(
+            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
+        )
+        # Setup 2.4 GHz AP.
+        self.setup_ap(
+            ssid,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            wnm_features=wnm_features,
+        )
+
+        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
+        # Verify that DUT is actually associated (as seen from AP).
+        client_mac = self._get_client_mac()
+        asserts.assert_true(
+            self.access_point.sta_associated(self.access_point.wlan_2g, client_mac),
+            "DUT is not associated on the 2.4GHz band",
+        )
+
+        # Setup 5 GHz AP with same SSID.
+        self.setup_ap(
+            ssid,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            wnm_features=wnm_features,
+        )
+
+        # Construct a BTM request.
+        dest_bssid = self.access_point.get_bssid_from_ssid(
+            ssid, self.access_point.wlan_5g
+        )
+        dest_bssid_info = BssidInformation(
+            security=True, capabilities=BssidInformationCapabilities()
+        )
+        neighbor_5g_ap = NeighborReportElement(
+            dest_bssid,
+            dest_bssid_info,
+            operating_class=126,  # NOTE(review): sibling tests use operating_class=116 for this same 5 GHz channel — confirm which value is intended.
+            channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            phy_type=PhyType.VHT,
+        )
+        btm_req = BssTransitionManagementRequest(
+            disassociation_imminent=True, candidate_list=[neighbor_5g_ap]
+        )
+
+        # Send BTM request from 2.4 GHz AP to DUT
+        self.access_point.send_bss_transition_management_req(
+            self.access_point.wlan_2g, client_mac, btm_req
+        )
+
+        # Check that DUT has not roamed.
+        ROAM_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
+        while datetime.now(timezone.utc) < ROAM_DEADLINE:
+            # Fail if DUT has reassociated to 5 GHz AP (as seen from AP).
+            if self.access_point.sta_associated(self.access_point.wlan_5g, client_mac):
+                raise signals.TestFailure(
+                    "DUT unexpectedly roamed to target BSS after BTM request"
+                )
+            else:
+                time.sleep(0.25)
+
+        # DUT should have stayed associated to original AP.
+        asserts.assert_true(
+            self.access_point.sta_associated(self.access_point.wlan_2g, client_mac),
+            "DUT unexpectedly lost association on the 2.4GHz band after BTM request",
+        )
+
+    def test_btm_req_target_ap_rejects_reassoc(self):
+        if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
+            raise signals.TestSkip("skipping test because BTM feature is not present")
+
+        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        wnm_features = frozenset(
+            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
+        )
+        # Setup 2.4 GHz AP.
+        self.setup_ap(
+            ssid,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+            wnm_features=wnm_features,
+        )
+
+        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
+        # Verify that DUT is actually associated (as seen from AP).
+        client_mac = self._get_client_mac()
+        asserts.assert_true(
+            self.access_point.sta_associated(self.access_point.wlan_2g, client_mac),
+            "DUT is not associated on the 2.4GHz band",
+        )
+
+        # Setup 5 GHz AP with same SSID, but reject all STAs.
+        reject_all_sta_param = {"max_num_sta": 0}
+        self.setup_ap(
+            ssid,
+            additional_ap_parameters=reject_all_sta_param,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            wnm_features=wnm_features,
+        )
+
+        # Construct a BTM request.
+        dest_bssid = self.access_point.get_bssid_from_ssid(
+            ssid, self.access_point.wlan_5g
+        )
+        dest_bssid_info = BssidInformation(
+            security=True, capabilities=BssidInformationCapabilities()
+        )
+        neighbor_5g_ap = NeighborReportElement(
+            dest_bssid,
+            dest_bssid_info,
+            operating_class=116,
+            channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            phy_type=PhyType.VHT,
+        )
+        btm_req = BssTransitionManagementRequest(
+            disassociation_imminent=True, candidate_list=[neighbor_5g_ap]
+        )
+
+        # Sleep to avoid concurrent scan during reassociation, necessary due to a firmware bug.
+        # TODO(fxbug.dev/42068735) Remove when fixed, or when non-firmware BTM support is merged.
+        time.sleep(5)
+
+        # Send BTM request from 2.4 GHz AP to DUT
+        self.access_point.send_bss_transition_management_req(
+            self.access_point.wlan_2g, client_mac, btm_req
+        )
+
+        # Check that DUT has not reassociated. (NOTE(review): unlike test_btm_req_ignored_dut_unsupported, this test never verifies afterward that the DUT stayed associated on the 2.4 GHz band — consider adding that assertion.)
+        ROAM_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
+        while datetime.now(timezone.utc) < ROAM_DEADLINE:
+            # Check that DUT has not reassociated to 5 GHz AP (as seen from AP).
+            if self.access_point.sta_associated(self.access_point.wlan_5g, client_mac):
+                raise signals.TestFailure("DUT unexpectedly roamed to 5GHz band")
+            else:
+                time.sleep(0.25)
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/misc/BUILD.gn b/tests/wlan/misc/BUILD.gn
new file mode 100644
index 0000000..a0250c8
--- /dev/null
+++ b/tests/wlan/misc/BUILD.gn
@@ -0,0 +1,26 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("wlan_interface_test") {
+  main_source = "WlanInterfaceTest.py"
+  environments = display_envs
+}
+
+antlion_host_test("wlan_misc_scenario") {
+  main_source = "WlanMiscScenarioTest.py"
+  environments = display_ap_envs
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":wlan_interface_test($host_toolchain)",
+    ":wlan_misc_scenario($host_toolchain)",
+  ]
+}
diff --git a/tests/wlan/misc/WlanInterfaceTest.py b/tests/wlan/misc/WlanInterfaceTest.py
new file mode 100644
index 0000000..b2bd20f
--- /dev/null
+++ b/tests/wlan/misc/WlanInterfaceTest.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mobly import test_runner
+
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+
+class WlanInterfaceTest(base_test.WifiBaseTest):
+    def setup_class(self):
+        super().setup_class()
+        self.dut = self.get_dut(AssociationMode.POLICY)
+
+    def test_destroy_iface(self):
+        """Test that we don't error out when destroying the WLAN interface.
+
+        Steps:
+        1. Find a wlan interface
+        2. Destroy it
+
+        Expected Result:
+        Verify there are no errors in destroying the wlan interface.
+
+        Returns:
+          signals.TestPass if no errors
+          signals.TestFailure if there are any errors during the test.
+
+        TAGS: WLAN
+        Priority: 1
+        """
+        wlan_interfaces = self.dut.get_wlan_interface_id_list()
+        self.dut.destroy_wlan_interface(wlan_interfaces[0])
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/misc/WlanMiscScenarioTest.py b/tests/wlan/misc/WlanMiscScenarioTest.py
new file mode 100644
index 0000000..588b5b0
--- /dev/null
+++ b/tests/wlan/misc/WlanMiscScenarioTest.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from mobly import asserts, signals, test_runner
+from mobly.records import TestResultRecord
+
+from antlion import utils
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+
+class WlanMiscScenarioTest(base_test.WifiBaseTest):
+    """Random scenario tests, usually to reproduce certain bugs, that do not
+    fit into a specific test category, but should still be run in CI to catch
+    regressions.
+    """
+
+    def setup_class(self):
+        super().setup_class()
+        self.log = logging.getLogger()
+        self.dut = self.get_dut(AssociationMode.POLICY)
+
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("Requires at least one access point")
+        self.access_point = self.access_points[0]
+
+    def teardown_class(self):
+        self.dut.disconnect()
+        self.access_point.stop_all_aps()
+
+    def teardown_test(self):
+        self.dut.disconnect()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+
+    def on_fail(self, record: TestResultRecord):
+        super().on_fail(record)
+        self.dut.disconnect()
+        self.access_point.stop_all_aps()
+
+    def test_connect_to_wpa2_after_wpa3_rejection(self):
+        """Test association to non-WPA3 network after receiving a WPA3
+        rejection, which was triggering a firmware hang.
+
+        Bug: https://bugs.fuchsia.dev/p/fuchsia/issues/detail?id=71233
+        """
+        # Setup a WPA3 network
+        wpa3_ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=wpa3_ssid,
+            security=Security(
+                security_mode=SecurityMode.WPA3,
+                password=generate_random_password(SecurityMode.WPA3),
+            ),
+        )
+        # Attempt to associate with wrong password, expecting failure
+        self.log.info("Attempting to associate WPA3 with wrong password.")
+        asserts.assert_false(
+            self.dut.associate(
+                wpa3_ssid, target_pwd="wrongpass", target_security=SecurityMode.WPA3
+            ),
+            "Associated with WPA3 network using the wrong password",
+        )
+
+        self.access_point.stop_all_aps()
+
+        # Setup a WPA2 Network
+        wpa2_ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G)
+        wpa2_password = generate_random_password(SecurityMode.WPA2)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            ssid=wpa2_ssid,
+            security=Security(security_mode=SecurityMode.WPA2, password=wpa2_password),
+        )
+
+        # Attempt to associate, expecting success
+        self.log.info("Attempting to associate with WPA2 network.")
+        asserts.assert_true(
+            self.dut.associate(
+                wpa2_ssid, target_pwd=wpa2_password, target_security=SecurityMode.WPA2
+            ),
+            "Failed to associate with WPA2 network after a WPA3 rejection.",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/performance/BUILD.gn b/tests/wlan/performance/BUILD.gn
new file mode 100644
index 0000000..123fd1c
--- /dev/null
+++ b/tests/wlan/performance/BUILD.gn
@@ -0,0 +1,88 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("channel_sweep_test") {
+  main_source = "ChannelSweepTest.py"
+  environments = display_ap_iperf_envs
+}
+
+antlion_host_test("channel_sweep_test_quick") {
+  main_source = "ChannelSweepTest.py"
+  environments = display_ap_iperf_envs
+  test_cases = [
+    "test_US_wpa2_channel_8_20mhz",
+    "test_US_wpa2_channel_40_80mhz", # non-DFS 5GHz channel
+    "test_US_wpa2_channel_100_80mhz", # DFS 5GHz channel
+    "test_US_wpa2_channel_165_20mhz",
+  ]
+}
+
+antlion_host_test("wlan_rvr_test_2g_open") {
+  main_source = "WlanRvrTest.py"
+  test_params = "rvr_settings.yaml"
+  environments = display_ap_iperf_attenuator_envs
+  test_cases = [ "test_rvr_11n_2g_20mhz_open_.*"]
+}
+
+antlion_host_test("wlan_rvr_test_2g_wpa2") {
+  main_source = "WlanRvrTest.py"
+  test_params = "rvr_settings.yaml"
+  environments = display_ap_iperf_attenuator_envs
+  test_cases = [ "test_rvr_11n_2g_20mhz_wpa2_.*"]
+}
+
+antlion_host_test("wlan_rvr_test_5g_open") {
+  main_source = "WlanRvrTest.py"
+  test_params = "rvr_settings.yaml"
+  environments = display_ap_iperf_attenuator_envs
+  test_cases = [ "test_rvr_11ac_5g_80mhz_open_.*" ]
+}
+
+antlion_host_test("wlan_rvr_test_5g_wpa2") {
+  main_source = "WlanRvrTest.py"
+  test_params = "rvr_settings.yaml"
+  environments = display_ap_iperf_attenuator_envs
+  test_cases = [ "test_rvr_11ac_5g_80mhz_wpa2_.*"]
+}
+
+antlion_host_test("wlan_wmm_test") {
+  main_source = "WmmTest.py"
+
+  # Requires a second station and custom configuration. There are no available
+  # testbeds to support this topology. This will remain an at-desk test until an
+  # infra-hosted testbed matching this topology is supported.
+  environments = []
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":wlan_rvr_test_2g_open($host_toolchain)",
+    ":wlan_rvr_test_2g_wpa2($host_toolchain)",
+    ":wlan_rvr_test_5g_open($host_toolchain)",
+    ":wlan_rvr_test_5g_wpa2($host_toolchain)",
+  ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    ":channel_sweep_test_quick($host_toolchain)",
+  ]
+}
+
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [
+    # Running ChannelSweepTest is usually only necessary when verifying new WLAN
+    # firmware patches. Take it out of automation; it takes too long otherwise.
+    ":channel_sweep_test($host_toolchain)",
+    ":wlan_wmm_test($host_toolchain)",
+  ]
+}
diff --git a/tests/wlan/performance/ChannelSweepTest.py b/tests/wlan/performance/ChannelSweepTest.py
new file mode 100644
index 0000000..04e2329
--- /dev/null
+++ b/tests/wlan/performance/ChannelSweepTest.py
@@ -0,0 +1,653 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import time
+from dataclasses import dataclass
+from pathlib import Path
+from statistics import pstdev
+
+from honeydew.typing.wlan import CountryCode
+from mobly import asserts, signals, test_runner
+from mobly.config_parser import TestRunConfig
+
+from antlion import utils
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.ap_lib.regulatory_channels import COUNTRY_CHANNELS
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.iperf_client import IPerfClientOverAdb, IPerfClientOverSsh
+from antlion.controllers.iperf_server import IPerfResult, IPerfServerOverSsh
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+DEFAULT_MIN_THROUGHPUT = 0.0
+DEFAULT_MAX_STD_DEV = 1.0
+DEFAULT_IPERF_TIMEOUT = 30
+
+DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR = 30
+GRAPH_CIRCLE_SIZE = 10
+MAX_2_4_CHANNEL = 14
+TIME_TO_SLEEP_BETWEEN_RETRIES = 1
+WEP_HEX_STRING_LENGTH = 10
+
+MEGABITS_PER_SECOND = "Mbps"
+
+
+@dataclass
+class TestParams:
+    country_code: str
+    """Country code for the DUT to set before running the test."""
+
+    security_mode: SecurityMode
+    """Security type of the network to create. None represents an open network."""
+
+    channel: int
+    """Channel for the AP to broadcast on"""
+
+    channel_bandwidth: int
+    """Channel bandwidth in MHz for the AP to broadcast with"""
+
+    expect_min_rx_throughput_mbps: float = DEFAULT_MIN_THROUGHPUT
+    """Expected minimum receive throughput in Mb/s"""
+
+    expect_min_tx_throughput_mbps: float = DEFAULT_MIN_THROUGHPUT
+    """Expected minimum transmit throughput in Mb/s"""
+
+    # TODO: Use this value
+    expect_max_std_dev: float = DEFAULT_MAX_STD_DEV
+    """Expected maximum standard deviation of throughput in Mb/s"""
+
+
+@dataclass(frozen=True)
+class ThroughputKey:
+    country_code: str
+    security_mode: SecurityMode
+    channel_bandwidth: int
+
+    @staticmethod
+    def from_test(test: TestParams) -> "ThroughputKey":
+        return ThroughputKey(
+            country_code=test.country_code,
+            security_mode=test.security_mode,
+            channel_bandwidth=test.channel_bandwidth,
+        )
+
+
+@dataclass
+class ThroughputValue:
+    channel: int
+    tx_throughput_mbps: float | None
+    rx_throughput_mbps: float | None
+
+
+ChannelThroughputMap = dict[ThroughputKey, list[ThroughputValue]]
+
+
+class ChannelSweepTest(base_test.WifiBaseTest):
+    """Tests channel performance.
+
+    Testbed Requirement:
+    * 1 x Fuchsia device (dut)
+    * 1 x access point
+    * 1 x Linux Machine used as IPerfServer
+
+    Note: Performance tests should be done in isolated testbed.
+    """
+
+    def __init__(self, configs: TestRunConfig) -> None:
+        super().__init__(configs)
+        self.channel_throughput: ChannelThroughputMap = {}
+
+    def pre_run(self):
+        tests: list[tuple[TestParams]] = []
+
+        def generate_test_name(test: TestParams):
+            return f"test_{test.country_code}_{test.security_mode}_channel_{test.channel}_{test.channel_bandwidth}mhz"
+
+        def test_params(test_name):
+            return self.user_params.get("channel_sweep_test_params", {}).get(
+                test_name, {}
+            )
+
+        for country_channels in [COUNTRY_CHANNELS["United States of America"]]:
+            for security_mode in [
+                SecurityMode.OPEN,
+                SecurityMode.WEP,
+                SecurityMode.WPA,
+                SecurityMode.WPA2,
+                SecurityMode.WPA_WPA2,
+                SecurityMode.WPA3,
+            ]:
+                for channel, bandwidths in country_channels.allowed_channels.items():
+                    for bandwidth in bandwidths:
+                        test = TestParams(
+                            country_code=country_channels.country_code,
+                            security_mode=security_mode,
+                            channel=channel,
+                            channel_bandwidth=bandwidth,
+                        )
+                        name = generate_test_name(test)
+                        test.expect_min_rx_throughput_mbps = test_params(name).get(
+                            "min_rx_throughput", DEFAULT_MIN_THROUGHPUT
+                        )
+                        test.expect_min_tx_throughput_mbps = test_params(name).get(
+                            "min_tx_throughput", DEFAULT_MIN_THROUGHPUT
+                        )
+                        test.expect_max_std_dev = test_params(name).get(
+                            "max_std_dev", DEFAULT_MAX_STD_DEV
+                        )
+                        tests.append((test,))
+
+        self.generate_tests(self.run_channel_performance, generate_test_name, tests)
+
+    def get_existing_test_names(self) -> list[str]:
+        test_names: list[str] = super().get_existing_test_names()
+        # Verify standard deviation last since it depends on the throughput results from
+        # all other tests.
+        test_names.sort(key=lambda n: n == "test_standard_deviation")
+        return test_names
+
+    def setup_class(self):
+        super().setup_class()
+        self.log = logging.getLogger()
+
+        self.time_to_wait_for_ip_addr = self.user_params.get(
+            "channel_sweep_test_params", {}
+        ).get("time_to_wait_for_ip_addr", DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR)
+
+        self.fuchsia_device, self.dut = self.get_dut_type(
+            FuchsiaDevice, AssociationMode.POLICY
+        )
+
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("Requires at least one access point")
+        self.access_point = self.access_points[0]
+        self.access_point.stop_all_aps()
+
+        if len(self.iperf_servers) == 0:
+            raise signals.TestAbortClass("Requires at least one iperf server")
+        self.iperf_server = self.iperf_servers[0]
+        self.iperf_server.start()
+
+        if len(self.iperf_clients) > 0:
+            self.iperf_client = self.iperf_clients[0]
+        else:
+            self.iperf_client = self.dut.create_iperf_client()
+
+    def teardown_class(self):
+        self.write_graph()
+        super().teardown_class()
+
+    def setup_test(self):
+        super().setup_test()
+        # TODO(fxb/46417): Uncomment when wlanClearCountry is implemented to
+        # clean up any country code changes.
+        # for fd in self.fuchsia_devices:
+        #     phy_ids_response = fd.wlan_lib.wlanPhyIdList()
+        #     if phy_ids_response.get('error'):
+        #         raise ConnectionError(
+        #             'Failed to retrieve phy ids from FuchsiaDevice (%s). '
+        #             'Error: %s' % (fd.ip, phy_ids_response['error']))
+        #     for id in phy_ids_response['result']:
+        #         clear_country_response = fd.wlan_lib.wlanClearCountry(id)
+        #         if clear_country_response.get('error'):
+        #             raise EnvironmentError(
+        #                 'Failed to reset country code on FuchsiaDevice (%s). '
+        #                 'Error: %s' % (fd.ip, clear_country_response['error'])
+        #                 )
+        self.access_point.stop_all_aps()
+        for ad in self.android_devices:
+            ad.droid.wakeLockAcquireBright()
+            ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+        self.dut.disconnect()
+
+    def teardown_test(self):
+        for ad in self.android_devices:
+            ad.droid.wakeLockRelease()
+            ad.droid.goToSleepNow()
+        self.dut.turn_location_off_and_scan_toggle_off()
+        self.dut.disconnect()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+        super().teardown_test()
+
+    def setup_ap(
+        self,
+        channel: int,
+        channel_bandwidth: int,
+        security_profile: Security,
+    ) -> str:
+        """Start network on AP with basic configuration.
+
+        Args:
+            channel: channel to use for network
+            channel_bandwidth: channel bandwidth in MHz to use for network
+            security_profile: security configuration to use (SecurityMode.OPEN for an open network)
+
+        Returns:
+            SSID of the newly created and running network
+
+        Raises:
+            ConnectionError if network is not started successfully.
+        """
+        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        try:
+            setup_ap(
+                access_point=self.access_point,
+                profile_name="whirlwind",
+                channel=channel,
+                security=security_profile,
+                force_wmm=True,
+                ssid=ssid,
+                vht_bandwidth=channel_bandwidth,
+                setup_bridge=True,
+            )
+            self.log.info(
+                f"Network (ssid: {ssid}) up on channel {channel} "
+                f"w/ channel bandwidth {channel_bandwidth} MHz"
+            )
+            return ssid
+        except Exception as err:
+            raise ConnectionError(
+                f"Failed to setup ap on channel: {channel}, "
+                f"channel bandwidth: {channel_bandwidth} MHz. "
+            ) from err
+
    def get_and_verify_iperf_address(
        self, channel: int, device: FuchsiaDevice | IPerfServerOverSsh, interface: str
    ) -> str:
        """Get ip address from a devices interface and verify it belongs to
        expected subnet based on APs DHCP config.

        Polls the interface until a private IPv4 address inside the expected
        AP subnet appears, or until self.time_to_wait_for_ip_addr elapses.

        Args:
            channel: channel network is running on, used to pick the 2.4GHz
                or 5GHz AP subnet
            device: device to get ip address for
            interface: interface on device to get ip address from

        Returns:
            IP address of device on given interface

        Raises:
            ConnectionError, if device does not have a valid ip address after
                all retries.
        """
        # Channels at or below MAX_2_4_CHANNEL belong to the AP's 2.4GHz
        # subnet; anything higher is on the 5GHz subnet.
        if channel <= MAX_2_4_CHANNEL:
            subnet = self.access_point._AP_2G_SUBNET_STR
        else:
            subnet = self.access_point._AP_5G_SUBNET_STR
        end_time = time.time() + self.time_to_wait_for_ip_addr
        while time.time() < end_time:
            device_addresses = device.get_interface_ip_addresses(interface)
            if device_addresses["ipv4_private"]:
                for ip_addr in device_addresses["ipv4_private"]:
                    if utils.ip_in_subnet(ip_addr, subnet):
                        return ip_addr
                    else:
                        # Address exists but is stale / from the wrong AP band.
                        self.log.debug(
                            f"Device has an ip address ({ip_addr}), but it is not in subnet {subnet}"
                        )
            else:
                self.log.debug("Device does not have a valid ip address. Retrying.")
            time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
        raise ConnectionError("Device failed to get an ip address.")
+
+    def get_iperf_throughput(
+        self,
+        iperf_server_address: str,
+        iperf_client_address: str,
+        reverse: bool = False,
+    ) -> float:
+        """Run iperf between client and server and get the throughput.
+
+        Args:
+            iperf_server_address: IP address of running iperf server
+            iperf_client_address: IP address of iperf client (dut)
+            reverse: If True, run traffic in reverse direction, from server to client.
+
+        Returns:
+            iperf throughput or 0 if iperf fails
+        """
+        if reverse:
+            self.log.info(
+                f"Running IPerf traffic from server ({iperf_server_address}) to "
+                f"dut ({iperf_client_address})."
+            )
+            iperf_results_file = self.iperf_client.start(
+                iperf_server_address,
+                "-i 1 -t 10 -R -J",
+                "channel_sweep_rx",
+                timeout=DEFAULT_IPERF_TIMEOUT,
+            )
+        else:
+            self.log.info(
+                f"Running IPerf traffic from dut ({iperf_client_address}) to "
+                f"server ({iperf_server_address})."
+            )
+            iperf_results_file = self.iperf_client.start(
+                iperf_server_address,
+                "-i 1 -t 10 -J",
+                "channel_sweep_tx",
+                timeout=DEFAULT_IPERF_TIMEOUT,
+            )
+        if iperf_results_file:
+            iperf_results = IPerfResult(
+                iperf_results_file, reporting_speed_units=MEGABITS_PER_SECOND
+            )
+            return iperf_results.avg_send_rate or 0.0
+        return 0.0
+
+    def log_to_file_and_throughput_data(
+        self,
+        test: TestParams,
+        tx_throughput: float | None,
+        rx_throughput: float | None,
+    ):
+        """Write performance info to csv file and to throughput data.
+
+        Args:
+            channel: int, channel that test was run on
+            channel_bandwidth: int, channel bandwidth the test used
+            tx_throughput: float, throughput value from dut to iperf server
+            rx_throughput: float, throughput value from iperf server to dut
+        """
+        test_name = self.current_test_info.name
+        log_file = Path(os.path.join(self.log_path, "throughput.csv"))
+        self.log.info(f"Writing IPerf results for {test_name} to {log_file}")
+
+        if not log_file.is_file():
+            with open(log_file, "x") as csv_file:
+                csv_file.write(
+                    "country code,security,channel,channel bandwidth,tx throughput,rx throughput\n"
+                )
+
+        with open(log_file, "a") as csv_file:
+            csv_file.write(
+                f"{test.country_code},{test.security_mode},{test.channel},{test.channel_bandwidth},{tx_throughput},{rx_throughput}\n"
+            )
+
+        key = ThroughputKey.from_test(test)
+        if key not in self.channel_throughput:
+            self.channel_throughput[key] = []
+
+        self.channel_throughput[key].append(
+            ThroughputValue(
+                channel=test.channel,
+                tx_throughput_mbps=tx_throughput,
+                rx_throughput_mbps=rx_throughput,
+            )
+        )
+
+    def write_graph(self):
+        """Create graph html files from throughput data, plotting channel vs
+        tx_throughput and channel vs rx_throughput.
+        """
+        # If performance measurement is skipped
+        if not hasattr(self, "iperf_server") or not self.iperf_server:
+            return
+
+        try:
+            from bokeh.plotting import (  # type: ignore
+                ColumnDataSource,
+                figure,
+                output_file,
+                save,
+            )
+        except ImportError:
+            self.log.warn(
+                "bokeh is not installed: skipping creation of graphs. "
+                "Note CSV files are still available. If graphs are "
+                'desired, install antlion with the "bokeh" feature.'
+            )
+            return
+
+        for key in self.channel_throughput.keys():
+            output_file_name = os.path.join(
+                self.log_path,
+                f"channel_throughput_{key.country_code}_{key.security_mode}_{key.channel_bandwidth}mhz.html",
+            )
+            output_file(output_file_name)
+            channels = []
+            tx_throughputs = []
+            rx_throughputs = []
+
+            for throughput in sorted(
+                self.channel_throughput[key], key=lambda t: t.channel
+            ):
+                channels.append(str(throughput.channel))
+                tx_throughputs.append(throughput.tx_throughput_mbps)
+                rx_throughputs.append(throughput.rx_throughput_mbps)
+
+            channel_vs_throughput_data = ColumnDataSource(
+                data=dict(
+                    channels=channels,
+                    tx_throughput=tx_throughputs,
+                    rx_throughput=rx_throughputs,
+                )
+            )
+            TOOLTIPS = [
+                ("Channel", "@channels"),
+                ("TX_Throughput", "@tx_throughput"),
+                ("RX_Throughput", "@rx_throughput"),
+            ]
+            channel_vs_throughput_graph = figure(
+                title="Channels vs. Throughput",
+                x_axis_label="Channels",
+                x_range=channels,
+                y_axis_label="Throughput",
+                tooltips=TOOLTIPS,
+            )
+            channel_vs_throughput_graph.sizing_mode = "stretch_both"
+            channel_vs_throughput_graph.title.align = "center"
+            channel_vs_throughput_graph.line(
+                "channels",
+                "tx_throughput",
+                source=channel_vs_throughput_data,
+                line_width=2,
+                line_color="blue",
+                legend_label="TX_Throughput",
+            )
+            channel_vs_throughput_graph.circle(
+                "channels",
+                "tx_throughput",
+                source=channel_vs_throughput_data,
+                size=GRAPH_CIRCLE_SIZE,
+                color="blue",
+            )
+            channel_vs_throughput_graph.line(
+                "channels",
+                "rx_throughput",
+                source=channel_vs_throughput_data,
+                line_width=2,
+                line_color="red",
+                legend_label="RX_Throughput",
+            )
+            channel_vs_throughput_graph.circle(
+                "channels",
+                "rx_throughput",
+                source=channel_vs_throughput_data,
+                size=GRAPH_CIRCLE_SIZE,
+                color="red",
+            )
+
+            channel_vs_throughput_graph.legend.location = "top_left"
+            graph_file = save([channel_vs_throughput_graph])
+            self.log.info(f"Saved graph to {graph_file}")
+
+    def test_standard_deviation(self):
+        """Verify throughputs don't deviate too much across channels.
+
+        Assert the throughput standard deviation across all channels of the same
+        country, security, and bandwidth does not exceed the maximum specified in the
+        user param config. If no maximum is set, a default of 1.0 standard deviations
+        will be used (34.1% from the mean).
+
+        Raises:
+            TestFailure, if standard deviation of throughput exceeds max_std_dev
+        """
+        # If performance measurement is skipped
+        if not self.iperf_server:
+            return
+
+        max_std_dev = self.user_params.get("channel_sweep_test_params", {}).get(
+            "max_std_dev", DEFAULT_MAX_STD_DEV
+        )
+
+        self.log.info(
+            "Verifying standard deviation across channels does not exceed max standard "
+            f"deviation of {max_std_dev} Mb/s"
+        )
+
+        errors: list[str] = []
+
+        for test, throughputs in self.channel_throughput.items():
+            tx_values = []
+            rx_values = []
+            for throughput in throughputs:
+                if throughput.tx_throughput_mbps is not None:
+                    tx_values.append(throughput.tx_throughput_mbps)
+                if throughput.rx_throughput_mbps is not None:
+                    rx_values.append(throughput.rx_throughput_mbps)
+
+            tx_std_dev = pstdev(tx_values)
+            rx_std_dev = pstdev(rx_values)
+
+            if tx_std_dev > max_std_dev:
+                errors.append(
+                    f"[{test.country_code} {test.security_mode} "
+                    f"{test.channel_bandwidth}mhz] TX throughput standard deviation "
+                    f"{tx_std_dev} Mb/s exceeds expected max of {max_std_dev} Mb/s"
+                )
+            if rx_std_dev > max_std_dev:
+                errors.append(
+                    f"[{test.country_code} {test.security_mode} "
+                    f"{test.channel_bandwidth}mhz] RX throughput standard deviation "
+                    f"{rx_std_dev} Mb/s exceeds expected max of {max_std_dev} Mb/s"
+                )
+
+        if errors:
+            error_message = "\n - ".join(errors)
+            asserts.fail(
+                f"Failed to meet standard deviation expectations:\n - {error_message}"
+            )
+
+    def run_channel_performance(self, test: TestParams):
+        """Run a single channel performance test
+
+        Log results to csv file and throughput data.
+
+        1. Sets up network with test settings
+        2. Associates DUT
+        3. Runs traffic between DUT and iperf server (both directions)
+        4. Logs channel, tx_throughput (Mb/s), and rx_throughput (Mb/s) to
+           log file and throughput data.
+        5. Checks throughput values against minimum throughput thresholds.
+
+        Raises:
+            TestFailure, if throughput (either direction) is less than
+                the directions given minimum throughput threshold.
+        """
+        self.fuchsia_device.wlan_controller.set_country_code(
+            CountryCode(test.country_code)
+        )
+
+        if test.security_mode is not SecurityMode.OPEN:
+            if test.security_mode is SecurityMode.WEP:
+                password = utils.rand_hex_str(WEP_HEX_STRING_LENGTH)
+            else:
+                password = utils.rand_ascii_str(hostapd_constants.MIN_WPA_PSK_LENGTH)
+            security_profile = Security(
+                security_mode=test.security_mode, password=password
+            )
+            target_security = test.security_mode
+        else:
+            password = None
+            security_profile = Security()
+
+        ssid = self.setup_ap(test.channel, test.channel_bandwidth, security_profile)
+        associated = self.dut.associate(
+            ssid, target_pwd=password, target_security=target_security
+        )
+        if not associated:
+            if self.iperf_server:
+                self.log_to_file_and_throughput_data(test, None, None)
+            asserts.fail(f"Device failed to associate to network {ssid}")
+        self.log.info(f"DUT ({self.dut.identifier}) connected to network {ssid}.")
+        if self.iperf_server:
+            self.iperf_server.renew_test_interface_ip_address()
+            if not isinstance(self.iperf_server.test_interface, str):
+                raise TypeError(
+                    "For this test, iperf_server is required to specify the "
+                    "test_interface configuration option"
+                )
+
+            self.log.info(
+                "Getting ip address for iperf server. Will retry for "
+                f"{self.time_to_wait_for_ip_addr} seconds."
+            )
+            iperf_server_address = self.get_and_verify_iperf_address(
+                test.channel, self.iperf_server, self.iperf_server.test_interface
+            )
+            self.log.info(
+                "Getting ip address for DUT. Will retry for "
+                f"{self.time_to_wait_for_ip_addr} seconds."
+            )
+
+            if not isinstance(
+                self.iperf_client, (IPerfClientOverSsh, IPerfClientOverAdb)
+            ):
+                raise TypeError(
+                    f'Unknown iperf_client type "{type(self.iperf_client)}"'
+                )
+            if not isinstance(self.iperf_client.test_interface, str):
+                raise TypeError(
+                    "For this test, iperf_client is required to specify the "
+                    "test_interface configuration option"
+                )
+            iperf_client_address = self.get_and_verify_iperf_address(
+                test.channel, self.fuchsia_device, self.iperf_client.test_interface
+            )
+            tx_throughput = self.get_iperf_throughput(
+                iperf_server_address, iperf_client_address
+            )
+            rx_throughput = self.get_iperf_throughput(
+                iperf_server_address, iperf_client_address, reverse=True
+            )
+            self.log_to_file_and_throughput_data(test, tx_throughput, rx_throughput)
+            self.log.info(
+                f"Throughput (tx, rx): ({tx_throughput} Mb/s, {rx_throughput} Mb/s), "
+                "Minimum threshold (tx, rx): "
+                f"({test.expect_min_tx_throughput_mbps} Mb/s, "
+                f"{test.expect_min_rx_throughput_mbps} Mb/s)"
+            )
+            asserts.assert_greater(
+                tx_throughput,
+                test.expect_min_tx_throughput_mbps,
+                "tx throughput below the minimal threshold",
+            )
+            asserts.assert_greater(
+                rx_throughput,
+                test.expect_min_rx_throughput_mbps,
+                "rx throughput below the minimal threshold",
+            )
+
+
if __name__ == "__main__":
    # Allow running this test file directly through Mobly's test runner.
    test_runner.main()
diff --git a/tests/wlan/performance/WlanRvrTest.py b/tests/wlan/performance/WlanRvrTest.py
new file mode 100644
index 0000000..d864ac5
--- /dev/null
+++ b/tests/wlan/performance/WlanRvrTest.py
@@ -0,0 +1,742 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import itertools
+import logging
+import os
+import time
+from dataclasses import dataclass
+from enum import StrEnum, auto, unique
+
+from mobly import asserts, signals, test_runner
+from mobly.config_parser import TestRunConfig
+from mobly.records import TestResultRecord
+
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib.hostapd_constants import BandType
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.ap_lib.radvd import Radvd
+from antlion.controllers.ap_lib.radvd_config import RadvdConfig
+from antlion.controllers.attenuator import Attenuator, get_attenuators_for_device
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.iperf_server import IPerfResult, IPerfServerOverSsh
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+from antlion.utils import rand_ascii_str
+from antlion.validation import MapValidator
+
# hostapd profile name for whirlwind APs in legacy 11a/b/g mode.
AP_11ABG_PROFILE_NAME = "whirlwind_11ag_legacy"
# Unit label used when reporting iperf speeds.
REPORTING_SPEED_UNITS = "Mbps"

# File name of the combined RvR summary graph.
RVR_GRAPH_SUMMARY_FILE = "rvr_summary.html"

# Seconds to wait for IPv6 Duplicate Address Detection to resolve.
DAD_TIMEOUT_SEC = 30
+
+
@unique
class TrafficDirection(StrEnum):
    """Direction of iperf traffic relative to the DUT."""

    RX = auto()
    TX = auto()
+
+
@unique
class IPVersion(StrEnum):
    """IP protocol version used for the iperf traffic."""

    V4 = "ipv4"
    V6 = "ipv6"
+
+
@dataclass
class GraphData:
    """Data series for one throughput-vs-attenuation graph/CSV."""

    # X values: attenuation steps as strings (bokeh categorical axis).
    relative_attn: list[str]
    # Y values: measured throughput per attenuation step.
    throughput: list[int]
    # Axis labels for the rendered graph and CSV header.
    x_label: str
    y_label: str
+
+
@dataclass
class TestParams:
    """Parameters of a single generated RvR test case."""

    band: BandType
    security: Security
    ip_version: IPVersion
    direction: TrafficDirection
+
+
+def create_rvr_graph(
+    test_name: str, graph_path: str, graph_data: GraphData
+) -> list[object]:
+    """Creates the RvR graphs
+    Args:
+        test_name: The name of test that was run.  This is the title of the
+            graph
+        graph_path: Where to put the graph html file.
+        graph_data: A dictionary of the data to be graphed.
+    Returns:
+        A list of bokeh graph objects.
+    """
+    try:
+        from bokeh.plotting import (  # type: ignore
+            ColumnDataSource,
+            figure,
+            output_file,
+            save,
+        )
+    except ImportError:
+        logging.warn(
+            "bokeh is not installed: skipping creation of graphs. "
+            "Note CSV files are still available. If graphs are "
+            'desired, install antlion with the "bokeh" feature.'
+        )
+        return []
+
+    output_file(f"{graph_path}rvr_throughput_vs_attn_{test_name}.html", title=test_name)
+    throughput_vs_attn_data = ColumnDataSource(
+        data={
+            "relative_attn": graph_data.relative_attn,
+            "throughput": graph_data.throughput,
+        }
+    )
+    TOOLTIPS = [("Attenuation", "@relative_attn"), ("Throughput", "@throughput")]
+
+    throughput_vs_attn_graph = figure(
+        title=f"Throughput vs Relative Attenuation (Test Case: {test_name})",
+        x_axis_label=graph_data.x_label,
+        y_axis_label=graph_data.y_label,
+        x_range=graph_data.relative_attn,
+        tooltips=TOOLTIPS,
+    )
+    throughput_vs_attn_graph.sizing_mode = "stretch_width"
+    throughput_vs_attn_graph.title.align = "center"
+    throughput_vs_attn_graph.line(
+        "relative_attn", "throughput", source=throughput_vs_attn_data, line_width=2
+    )
+    throughput_vs_attn_graph.circle(
+        "relative_attn", "throughput", source=throughput_vs_attn_data, size=10
+    )
+    save([throughput_vs_attn_graph])
+    return [throughput_vs_attn_graph]
+
+
def write_csv_rvr_data(test_name: str, csv_path: str, graph_data: GraphData) -> None:
    """Writes the CSV data for the RvR test

    Args:
        test_name: The name of test that was run.
        csv_path: Directory (including trailing separator) to put the csv
            file in.
        graph_data: The data to be put in the csv file.
    """
    csv_file_name = f"{csv_path}rvr_throughput_vs_attn_{test_name}.csv"
    with open(csv_file_name, "w+") as csv_file:
        csv_file.write(f"{graph_data.x_label},{graph_data.y_label}\n")
        # Attenuation values are stored as strings for bokeh's categorical
        # axis; convert back to int for the CSV.
        for attn, throughput in zip(graph_data.relative_attn, graph_data.throughput):
            csv_file.write(f"{int(attn)},{throughput}\n")
+
+
+class WlanRvrTest(base_test.WifiBaseTest):
+    """Tests running WLAN RvR.
+
+    Test Bed Requirement:
+    * One Android device or Fuchsia device
+    * One Access Point
+    * One attenuator
+    * One Linux iPerf Server
+    """
+
    def __init__(self, configs: TestRunConfig) -> None:
        super().__init__(configs)
        # Accumulates bokeh figures across test cases; rendered into one
        # combined summary HTML page during teardown_class.
        self.rvr_graph_summary: list[object] = []
+
+    def pre_run(self) -> None:
+        test_params: list[TestParams] = []
+
+        for (
+            band,
+            security_mode,
+            ip_version,
+            direction,
+        ) in itertools.product(
+            [e for e in BandType],
+            [SecurityMode.OPEN, SecurityMode.WPA2],
+            [e for e in IPVersion],
+            [e for e in TrafficDirection],
+        ):
+            password: str | None = None
+            if security_mode is not SecurityMode.OPEN:
+                password = rand_ascii_str(20)
+            security = Security(security_mode, password)
+            test_params.append(
+                TestParams(
+                    band,
+                    security,
+                    ip_version,
+                    direction,
+                )
+            )
+
+        def generate_test_name(t: TestParams) -> str:
+            # TODO(http://b/303659781): Keep mode in sync with hostapd.
+            mode = "11n" if t.band is BandType.BAND_2G else "11ac"
+            frequency = "20mhz" if t.band is BandType.BAND_2G else "80mhz"
+            return (
+                f"test_rvr_{mode}_{t.band}_{frequency}_{t.security}_"
+                f"{t.direction}_{t.ip_version}"
+            )
+
+        self.generate_tests(
+            self._test_rvr, generate_test_name, [(p,) for p in test_params]
+        )
+
    def setup_class(self) -> None:
        """Read RvR user params and acquire the testbed controllers.

        Raises:
            TestAbortClass: if no access point is configured.
        """
        super().setup_class()
        self.log = logging.getLogger()

        self.fuchsia_device, self.dut = self.get_dut_type(
            FuchsiaDevice, AssociationMode.POLICY
        )

        # Attenuation sweep configuration, with defaults for a full sweep.
        params = MapValidator(self.user_params["rvr_settings"])
        self.starting_attn = params.get(int, "starting_attn", 0)
        self.ending_attn = params.get(int, "ending_attn", 95)
        self.step_size_in_db = params.get(int, "step_size_in_db", 1)
        self.dwell_time_in_secs = params.get(int, "dwell_time_in_secs", 10)

        self.reverse_rvr_after_forward = params.get(
            bool, "reverse_rvr_after_forward", False
        )
        # Dwell time and JSON output are always appended to the iperf flags.
        self.iperf_flags = params.get(str, "iperf_flags", "-i 1")
        self.iperf_flags += f" -t {self.dwell_time_in_secs} -J"
        self.debug_loop_count = params.get(int, "debug_loop_count", 1)

        # Router advertisement daemon; started lazily for IPv6 test cases.
        self.router_adv_daemon: Radvd | None = None

        if len(self.access_points) == 0:
            raise signals.TestAbortClass("Requires at least one access point")
        self.access_point = self.access_points[0]

        # Separate attenuator groups for the 2.4GHz and 5GHz radios.
        self.attenuators_2g = get_attenuators_for_device(
            self.controller_configs["AccessPoint"][0]["Attenuator"],
            self.attenuators,
            "attenuator_ports_wifi_2g",
        )
        self.attenuators_5g = get_attenuators_for_device(
            self.controller_configs["AccessPoint"][0]["Attenuator"],
            self.attenuators,
            "attenuator_ports_wifi_5g",
        )

        self.iperf_server = self.iperf_servers[0]

        # Prefer an explicitly configured iperf client; otherwise create one
        # on the DUT.
        if hasattr(self, "iperf_clients") and self.iperf_clients:
            self.dut_iperf_client = self.iperf_clients[0]
        else:
            self.dut_iperf_client = self.dut.create_iperf_client()

        self.access_point.stop_all_aps()
+
+    def setup_test(self) -> None:
+        super().setup_test()
+        if self.iperf_server:
+            self.iperf_server.start()
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockAcquireBright()
+                ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+
    def teardown_test(self) -> None:
        # cleanup_tests() stops iperf/radvd/APs and disconnects the DUT.
        self.cleanup_tests()
        super().teardown_test()
+
+    def teardown_class(self) -> None:
+        if self.router_adv_daemon:
+            self.router_adv_daemon.stop()
+        try:
+            from bokeh.plotting import output_file, save  # type: ignore
+
+            output_file(f"{self.log_path}/rvr_summary.html", title="RvR Summary")
+            save(list(self.rvr_graph_summary))
+        except ImportError:
+            logging.warn(
+                "bokeh is not installed: skipping creation of graphs. "
+                "Note CSV files are still available. If graphs are "
+                'desired, install antlion with the "bokeh" feature.'
+            )
+        except Exception as e:
+            self.log.error(f"Unable to generate RvR summary file: {e}")
+
+        super().teardown_class()
+
    def on_fail(self, record: TestResultRecord) -> None:
        # Capture failure artifacts first, then tear the testbed down.
        super().on_fail(record)
        self.cleanup_tests()
+
    def cleanup_tests(self) -> None:
        """Cleans up all the dangling pieces of the tests, for example, the
        iperf server, radvd, all the currently running APs, and the various
        clients running during the tests.
        """

        if self.router_adv_daemon:
            # Preserve radvd output for debugging before stopping it.
            full_output_path = os.path.join(self.log_path, "radvd_log.txt")
            with open(full_output_path, "w") as file:
                file.write(self.router_adv_daemon.pull_logs())
            self.router_adv_daemon.stop()
        if hasattr(self, "android_devices"):
            for ad in self.android_devices:
                ad.droid.wakeLockRelease()
                ad.droid.goToSleepNow()
        if self.iperf_server:
            self.iperf_server.stop()
        # Fully reset the DUT's WLAN state and collect logs before shutting
        # down the access point.
        self.dut.turn_location_off_and_scan_toggle_off()
        self.dut.disconnect()
        self.dut.reset_wifi()
        self.download_logs()
        self.access_point.stop_all_aps()
+
+    def _wait_for_ipv4_addrs(self) -> str:
+        """Wait for an IPv4 addresses to become available on the DUT and iperf
+        server.
+
+        Returns:
+           The private IPv4 address of the iperf server.
+
+        Raises:
+            TestFailure: If unable to acquire a IPv4 address.
+        """
+        ip_address_checker_counter = 0
+        ip_address_checker_max_attempts = 3
+        while ip_address_checker_counter < ip_address_checker_max_attempts:
+            self.iperf_server.renew_test_interface_ip_address()
+            iperf_server_ip_addresses = self.iperf_server.get_interface_ip_addresses(
+                self.iperf_server.test_interface
+            )
+            assert self.dut_iperf_client.test_interface is not None
+            dut_ip_addresses = self.fuchsia_device.get_interface_ip_addresses(
+                self.dut_iperf_client.test_interface
+            )
+
+            self.log.info(f"IPerf server IP info: {iperf_server_ip_addresses}")
+            self.log.info(f"DUT IP info: {dut_ip_addresses}")
+
+            if not iperf_server_ip_addresses["ipv4_private"]:
+                self.log.warn(
+                    "Unable to get the iperf server IPv4 " "address. Retrying..."
+                )
+                ip_address_checker_counter += 1
+                time.sleep(1)
+                continue
+
+            if dut_ip_addresses["ipv4_private"]:
+                return iperf_server_ip_addresses["ipv4_private"][0]
+
+            self.log.warn(
+                "Unable to get the DUT IPv4 address starting at "
+                f'attenuation "{self.starting_attn}". Retrying...'
+            )
+            ip_address_checker_counter += 1
+            time.sleep(1)
+
+        raise signals.TestFailure(
+            "IPv4 addresses are not available on both the DUT and iperf server."
+        )
+
+    # TODO (b/258264565): Merge with fuchsia_device wait_for_ipv6_addr.
+    def _wait_for_dad(
+        self, device: FuchsiaDevice | IPerfServerOverSsh, test_interface: str
+    ) -> str:
+        """Wait for Duplicate Address Detection to resolve so that an
+        private-local IPv6 address is available for test.
+
+        Args:
+            device: implementor of get_interface_ip_addresses
+            test_interface: name of interface that DAD is operating on
+
+        Returns:
+            A string containing the private-local IPv6 address of the device.
+
+        Raises:
+            TestFailure: If unable to acquire an IPv6 address.
+        """
+        now = time.time()
+        start = now
+        elapsed = now - start
+
+        while elapsed < DAD_TIMEOUT_SEC:
+            addrs = device.get_interface_ip_addresses(test_interface)
+            now = time.time()
+            elapsed = now - start
+            if addrs["ipv6_private_local"]:
+                # DAD has completed
+                addr = addrs["ipv6_private_local"][0]
+                self.log.info(f'DAD resolved with "{addr}" after {elapsed}s')
+                return addr
+            time.sleep(1)
+
+        raise signals.TestFailure(
+            "Unable to acquire a private-local IPv6 address for testing "
+            f"after {elapsed}s"
+        )
+
    def run_rvr(
        self,
        ssid: str,
        security: Security | None,
        band: BandType,
        traffic_dir: TrafficDirection,
        ip_version: IPVersion,
    ) -> GraphData:
        """Sets up and runs the RvR test.

        Args:
            ssid: The SSID for the client to associate to.
            security: Security of the AP, or None for an open network.
            band: 2g or 5g
            traffic_dir: rx or tx, bi is not supported by iperf3
            ip_version: 4 or 6

        Returns:
            The bokeh graph data.

        Raises:
            ValueError: If an invalid band or IP version is specified.
            TestFailure: If the DUT cannot associate at the starting
                attenuation (via asserts.fail) or DAD does not complete.
        """
        throughput: list[int] = []
        relative_attn: list[str] = []
        # Select the attenuators wired to the band under test.
        if band == "2g":
            rvr_attenuators = self.attenuators_2g
        elif band == "5g":
            rvr_attenuators = self.attenuators_5g
        else:
            raise ValueError(f"Invalid WLAN band specified: {band}")
        if ip_version is IPVersion.V6:
            # IPv6 addressing relies on router advertisements; run radvd on
            # the AP's bridge interface for the duration of the test.
            self.router_adv_daemon = Radvd(
                self.access_point.ssh,
                self.access_point.interfaces.get_bridge_interface()[0],
            )
            radvd_config = RadvdConfig()
            self.router_adv_daemon.start(radvd_config)

        for _ in range(0, self.debug_loop_count):
            for rvr_attenuator in rvr_attenuators:
                rvr_attenuator.set_atten(self.starting_attn)

            associate_counter = 0
            associate_max_attempts = 3
            # Retry association up to associate_max_attempts times; the
            # while-else branch fires only when every attempt has failed.
            while associate_counter < associate_max_attempts:
                if self.dut.associate(
                    ssid,
                    target_pwd=security.password if security else None,
                    target_security=(
                        security.security_mode if security else SecurityMode.OPEN
                    ),
                    check_connectivity=False,
                ):
                    break
                else:
                    associate_counter += 1
            else:
                asserts.fail(
                    f"Unable to associate at starting attenuation: {self.starting_attn}"
                )

            if ip_version is IPVersion.V4:
                iperf_server_ip_address = self._wait_for_ipv4_addrs()
            elif ip_version is IPVersion.V6:
                # For IPv6, both endpoints must finish Duplicate Address
                # Detection before traffic can run.
                self.iperf_server.renew_test_interface_ip_address()
                self.log.info(
                    "Waiting for iperf server to complete Duplicate "
                    "Address Detection..."
                )
                iperf_server_ip_address = self._wait_for_dad(
                    self.iperf_server, self.iperf_server.test_interface
                )

                self.log.info(
                    "Waiting for DUT to complete Duplicate Address Detection "
                    f'on interface "{self.dut_iperf_client.test_interface}"...'
                )
                assert self.dut_iperf_client.test_interface is not None
                _ = self._wait_for_dad(
                    self.fuchsia_device, self.dut_iperf_client.test_interface
                )
            else:
                raise ValueError(f"Invalid IP version: {ip_version}")

            # Forward sweep: low attenuation -> high attenuation.
            throughput, relative_attn = self.rvr_loop(
                traffic_dir,
                rvr_attenuators,
                iperf_server_ip_address,
                ip_version,
                ssid,
                security=security,
                reverse=False,
                throughput=throughput,
                relative_attn=relative_attn,
            )
            if self.reverse_rvr_after_forward:
                # Optional reverse sweep: high attenuation -> low attenuation.
                throughput, relative_attn = self.rvr_loop(
                    traffic_dir,
                    rvr_attenuators,
                    iperf_server_ip_address,
                    ip_version,
                    ssid=ssid,
                    security=security,
                    reverse=True,
                    throughput=throughput,
                    relative_attn=relative_attn,
                )
            self.dut.disconnect()

        return GraphData(
            relative_attn=relative_attn,
            throughput=throughput,
            x_label="Attenuation(db)",
            y_label=f"Throughput({REPORTING_SPEED_UNITS})",
        )
+
+    def rvr_loop(
+        self,
+        traffic_dir: TrafficDirection,
+        rvr_attenuators: list[Attenuator],
+        iperf_server_ip_address: str,
+        ip_version: IPVersion,
+        ssid: str,
+        security: Security | None,
+        reverse: bool,
+        throughput: list[int],
+        relative_attn: list[str],
+    ) -> tuple[list[int], list[str]]:
+        """The loop that goes through each attenuation level and runs the iperf
+        throughput pair.
+        Args:
+            traffic_dir: The traffic direction from the perspective of the DUT.
+            rvr_attenuators: A list of attenuators to set.
+            iperf_server_ip_address: The IP address of the iperf server.
+            ssid: The ssid of the wireless network that the should associated
+                to.
+            password: Password of the wireless network.
+            reverse: Whether to run RvR test starting from the highest
+                attenuation and going to the lowest.  This is run after the
+                normal low attenuation to high attenuation RvR test.
+            throughput: The list of throughput data for the test.
+            relative_attn: The list of attenuation data for the test.
+
+        Returns:
+            throughput: The list of throughput data for the test.
+            relative_attn: The list of attenuation data for the test.
+        """
+        iperf_flags = self.iperf_flags
+        if traffic_dir is TrafficDirection.RX:
+            iperf_flags = f"{self.iperf_flags} -R"
+        starting_attn = self.starting_attn
+        ending_attn = self.ending_attn
+        step_size_in_db = self.step_size_in_db
+        if reverse:
+            starting_attn = self.ending_attn
+            ending_attn = self.starting_attn
+            step_size_in_db = step_size_in_db * -1
+            self.dut.disconnect()
+        for step in range(starting_attn, ending_attn, step_size_in_db):
+            try:
+                for attenuator in rvr_attenuators:
+                    attenuator.set_atten(step)
+            except ValueError as e:
+                self.log.error(
+                    f"{step} is beyond the max or min of the testbed "
+                    f"attenuator's capability. Stopping. {e}"
+                )
+                break
+            self.log.info(f"Set relative attenuation to {step} db")
+
+            associated = self.dut.is_connected()
+            if associated:
+                self.log.info("DUT is currently associated.")
+            else:
+                self.log.info("DUT is not currently associated.")
+
+            if reverse:
+                if not associated:
+                    self.log.info(
+                        f"Trying to associate at relative attenuation of {step} db"
+                    )
+                    if self.dut.associate(
+                        ssid,
+                        target_pwd=security.password if security else None,
+                        target_security=(
+                            security.security_mode if security else SecurityMode.OPEN
+                        ),
+                        check_connectivity=False,
+                    ):
+                        associated = True
+                        self.log.info("Successfully associated.")
+                    else:
+                        associated = False
+                        self.log.info(
+                            f"Association failed. Marking a 0 {REPORTING_SPEED_UNITS} "
+                            "for throughput. Skipping running traffic."
+                        )
+            attn_value_inserted = False
+            value_to_insert = str(step)
+            while not attn_value_inserted:
+                if value_to_insert in relative_attn:
+                    value_to_insert = f"{value_to_insert} "
+                else:
+                    relative_attn.append(value_to_insert)
+
+            assert self.dut_iperf_client.test_interface is not None
+            dut_ip_addresses = self.fuchsia_device.get_interface_ip_addresses(
+                self.dut_iperf_client.test_interface
+            )
+            if ip_version is IPVersion.V4:
+                if not dut_ip_addresses["ipv4_private"]:
+                    self.log.info(
+                        "DUT does not have an IPv4 address. "
+                        "Traffic attempt to be run if the server "
+                        "is pingable."
+                    )
+                else:
+                    ipv4_private = dut_ip_addresses["ipv4_private"][0]
+                    self.log.info(
+                        f'DUT has the following IPv4 address: "{ipv4_private}"'
+                    )
+            elif ip_version is IPVersion.V6:
+                if not dut_ip_addresses["ipv6_private_local"]:
+                    self.log.info(
+                        "DUT does not have an IPv6 address. "
+                        "Traffic attempt to be run if the server "
+                        "is pingable."
+                    )
+                else:
+                    ipv6_private_local = dut_ip_addresses["ipv6_private_local"][0]
+                    self.log.info(
+                        f'DUT has the following IPv6 address: "{ipv6_private_local}"'
+                    )
+            ping_result = self.dut.ping(iperf_server_ip_address)
+            if not ping_result.success:
+                self.log.info(
+                    f'Iperf server "{iperf_server_ip_address}" is not pingable. '
+                    f"Marking a 0 {REPORTING_SPEED_UNITS} for throughput. "
+                    "Skipping running traffic."
+                )
+            else:
+                self.log.info(f'Iperf server "{iperf_server_ip_address}" is pingable.')
+            if ping_result.success:
+                if traffic_dir is TrafficDirection.TX:
+                    self.log.info(
+                        f"Running traffic DUT to {iperf_server_ip_address} at relative "
+                        f"attenuation of {step}"
+                    )
+                elif traffic_dir is TrafficDirection.RX:
+                    self.log.info(
+                        f"Running traffic {iperf_server_ip_address} to DUT at relative "
+                        f"attenuation of {step}"
+                    )
+                else:
+                    raise ValueError("Invalid traffic direction")
+                try:
+                    iperf_tag = "decreasing"
+                    if reverse:
+                        iperf_tag = "increasing"
+                    iperf_results_file = self.dut_iperf_client.start(
+                        iperf_server_ip_address,
+                        iperf_flags,
+                        f"{iperf_tag}_{traffic_dir}_{self.starting_attn}",
+                        timeout=(self.dwell_time_in_secs * 2),
+                    )
+                except TimeoutError as e:
+                    iperf_results_file = None
+                    self.log.error(
+                        f"Iperf traffic timed out. Marking 0 {REPORTING_SPEED_UNITS} for "
+                        f"throughput. {e}"
+                    )
+
+                if not iperf_results_file:
+                    throughput.append(0)
+                else:
+                    try:
+                        iperf_results = IPerfResult(
+                            iperf_results_file,
+                            reporting_speed_units=REPORTING_SPEED_UNITS,
+                        )
+                        if iperf_results.error:
+                            self.iperf_server.stop()
+                            self.iperf_server.start()
+                            self.log.error(
+                                f"Errors in iperf logs:\n{iperf_results.error}"
+                            )
+                        if not iperf_results.avg_send_rate:
+                            throughput.append(0)
+                        else:
+                            throughput.append(iperf_results.avg_send_rate)
+                    except ValueError as e:
+                        self.iperf_server.stop()
+                        self.iperf_server.start()
+                        self.log.error(
+                            f"No data in iPerf3 file. Marking 0 {REPORTING_SPEED_UNITS} "
+                            f"for throughput: {e}"
+                        )
+                        throughput.append(0)
+                    except Exception as e:
+                        self.iperf_server.stop()
+                        self.iperf_server.start()
+                        self.log.error(
+                            f"Unknown exception. Marking 0 {REPORTING_SPEED_UNITS} for "
+                            f"throughput: {e}"
+                        )
+                        self.log.error(e)
+                        throughput.append(0)
+
+                self.log.info(
+                    f"Iperf traffic complete. {traffic_dir} traffic received at "
+                    f"{throughput[-1]} {REPORTING_SPEED_UNITS} at relative attenuation "
+                    f"of {str(relative_attn[-1]).strip()} db"
+                )
+
+            else:
+                self.log.debug(f"DUT Associated: {associated}")
+                self.log.debug(f"{iperf_server_ip_address} pingable: {ping_result}")
+                throughput.append(0)
+        return throughput, relative_attn
+
+    def _test_rvr(self, t: TestParams) -> None:
+        ssid = rand_ascii_str(20)
+        setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            channel=t.band.default_channel(),
+            ssid=ssid,
+            setup_bridge=True,
+        )
+        graph_data = self.run_rvr(
+            ssid,
+            security=t.security,
+            band=t.band,
+            traffic_dir=t.direction,
+            ip_version=t.ip_version,
+        )
+        for rvr_graph in create_rvr_graph(
+            self.current_test_info.name,
+            self.current_test_info.output_path(),
+            graph_data,
+        ):
+            self.rvr_graph_summary.append(rvr_graph)
+        write_csv_rvr_data(
+            self.current_test_info.name,
+            self.current_test_info.output_path(),
+            graph_data,
+        )
+
+
if __name__ == "__main__":
    # Hand control to Mobly's test runner when executed as a script.
    test_runner.main()
diff --git a/tests/wlan/performance/WlanWmmTest.py b/tests/wlan/performance/WlanWmmTest.py
new file mode 100644
index 0000000..aebdaa5
--- /dev/null
+++ b/tests/wlan/performance/WlanWmmTest.py
@@ -0,0 +1,904 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import operator
+import time
+
+from mobly import asserts, test_runner
+
+from antlion import context, utils
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants, hostapd_security
+from antlion.test_utils.abstract_devices import wmm_transceiver
+from antlion.test_utils.abstract_devices.wlan_device import (
+    AssociationMode,
+    create_wlan_device,
+)
+from antlion.test_utils.fuchsia import wmm_test_cases
+from antlion.test_utils.wifi import base_test
+
# HT (802.11n) capabilities advertised by the default 20 MHz AP configuration.
DEFAULT_N_CAPABILITIES_20_MHZ = [
    hostapd_constants.N_CAPABILITY_LDPC,
    hostapd_constants.N_CAPABILITY_SGI20,
    hostapd_constants.N_CAPABILITY_TX_STBC,
    hostapd_constants.N_CAPABILITY_RX_STBC1,
    hostapd_constants.N_CAPABILITY_HT20,
]

# Baseline setup_ap kwargs used when a test does not override them.
DEFAULT_AP_PARAMS = {
    "profile_name": "whirlwind",
    "channel": hostapd_constants.AP_DEFAULT_CHANNEL_2G,
    "n_capabilities": DEFAULT_N_CAPABILITIES_20_MHZ,
    "ac_capabilities": None,
}

# Fraction of the reference bandwidth a stream is expected to reach by
# default (1 == 100%).
DEFAULT_BW_PERCENTAGE = 1
# Seconds to wait for a traffic stream before timing out.
DEFAULT_STREAM_TIMEOUT = 60
# Default duration, in seconds, of each traffic stream.
DEFAULT_STREAM_TIME = 10

# Maps the comparison strings used in validation configs to their operators.
OPERATORS = {
    ">": operator.gt,
    ">=": operator.ge,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
}

# Bokeh graph styling constants.
GRAPH_COLOR_LEN = 10
GRAPH_DEFAULT_LINE_WIDTH = 2
GRAPH_DEFAULT_CIRCLE_SIZE = 10
+
+
def eval_operator(
    operator_string,
    actual_value,
    expected_value,
    max_bw,
    rel_tolerance=0,
    abs_tolerance=0,
    max_bw_rel_tolerance=0,
):
    """
    Determines if an inequality evaluates to True, given relative and absolute
    tolerance.

    Args:
        operator_string: string, the operator to use for the comparison
        actual_value: the value to compare to some expected value
        expected_value: the value the actual value is compared to
        max_bw: the maximum allowed bandwidth, used as the reference for
            max_bw_rel_tolerance
        rel_tolerance: decimal representing the percent tolerance, relative to
            the expected value. E.g. (101 <= 100) w/ rel_tol=0.01 is True
        abs_tolerance: the lowest actual (not percent) tolerance for error.
            E.g. (101 == 100) w/ rel_tol=0.005 is False, but
            (101 == 100) w/ rel_tol=0.005 and abs_tol=1 is True
        max_bw_rel_tolerance: decimal representing the percent tolerance,
            relative to the maximimum allowed bandwidth.
            E.g. (101 <= max bw of 100) w/ max_bw_rel_tol=0.01 is True

    Returns:
        True, if inequality evaluates to True within tolerances
        False, otherwise
    """
    comparison = OPERATORS[operator_string]
    if comparison(actual_value, expected_value):
        return True

    # Strict comparison failed: accept the result anyway if its error falls
    # within the widest of the three configured tolerances.
    allowed_error = max(
        expected_value * rel_tolerance,
        abs_tolerance,
        max_bw * max_bw_rel_tolerance,
    )
    return abs(actual_value - expected_value) <= allowed_error
+
+
+class WlanWmmTest(base_test.WifiBaseTest):
+    """Tests WMM QoS Functionality (Station only)
+
+    Testbed Requirements:
+    * One ACTS compatible wlan_device (staut)
+    * One Whirlwind Access Point
+    * For some tests, One additional ACTS compatible device (secondary_sta)
+
+    For accurate results, must be performed in an RF isolated environment.
+    """
+
+    def setup_class(self):
+        super().setup_class()
+        self.log = logging.getLogger()
+
+        try:
+            self.wmm_test_params = self.user_params["wmm_test_params"]
+            self._wmm_transceiver_configs = self.wmm_test_params["wmm_transceivers"]
+        except KeyError:
+            raise AttributeError(
+                "Must provide at least 2 WmmTransceivers in "
+                '"wmm_test_params" field of ACTS config.'
+            )
+
+        if len(self._wmm_transceiver_configs) < 2:
+            raise AttributeError("At least 2 WmmTransceivers must be provided.")
+
+        self.android_devices = self.android_devices
+        self.fuchsia_devices = self.fuchsia_devices
+
+        self.wlan_devices = [
+            create_wlan_device(device, AssociationMode.POLICY)
+            for device in self.android_devices + self.fuchsia_devices
+        ]
+
+        # Create STAUT transceiver
+        if "staut" not in self._wmm_transceiver_configs:
+            raise AttributeError(
+                'Must provide a WmmTransceiver labeled "staut" with a ' "wlan_device."
+            )
+        self.staut = wmm_transceiver.create(
+            self._wmm_transceiver_configs["staut"],
+            identifier="staut",
+            wlan_devices=self.wlan_devices,
+        )
+
+        # Required to for automated power cycling
+        self.dut = self.staut.wlan_device
+
+        # Create AP transceiver
+        if "access_point" not in self._wmm_transceiver_configs:
+            raise AttributeError(
+                'Must provide a WmmTransceiver labeled "access_point" with a '
+                "access_point."
+            )
+        self.access_point_transceiver = wmm_transceiver.create(
+            self._wmm_transceiver_configs["access_point"],
+            identifier="access_point",
+            access_points=self.access_points,
+        )
+
+        self.wmm_transceivers = [self.staut, self.access_point_transceiver]
+
+        # Create secondary station transceiver, if present
+        if "secondary_sta" in self._wmm_transceiver_configs:
+            self.secondary_sta = wmm_transceiver.create(
+                self._wmm_transceiver_configs["secondary_sta"],
+                identifier="secondary_sta",
+                wlan_devices=self.wlan_devices,
+            )
+            self.wmm_transceivers.append(self.secondary_sta)
+        else:
+            self.secondary_sta = None
+
+        self.wmm_transceiver_map = {tc.identifier: tc for tc in self.wmm_transceivers}
+
+    def setup_test(self):
+        super().setup_test()
+        for tc in self.wmm_transceivers:
+            if tc.wlan_device:
+                tc.wlan_device.wifi_toggle_state(True)
+                tc.wlan_device.disconnect()
+            if tc.access_point:
+                tc.access_point.stop_all_aps()
+
    def teardown_test(self):
        """Cleans up streams and network state on every transceiver, then
        defers to the base class for common teardown."""
        for tc in self.wmm_transceivers:
            # Stop any asynchronous traffic streams left running by the test.
            tc.cleanup_asynchronous_streams()
            if tc.wlan_device:
                tc.wlan_device.disconnect()
                tc.wlan_device.reset_wifi()
            # NOTE(review): this runs once per transceiver, so logs are
            # downloaded multiple times per test -- confirm whether it should
            # sit outside the loop.
            self.download_logs()
            if tc.access_point:
                tc.access_point.stop_all_aps()
        super().teardown_test()
+
+    def teardown_class(self):
+        for tc in self.wmm_transceivers:
+            tc.destroy_resources()
+        super().teardown_class()
+
+    def start_ap_with_wmm_params(self, ap_parameters, wmm_parameters):
+        """Sets up WMM network on AP.
+
+        Args:
+            ap_parameters: a dictionary of kwargs to set up on ap
+            wmm_parameters: a dictionary of wmm_params to set up on ap
+
+        Returns:
+            String, subnet of the network setup (e.g. '192.168.1.0/24')
+        """
+        # Defaults for required parameters
+        ap_parameters["force_wmm"] = True
+        if "ssid" not in ap_parameters:
+            ap_parameters["ssid"] = utils.rand_ascii_str(
+                hostapd_constants.AP_SSID_LENGTH_2G
+            )
+
+        if "profile_name" not in ap_parameters:
+            ap_parameters["profile_name"] = "whirlwind"
+
+        if "channel" not in ap_parameters:
+            ap_parameters["channel"] = 6
+
+        if "n_capabilities" not in ap_parameters:
+            ap_parameters["n_capabilities"] = DEFAULT_N_CAPABILITIES_20_MHZ
+
+        if "additional_ap_parameters" in ap_parameters:
+            ap_parameters["additional_ap_parameters"].update(wmm_parameters)
+        else:
+            ap_parameters["additional_ap_parameters"] = wmm_parameters
+
+        # Optional security
+        security_config = ap_parameters.get("security_config", None)
+        if security_config:
+            ap_parameters["security"] = hostapd_security.Security(**security_config)
+            ap_parameters.pop("security_config")
+
+        # Start AP with kwargs
+        self.log.info(f"Setting up WMM network: {ap_parameters['ssid']}")
+        setup_ap(self.access_point_transceiver.access_point, **ap_parameters)
+        self.log.info(f"Network ({ap_parameters['ssid']}) is up.")
+
+        # Return subnet
+        if ap_parameters["channel"] < hostapd_constants.LOWEST_5G_CHANNEL:
+            return self.access_point_transceiver.access_point._AP_2G_SUBNET_STR
+        else:
+            return self.access_point_transceiver.access_point._AP_5G_SUBNET_STR
+
+    def associate_transceiver(self, wmm_transceiver, ap_params):
+        """Associates a WmmTransceiver that has a wlan_device.
+
+        Args:
+            wmm_transceiver: transceiver to associate
+            ap_params: dict, contains ssid and password, if any, for network
+        """
+        if not wmm_transceiver.wlan_device:
+            raise AttributeError(
+                "Cannot associate a WmmTransceiver that does not have a WLAN device."
+            )
+        ssid = ap_params["ssid"]
+        password = None
+        target_security = None
+        security = ap_params.get("security")
+        if security:
+            password = security.password
+            target_security = (
+                hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
+                    security.security_mode_string
+                )
+            )
+        associated = wmm_transceiver.wlan_device.associate(
+            target_ssid=ssid, target_pwd=password, target_security=target_security
+        )
+        if not associated:
+            raise ConnectionError(
+                f"Failed to associate WmmTransceiver {wmm_transceiver.identifier}."
+            )
+        self.log.info(f"WmmTransceiver {wmm_transceiver.identifier} associated.")
+
    def validate_streams_in_phase(self, phase_id, phases, max_bw):
        """Validates any stream in a phase that has validation criteria.

        Each entry in a stream's "validation" list derives its reference
        bandwidth from one of three sources: an explicit "bandwidth" value in
        the check, another stream's measured rate ("phase" + "stream" keys),
        or the stream's own expected/max bandwidth.

        Args:
            phase_id: identifier of the phase to check
            phases: dictionary containing phases for retrieving stream
                transmitters, expected bandwidths, etc.
            max_bw: the max link bandwidth, measured in the test

        Returns:
            True, if ALL validation criteria for ALL streams in phase pass
            False, otherwise

        Raises:
            ConnectionError: if a stream has no measured bandwidth, which
                indicates the stream never actually ran.
        """
        pass_val = True
        for stream_id, stream in phases[phase_id].items():
            if "validation" in stream:
                transmitter = stream["transmitter"]
                uuid = stream["uuid"]
                actual_bw = transmitter.get_results(uuid).avg_rate
                if not actual_bw:
                    raise ConnectionError(
                        "(Phase: %s, Stream: %s) - Stream results show "
                        "bandwidth: None" % (phase_id, stream_id)
                    )
                for check in stream["validation"]:
                    operator_str = check["operator"]
                    rel_tolerance = check.get("rel_tolerance", 0)
                    abs_tolerance = check.get("abs_tolerance", 0)
                    max_bw_rel_tolerance = check.get("max_bw_rel_tolerance", 0)
                    expected_bw_percentage = check.get(
                        "bandwidth_percentage", DEFAULT_BW_PERCENTAGE
                    )
                    # Every branch below sets comp_bw (the reference
                    # bandwidth) and log_msg (how it was derived).
                    # Explicit Bandwidth Validation
                    if "bandwidth" in check:
                        comp_bw = check["bandwidth"]
                        log_msg = (
                            "Expected Bandwidth: %s (explicit validation "
                            "bandwidth [%s] x expected bandwidth "
                            "percentage [%s])"
                            % (
                                expected_bw_percentage * comp_bw,
                                comp_bw,
                                expected_bw_percentage,
                            )
                        )

                    # Stream Comparison Validation
                    elif "phase" in check and "stream" in check:
                        comp_phase_id = check["phase"]
                        comp_stream_id = check["stream"]
                        comp_stream = phases[comp_phase_id][comp_stream_id]
                        comp_transmitter = comp_stream["transmitter"]
                        comp_uuid = comp_stream["uuid"]
                        comp_bw = comp_transmitter.get_results(comp_uuid).avg_rate
                        log_msg = (
                            "Expected Bandwidth: %s (bandwidth for phase: %s, "
                            "stream: %s [%s] x expected bandwidth percentage "
                            "[%s])"
                            % (
                                expected_bw_percentage * comp_bw,
                                comp_phase_id,
                                comp_stream_id,
                                comp_bw,
                                expected_bw_percentage,
                            )
                        )

                    # Expected Bandwidth Validation
                    else:
                        if "bandwidth" in stream:
                            comp_bw = stream["bandwidth"]
                            log_msg = (
                                "Expected Bandwidth: %s (expected stream "
                                "bandwidth [%s] x expected bandwidth "
                                "percentage [%s])"
                                % (
                                    expected_bw_percentage * comp_bw,
                                    comp_bw,
                                    expected_bw_percentage,
                                )
                            )
                        else:
                            max_bw_percentage = stream.get(
                                "max_bandwidth_percentage", DEFAULT_BW_PERCENTAGE
                            )
                            comp_bw = max_bw * max_bw_percentage
                            log_msg = (
                                "Expected Bandwidth: %s (max bandwidth [%s] x "
                                "stream bandwidth percentage [%s] x expected "
                                "bandwidth percentage [%s])"
                                % (
                                    expected_bw_percentage * comp_bw,
                                    max_bw,
                                    max_bw_percentage,
                                    expected_bw_percentage,
                                )
                            )

                    self.log.info(
                        "Validation criteria - Stream: %s, "
                        "Actual Bandwidth: %s, Operator: %s, %s, "
                        "Relative Tolerance: %s, Absolute Tolerance: %s, Max "
                        "Bandwidth Relative Tolerance: %s"
                        % (
                            stream_id,
                            actual_bw,
                            operator_str,
                            log_msg,
                            rel_tolerance,
                            abs_tolerance,
                            max_bw_rel_tolerance,
                        )
                    )

                    # A single failed check fails the phase, but all checks
                    # are still evaluated so every result gets logged.
                    if eval_operator(
                        operator_str,
                        actual_bw,
                        comp_bw * expected_bw_percentage,
                        max_bw,
                        rel_tolerance=rel_tolerance,
                        abs_tolerance=abs_tolerance,
                        max_bw_rel_tolerance=max_bw_rel_tolerance,
                    ):
                        self.log.info(
                            "(Phase: %s, Stream: %s) - PASSES validation check!"
                            % (phase_id, stream_id)
                        )
                    else:
                        self.log.info(
                            "(Phase: %s, Stream: %s) - Stream FAILS validation "
                            "check." % (phase_id, stream_id)
                        )
                        pass_val = False
        if pass_val:
            self.log.info(
                f"(Phase {phase_id}) - All streams' validation criteria were met."
            )
            return True
        else:
            self.log.error(
                "(Phase %s) - At least one stream validation criterion was not "
                "met." % phase_id
            )
            return False
+
+    def graph_test(self, phases, max_bw):
+        """Outputs a bokeh html graph of the streams. Saves to ACTS log
+        directory.
+
+        Args:
+            phases: dictionary containing phases for retrieving stream
+                transmitters, expected bandwidths, etc.
+            max_bw: the max link bandwidth, measured in the test
+
+        """
+
+        try:
+            from bokeh.models import Label, Span
+            from bokeh.palettes import Category10
+            from bokeh.plotting import ColumnDataSource, figure, output_file, save
+        except ImportError:
+            self.log.warn(
+                "bokeh is not installed: skipping creation of graphs. "
+                "Note CSV files are still available. If graphs are "
+                'desired, install antlion with the "bokeh" feature.'
+            )
+            return
+
+        output_path = context.get_current_context().get_base_output_path()
+        output_file_name = "%s/WlanWmmTest/%s.html" % (
+            output_path,
+            self.current_test_info.name,
+        )
+        output_file(output_file_name)
+
+        start_time = 0
+        graph_lines = []
+
+        # Used for scaling
+        highest_stream_bw = 0
+        lowest_stream_bw = 100000
+
+        for phase_id, phase in phases.items():
+            longest_stream_time = 0
+            for stream_id, stream in phase.items():
+                transmitter = stream["transmitter"]
+                uuid = stream["uuid"]
+
+                if "bandwidth" in stream:
+                    stream_bw = f"{stream['bandwidth']:.3f}"
+                    stream_bw_formula_str = f"{stream_bw}Mb/s"
+                elif "max_bandwidth_percentage" in stream:
+                    max_bw_percentage = stream["max_bandwidth_percentage"]
+                    stream_bw = f"{max_bw * max_bw_percentage:.3f}"
+                    stream_bw_formula_str = "%sMb/s (%s%% of max bandwidth)" % (
+                        stream_bw,
+                        str(max_bw_percentage * 100),
+                    )
+                else:
+                    raise AttributeError(
+                        "Stream %s must have either a bandwidth or "
+                        "max_bandwidth_percentage parameter." % stream_id
+                    )
+
+                stream_time = stream.get("time", DEFAULT_STREAM_TIME)
+                longest_stream_time = max(longest_stream_time, stream_time)
+
+                avg_rate = transmitter.get_results(uuid).avg_rate
+
+                instantaneous_rates = transmitter.get_results(uuid).instantaneous_rates
+                highest_stream_bw = max(highest_stream_bw, max(instantaneous_rates))
+                lowest_stream_bw = min(lowest_stream_bw, min(instantaneous_rates))
+
+                stream_data = ColumnDataSource(
+                    dict(
+                        time=[x for x in range(start_time, start_time + stream_time)],
+                        instantaneous_bws=instantaneous_rates,
+                        avg_bw=[avg_rate for _ in range(stream_time)],
+                        stream_id=[stream_id for _ in range(stream_time)],
+                        attempted_bw=[
+                            stream_bw_formula_str for _ in range(stream_time)
+                        ],
+                    )
+                )
+                line = {
+                    "x_axis": "time",
+                    "y_axis": "instantaneous_bws",
+                    "source": stream_data,
+                    "line_width": GRAPH_DEFAULT_LINE_WIDTH,
+                    "legend_label": f"{phase_id}:{stream_id}",
+                }
+                graph_lines.append(line)
+
+            start_time = start_time + longest_stream_time
+        TOOLTIPS = [
+            ("Time", "@time"),
+            ("Attempted Bandwidth", "@attempted_bw"),
+            ("Instantaneous Bandwidth", "@instantaneous_bws"),
+            ("Stream Average Bandwidth", "@avg_bw"),
+            ("Stream", "@stream_id"),
+        ]
+
+        # Create and scale graph appropriately
+        time_vs_bandwidth_graph = figure(
+            title=f"Bandwidth for {self.current_test_info.name}",
+            x_axis_label="Time",
+            y_axis_label="Bandwidth",
+            tooltips=TOOLTIPS,
+            y_range=(
+                lowest_stream_bw - (0.5 * (highest_stream_bw - lowest_stream_bw)),
+                1.05 * max_bw,
+            ),
+        )
+        time_vs_bandwidth_graph.sizing_mode = "stretch_both"
+        time_vs_bandwidth_graph.title.align = "center"
+        colors = Category10[GRAPH_COLOR_LEN]
+        color_ind = 0
+
+        # Draw max bandwidth line
+        max_bw_span = Span(
+            location=max_bw,
+            dimension="width",
+            line_color="black",
+            line_dash="dashed",
+            line_width=GRAPH_DEFAULT_LINE_WIDTH,
+        )
+        max_bw_label = Label(
+            x=(0.5 * start_time),
+            y=max_bw,
+            text=f"Max Bandwidth: {max_bw}Mb/s",
+            text_align="center",
+        )
+        time_vs_bandwidth_graph.add_layout(max_bw_span)
+        time_vs_bandwidth_graph.add_layout(max_bw_label)
+
+        # Draw stream lines
+        for line in graph_lines:
+            time_vs_bandwidth_graph.line(
+                line["x_axis"],
+                line["y_axis"],
+                source=line["source"],
+                line_width=line["line_width"],
+                legend_label=line["legend_label"],
+                color=colors[color_ind],
+            )
+            time_vs_bandwidth_graph.circle(
+                line["x_axis"],
+                line["y_axis"],
+                source=line["source"],
+                size=GRAPH_DEFAULT_CIRCLE_SIZE,
+                legend_label=line["legend_label"],
+                color=colors[color_ind],
+            )
+            color_ind = (color_ind + 1) % GRAPH_COLOR_LEN
+        time_vs_bandwidth_graph.legend.location = "top_left"
+        time_vs_bandwidth_graph.legend.click_policy = "hide"
+        graph_file = save([time_vs_bandwidth_graph])
+        self.log.info(f"Saved graph to {graph_file}")
+
+    def run_wmm_test(
+        self,
+        phases,
+        ap_parameters=DEFAULT_AP_PARAMS,
+        wmm_parameters=hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS,
+        stream_timeout=DEFAULT_STREAM_TIMEOUT,
+    ):
+        """Runs a WMM test case.
+
+        Args:
+            phases: dictionary of phases of streams to run in parallel,
+                including any validation criteria (see example below).
+            ap_parameters: dictionary of custom kwargs to setup on AP (see
+                start_ap_with_wmm_parameters)
+            wmm_parameters: dictionary of WMM AC parameters
+            stream_timeout: int, time in seconds to wait before force joining
+                parallel streams
+
+        Asserts:
+            PASS, if all validation criteria for all phases are met
+            FAIL, otherwise
+        """
+        # Setup AP
+        subnet_str = self.start_ap_with_wmm_params(ap_parameters, wmm_parameters)
+        # Determine transmitters and receivers used in test case
+        transmitters = set()
+        receivers = set()
+        for phase in phases.values():
+            for stream in phase.values():
+                transmitter = self.wmm_transceiver_map[stream["transmitter_str"]]
+                transmitters.add(transmitter)
+                stream["transmitter"] = transmitter
+                receiver = self.wmm_transceiver_map[stream["receiver_str"]]
+                receivers.add(receiver)
+                stream["receiver"] = receiver
+        transceivers = transmitters.union(receivers)
+
+        # Associate all transceivers with wlan_devices
+        for tc in transceivers:
+            if tc.wlan_device:
+                self.associate_transceiver(tc, ap_parameters)
+
+        # Determine link max bandwidth
+        self.log.info("Determining link maximum bandwidth.")
+        uuid = self.staut.run_synchronous_traffic_stream(
+            {"receiver": self.access_point_transceiver}, subnet_str
+        )
+        max_bw = self.staut.get_results(uuid).avg_send_rate
+        self.log.info(f"Link maximum bandwidth: {max_bw} Mb/s")
+
+        # Run parallel phases
+        pass_test = True
+        for phase_id, phase in phases.items():
+            self.log.info(f"Setting up phase: {phase_id}")
+
+            for stream_id, stream in phase.items():
+                transmitter = stream["transmitter"]
+                receiver = stream["receiver"]
+                access_category = stream.get("access_category", None)
+                stream_time = stream.get("time", DEFAULT_STREAM_TIME)
+
+                # Determine stream type
+                if "bandwidth" in stream:
+                    bw = stream["bandwidth"]
+                elif "max_bandwidth_percentage" in stream:
+                    max_bw_percentage = stream["max_bandwidth_percentage"]
+                    bw = max_bw * max_bw_percentage
+                else:
+                    raise AttributeError(
+                        "Stream %s must have either a bandwidth or "
+                        "max_bandwidth_percentage parameter." % stream_id
+                    )
+
+                stream_params = {
+                    "receiver": receiver,
+                    "access_category": access_category,
+                    "bandwidth": bw,
+                    "time": stream_time,
+                }
+
+                uuid = transmitter.prepare_asynchronous_stream(
+                    stream_params, subnet_str
+                )
+                stream["uuid"] = uuid
+
+            # Start all streams in phase
+            start_time = time.time() + 5
+            for transmitter in transmitters:
+                transmitter.start_asynchronous_streams(start_time=start_time)
+
+            # Wait for streams to join
+            for transmitter in transmitters:
+                end_time = time.time() + stream_timeout
+                while transmitter.has_active_streams:
+                    if time.time() > end_time:
+                        raise ConnectionError(
+                            "Transmitter's (%s) active streams are not finishing."
+                            % transmitter.identifier
+                        )
+                    time.sleep(1)
+
+            # Cleanup all streams
+            for transmitter in transmitters:
+                transmitter.cleanup_asynchronous_streams()
+
+            # Validate streams
+            pass_test = pass_test and self.validate_streams_in_phase(
+                phase_id, phases, max_bw
+            )
+
+        self.graph_test(phases, max_bw)
+        if pass_test:
+            asserts.explicit_pass(
+                "Validation criteria met for all streams in all phases."
+            )
+        else:
+            asserts.fail("At least one stream failed to meet validation criteria.")
+
+    # Test Cases
+
+    # Internal Traffic Differentiation
+
+    def test_internal_traffic_diff_VO_VI(self):
+        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_VI)
+
+    def test_internal_traffic_diff_VO_BE(self):
+        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_BE)
+
+    def test_internal_traffic_diff_VO_BK(self):
+        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VO_BK)
+
+    def test_internal_traffic_diff_VI_BE(self):
+        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VI_BE)
+
+    def test_internal_traffic_diff_VI_BK(self):
+        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_VI_BK)
+
+    def test_internal_traffic_diff_BE_BK(self):
+        self.run_wmm_test(wmm_test_cases.test_internal_traffic_diff_BE_BK)
+
+    # External Traffic Differentiation
+
+    """Single station, STAUT transmits high priority"""
+
+    def test_external_traffic_diff_staut_VO_ap_VI(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VO_ap_VI)
+
+    def test_external_traffic_diff_staut_VO_ap_BE(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BE)
+
+    def test_external_traffic_diff_staut_VO_ap_BK(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VO_ap_BK)
+
+    def test_external_traffic_diff_staut_VI_ap_BE(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BE)
+
+    def test_external_traffic_diff_staut_VI_ap_BK(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VI_ap_BK)
+
+    def test_external_traffic_diff_staut_BE_ap_BK(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BE_ap_BK)
+
+    """Single station, STAUT transmits low priority"""
+
+    def test_external_traffic_diff_staut_VI_ap_VO(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_VI_ap_VO)
+
+    def test_external_traffic_diff_staut_BE_ap_VO(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VO)
+
+    def test_external_traffic_diff_staut_BK_ap_VO(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VO)
+
+    def test_external_traffic_diff_staut_BE_ap_VI(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BE_ap_VI)
+
+    def test_external_traffic_diff_staut_BK_ap_VI(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BK_ap_VI)
+
+    def test_external_traffic_diff_staut_BK_ap_BE(self):
+        self.run_wmm_test(wmm_test_cases.test_external_traffic_diff_staut_BK_ap_BE)
+
+    # Dual Internal/External Traffic Differentiation (Single station)
+
+    def test_dual_traffic_diff_staut_VO_VI_ap_VI(self):
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VO_VI_ap_VI)
+
+    def test_dual_traffic_diff_staut_VO_BE_ap_BE(self):
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VO_BE_ap_BE)
+
+    def test_dual_traffic_diff_staut_VO_BK_ap_BK(self):
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VO_BK_ap_BK)
+
+    def test_dual_traffic_diff_staut_VI_BE_ap_BE(self):
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VI_BE_ap_BE)
+
+    def test_dual_traffic_diff_staut_VI_BK_ap_BK(self):
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_VI_BK_ap_BK)
+
+    def test_dual_traffic_diff_staut_BE_BK_ap_BK(self):
+        self.run_wmm_test(wmm_test_cases.test_dual_traffic_diff_staut_BE_BK_ap_BK)
+
+    # ACM Bit Conformance Tests (Single station, as WFA test below uses two)
+
+    def test_acm_bit_on_VI(self):
+        wmm_params_VI_ACM = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_VI
+        )
+        self.run_wmm_test(
+            wmm_test_cases.test_acm_bit_on_VI, wmm_parameters=wmm_params_VI_ACM
+        )
+
+    # AC Parameter Modification Tests (Single station, as WFA test below uses two)
+
+    def test_ac_param_degrade_VO(self):
+        self.run_wmm_test(
+            wmm_test_cases.test_ac_param_degrade_VO,
+            wmm_parameters=hostapd_constants.WMM_DEGRADED_VO_PARAMS,
+        )
+
+    def test_ac_param_degrade_VI(self):
+        self.run_wmm_test(
+            wmm_test_cases.test_ac_param_degrade_VI,
+            wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS,
+        )
+
+    def test_ac_param_improve_BE(self):
+        self.run_wmm_test(
+            wmm_test_cases.test_ac_param_improve_BE,
+            wmm_parameters=hostapd_constants.WMM_IMPROVE_BE_PARAMS,
+        )
+
+    def test_ac_param_improve_BK(self):
+        self.run_wmm_test(
+            wmm_test_cases.test_ac_param_improve_BK,
+            wmm_parameters=hostapd_constants.WMM_IMPROVE_BK_PARAMS,
+        )
+
+    # WFA Test Plan Tests
+
+    """Traffic Differentiation in Single BSS (Single Station)"""
+
+    def test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE(self):
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_BE_ap_VI_BE
+        )
+
+    def test_wfa_traffic_diff_single_station_staut_VI_BE(self):
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VI_BE
+        )
+
+    def test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE(self):
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VI_BE_ap_BE
+        )
+
+    def test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK(self):
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_BE_BK_ap_BK
+        )
+
+    def test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI(self):
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_traffic_diff_single_station_staut_VO_VI_ap_VI
+        )
+
+    """Traffic Differentiation in Single BSS (Two Stations)"""
+
+    def test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE(self):
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_BE_secondary_VI_BE
+        )
+
+    def test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE(self):
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_VI_secondary_BE
+        )
+
+    def test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK(self):
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_BK_secondary_BE_BK
+        )
+
+    def test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI(self):
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_traffic_diff_two_stations_staut_VI_secondary_VO_VI
+        )
+
+    """Test ACM Bit Conformance (Two Stations)"""
+
+    def test_wfa_acm_bit_on_VI(self):
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
+        wmm_params_VI_ACM = (
+            hostapd_constants.WMM_PHYS_11A_11G_11N_11AC_DEFAULT_PARAMS
+            | hostapd_constants.WMM_ACM_VI
+        )
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_acm_bit_on_VI, wmm_parameters=wmm_params_VI_ACM
+        )
+
+    """Test the AC Parameter Modification"""
+
+    def test_wfa_ac_param_degrade_VI(self):
+        asserts.skip_if(not self.secondary_sta, "No secondary station.")
+        self.run_wmm_test(
+            wmm_test_cases.test_wfa_ac_param_degrade_VI,
+            wmm_parameters=hostapd_constants.WMM_DEGRADED_VI_PARAMS,
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan/performance/rvr_settings.yaml b/tests/wlan/performance/rvr_settings.yaml
new file mode 100644
index 0000000..620a48d
--- /dev/null
+++ b/tests/wlan/performance/rvr_settings.yaml
@@ -0,0 +1,7 @@
+rvr_settings:
+  starting_attn: 10
+  ending_attn: 61
+  step_size_in_db: 3
+  dwell_time_in_secs: 10
+  reverse_rvr_after_forward: true
+  iperf_flags: "-i 1"
diff --git a/tests/wlan_policy/BUILD.gn b/tests/wlan_policy/BUILD.gn
new file mode 100644
index 0000000..89f757a
--- /dev/null
+++ b/tests/wlan_policy/BUILD.gn
@@ -0,0 +1,93 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+antlion_host_test("hidden_networks_test") {
+  main_source = "HiddenNetworksTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("hidden_networks_test_hd") {
+  main_source = "HiddenNetworksTest.py"
+  environments = [vim3_ap_env]
+  enable_honeydew = true
+}
+
+antlion_host_test("policy_scan_test") {
+  main_source = "PolicyScanTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("policy_scan_test_hd") {
+  main_source = "PolicyScanTest.py"
+  environments = [vim3_ap_env]
+  enable_honeydew = true
+}
+
+antlion_host_test("regulatory_recovery_test") {
+  main_source = "RegulatoryRecoveryTest.py"
+  environments = display_envs
+}
+
+antlion_host_test("regulatory_recovery_test_hd") {
+  main_source = "RegulatoryRecoveryTest.py"
+  environments = [vim3_env]
+  enable_honeydew = true
+}
+
+antlion_host_test("saved_networks_test") {
+  main_source = "SavedNetworksTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("saved_networks_test_hd") {
+  main_source = "SavedNetworksTest.py"
+  environments = [vim3_ap_env]
+  enable_honeydew = true
+}
+
+antlion_host_test("start_stop_client_connections_test") {
+  main_source = "StartStopClientConnectionsTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("start_stop_client_connections_test_hd") {
+  main_source = "StartStopClientConnectionsTest.py"
+  environments = [vim3_ap_env]
+  enable_honeydew = true
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":hidden_networks_test($host_toolchain)",
+    ":policy_scan_test($host_toolchain)",
+    ":regulatory_recovery_test($host_toolchain)",
+    ":saved_networks_test($host_toolchain)",
+    ":start_stop_client_connections_test($host_toolchain)",
+
+    # TODO(http://b/336559254): Uncomment when TypeError is fixed.
+    #
+    # ":hidden_networks_test_hd($host_toolchain)",
+    # ":policy_scan_test_hd($host_toolchain)",
+    # ":regulatory_recovery_test_hd($host_toolchain)",
+    # ":saved_networks_test_hd($host_toolchain)",
+    # ":start_stop_client_connections_test_hd($host_toolchain)",
+  ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [
+    ":hidden_networks_test($host_toolchain)",
+
+    # TODO(http://b/336559254): Uncomment when TypeError is fixed.
+    #
+    # ":hidden_networks_test_hd($host_toolchain)",
+  ]
+}
diff --git a/tests/wlan_policy/HiddenNetworksTest.py b/tests/wlan_policy/HiddenNetworksTest.py
new file mode 100644
index 0000000..5c671ba
--- /dev/null
+++ b/tests/wlan_policy/HiddenNetworksTest.py
@@ -0,0 +1,187 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+
+from honeydew.typing.wlan import ConnectionState, WlanClientState
+from mobly import signals, test_runner
+
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
+    WlanPolicyControllerError,
+)
+from antlion.controllers.fuchsia_lib.wlan_policy_lib import WlanPolicyError
+from antlion.test_utils.wifi import base_test
+from antlion.utils import rand_ascii_str
+
+# These tests should have a longer timeout for connecting than normal connect
+# tests because the device should probabilistically perform active scans for
+# hidden networks. Multiple scans are necessary to verify a very low chance of
+# random failure.
+TIME_WAIT_FOR_CONNECT = 90
+TIME_ATTEMPT_SCANS = 90
+
+
+class HiddenNetworksTest(base_test.WifiBaseTest):
+    """Tests that WLAN Policy will detect hidden networks
+
+    Test Bed Requirement:
+    * One or more Fuchsia devices
+    * One Access Point
+    """
+
+    def setup_class(self) -> None:
+        super().setup_class()
+        self.log = logging.getLogger()
+        # Start an AP with a hidden network
+        self.hidden_ssid = rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        self.access_point = self.access_points[0]
+        self.hidden_password = rand_ascii_str(hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
+        self.hidden_security = SecurityMode.WPA2.fuchsia_security_type()
+        security = Security(
+            security_mode=SecurityMode.WPA2,
+            password=self.hidden_password,
+        )
+
+        self.access_point.stop_all_aps()
+        setup_ap(
+            self.access_point,
+            "whirlwind",
+            hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            self.hidden_ssid,
+            hidden=True,
+            security=security,
+        )
+
+        if len(self.fuchsia_devices) < 1:
+            raise EnvironmentError("No Fuchsia devices found.")
+        for fd in self.fuchsia_devices:
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
+
+    def setup_test(self) -> None:
+        for fd in self.fuchsia_devices:
+            fd.sl4f.wlan_policy_lib.remove_all_networks()
+            fd.wlan_policy_controller.wait_for_no_connections()
+
+    def teardown_class(self) -> None:
+        self.access_point.stop_all_aps()
+
+    # Tests
+
+    def test_scan_hidden_networks(self) -> None:
+        """Probabilistic test to see if we can see hidden networks with a scan.
+
+        Scan a few times and check that we see the hidden networks in the results at
+        least once. We stop client connections to not trigger a connect when saving,
+        which would interfere with requested scans.
+
+        Raises:
+            TestFailure if we fail to see hidden network in scans before timing out.
+        """
+        for fd in self.fuchsia_devices:
+            fd.sl4f.wlan_policy_lib.stop_client_connections()
+            fd.wlan_policy_controller.wait_for_client_state(
+                WlanClientState.CONNECTIONS_DISABLED
+            )
+            fd.sl4f.wlan_policy_lib.save_network(
+                self.hidden_ssid, self.hidden_security, self.hidden_password
+            )
+            fd.sl4f.wlan_policy_lib.start_client_connections()
+            start_time = time.time()
+            num_performed_scans = 0
+
+            while time.time() < start_time + TIME_ATTEMPT_SCANS:
+                num_performed_scans = num_performed_scans + 1
+                try:
+                    scan_result = fd.sl4f.wlan_policy_lib.scan_for_networks()
+                except WlanPolicyError:
+                    continue
+
+                if self.hidden_ssid in scan_result:
+                    self.log.info(
+                        f"SSID of hidden network seen after {num_performed_scans} scans"
+                    )
+                    return
+                # Don't overload SL4F with scan requests
+                time.sleep(1)
+
+            self.log.error(f"Failed to see SSID after {num_performed_scans} scans")
+            raise signals.TestFailure("Failed to see hidden network in scans")
+
+    def test_auto_connect_hidden_on_startup(self) -> None:
+        """Test auto connect on startup.
+
+        This test checks that if we are not connected to anything but have a hidden
+        network saved, we will eventually actively scan for it and connect.
+
+        Raises:
+            TestFailure if the client fails to auto connect to the hidden network.
+        """
+        # Start up AP with an open network with a random SSID
+
+        for fd in self.fuchsia_devices:
+            fd.sl4f.wlan_policy_lib.stop_client_connections()
+            fd.sl4f.wlan_policy_lib.save_network(
+                self.hidden_ssid, self.hidden_security, self.hidden_password
+            )
+
+            # Reboot the device and check that it auto connects.
+            fd.reboot()
+            try:
+                fd.wlan_policy_controller.wait_for_network_state(
+                    self.hidden_ssid,
+                    ConnectionState.CONNECTED,
+                    timeout_sec=TIME_WAIT_FOR_CONNECT,
+                )
+            except WlanPolicyControllerError as e:
+                raise signals.TestFailure(
+                    "Failed to auto connect to hidden network on startup"
+                ) from e
+
+    def test_auto_connect_hidden_on_save(self) -> None:
+        """Test auto connect to hidden network on save.
+
+        This test checks that if we save a hidden network and are not connected to
+        anything, the device will connect to the hidden network that was just saved.
+
+        Raises:
+            TestFailure if client fails to auto connect to a hidden network after saving
+            it.
+        """
+        for fd in self.fuchsia_devices:
+            fd.wlan_policy_controller.wait_for_no_connections()
+            fd.sl4f.wlan_policy_lib.save_network(
+                self.hidden_ssid, self.hidden_security, self.hidden_password
+            )
+            try:
+                fd.wlan_policy_controller.wait_for_network_state(
+                    self.hidden_ssid,
+                    ConnectionState.CONNECTED,
+                    timeout_sec=TIME_WAIT_FOR_CONNECT,
+                )
+            except WlanPolicyControllerError as e:
+                raise signals.TestFailure(
+                    "Failed to auto connect to hidden network on save"
+                ) from e
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/tests/wlan_policy/PolicyScanTest.py b/tests/wlan_policy/PolicyScanTest.py
new file mode 100644
index 0000000..8fcf69b
--- /dev/null
+++ b/tests/wlan_policy/PolicyScanTest.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+
+from honeydew.typing.wlan import ConnectionState
+from mobly import asserts, signals, test_runner
+
+from antlion.controllers.ap_lib import (
+    hostapd_ap_preset,
+    hostapd_bss_settings,
+    hostapd_constants,
+    hostapd_security,
+)
+from antlion.test_utils.wifi import base_test
+
+
class PolicyScanTest(base_test.WifiBaseTest):
    """WLAN policy scan test class.

    This test exercises the scan functionality for the WLAN Policy API.

    Test Bed Requirement:
    * One or more Fuchsia devices
    * One Whirlwind Access Point
    """

    def setup_class(self) -> None:
        """Configure devices and start 2.4GHz and 5GHz APs with open and WPA2 BSSes."""
        super().setup_class()
        self.log = logging.getLogger()

        if len(self.fuchsia_devices) < 1:
            raise signals.TestFailure("No fuchsia devices found.")
        for fd in self.fuchsia_devices:
            fd.configure_wlan(
                association_mechanism="policy", preserve_saved_networks=True
            )
        if len(self.access_points) < 1:
            raise signals.TestFailure("No access points found.")

        # Prepare the AP
        self.access_point = self.access_points[0]
        self.access_point.stop_all_aps()

        # Generate network params.
        bss_settings_2g: list[hostapd_bss_settings.BssSettings] = []
        bss_settings_5g: list[hostapd_bss_settings.BssSettings] = []
        open_network = self.get_open_network(False, [])
        self.open_network_2g = open_network["2g"]
        self.open_network_5g = open_network["5g"]
        wpa2_settings = self.get_psk_network(False, [])
        self.wpa2_network_2g = wpa2_settings["2g"]
        self.wpa2_network_5g = wpa2_settings["5g"]

        # Each WPA2 network is broadcast as an additional BSS alongside the
        # open network that names the corresponding AP preset.
        bss_settings_2g.append(
            hostapd_bss_settings.BssSettings(
                name=self.wpa2_network_2g["SSID"],
                ssid=self.wpa2_network_2g["SSID"],
                security=hostapd_security.Security(
                    security_mode=self.wpa2_network_2g["security"],
                    password=self.wpa2_network_2g["password"],
                ),
            )
        )
        bss_settings_5g.append(
            hostapd_bss_settings.BssSettings(
                name=self.wpa2_network_5g["SSID"],
                ssid=self.wpa2_network_5g["SSID"],
                security=hostapd_security.Security(
                    security_mode=self.wpa2_network_5g["security"],
                    password=self.wpa2_network_5g["password"],
                ),
            )
        )
        self.ap_2g = hostapd_ap_preset.create_ap_preset(
            iface_wlan_2g=self.access_points[0].wlan_2g,
            iface_wlan_5g=self.access_points[0].wlan_5g,
            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
            ssid=self.open_network_2g["SSID"],
            bss_settings=bss_settings_2g,
        )
        self.ap_5g = hostapd_ap_preset.create_ap_preset(
            iface_wlan_2g=self.access_points[0].wlan_2g,
            iface_wlan_5g=self.access_points[0].wlan_5g,
            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
            ssid=self.open_network_5g["SSID"],
            bss_settings=bss_settings_5g,
        )
        # Start the networks
        self.access_point.start_ap(hostapd_config=self.ap_2g)
        self.access_point.start_ap(hostapd_config=self.ap_5g)
        # List of test SSIDs started by APs
        self.all_ssids = [
            self.open_network_2g["SSID"],
            self.wpa2_network_2g["SSID"],
            self.open_network_5g["SSID"],
            self.wpa2_network_5g["SSID"],
        ]

    def setup_test(self) -> None:
        """Remove all saved networks and wait for disconnection before each test."""
        super().setup_test()
        for fd in self.fuchsia_devices:
            fd.sl4f.wlan_policy_lib.remove_all_networks()
            fd.wlan_policy_controller.wait_for_no_connections()

    def teardown_test(self) -> None:
        self.download_logs()
        super().teardown_test()

    def _assert_network_is_in_results(self, scan_results: list[str], ssid: str) -> None:
        """Verify scan results contain a specified network.

        Args:
            scan_results: Scan results from a fuchsia Policy API scan.
            ssid: SSID for network that should be in the results.

        Raises:
            signals.TestFailure: if the network is not present in the scan results
        """
        asserts.assert_true(
            ssid in scan_results,
            f'Network "{ssid}" was not found in scan results: {scan_results}',
        )

    def _scan_while_connected(self, network) -> None:
        """Save and connect to a network, then verify a scan sees all test SSIDs.

        Runs on every Fuchsia device in the test bed.

        Args:
            network: Network parameters with "SSID", "security", and "password"
                keys, as returned by get_open_network()/get_psk_network().
        """
        for fd in self.fuchsia_devices:
            fd.sl4f.wlan_policy_lib.save_network(
                network["SSID"],
                network["security"].fuchsia_security_type(),
                network["password"],
            )
            fd.sl4f.wlan_policy_lib.connect(
                network["SSID"],
                network["security"].fuchsia_security_type(),
            )
            fd.wlan_policy_controller.wait_for_network_state(
                network["SSID"], ConnectionState.CONNECTED
            )

            scan_results = fd.sl4f.wlan_policy_lib.scan_for_networks()
            for ssid in self.all_ssids:
                self._assert_network_is_in_results(scan_results, ssid)

    def test_basic_scan_request(self) -> None:
        """Verify a scan returns all expected networks"""
        for fd in self.fuchsia_devices:
            scan_results = fd.sl4f.wlan_policy_lib.scan_for_networks()
            if len(scan_results) == 0:
                raise signals.TestFailure("Scan did not find any networks")
            for ssid in self.all_ssids:
                self._assert_network_is_in_results(scan_results, ssid)

    def test_scan_while_connected_open_network_2g(self) -> None:
        """Connect to an open 2g network and perform a scan"""
        self._scan_while_connected(self.open_network_2g)

    def test_scan_while_connected_wpa2_network_2g(self) -> None:
        """Connect to a WPA2 2g network and perform a scan"""
        self._scan_while_connected(self.wpa2_network_2g)

    def test_scan_while_connected_open_network_5g(self) -> None:
        """Connect to an open 5g network and perform a scan"""
        self._scan_while_connected(self.open_network_5g)

    def test_scan_while_connected_wpa2_network_5g(self) -> None:
        """Connect to a WPA2 5g network and perform a scan"""
        self._scan_while_connected(self.wpa2_network_5g)
+
+
if __name__ == "__main__":
    # Entry point when this file is executed directly by the Mobly runner.
    test_runner.main()
diff --git a/tests/wlan_policy/RegulatoryRecoveryTest.py b/tests/wlan_policy/RegulatoryRecoveryTest.py
new file mode 100644
index 0000000..260e5c3
--- /dev/null
+++ b/tests/wlan_policy/RegulatoryRecoveryTest.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from honeydew.typing.wlan import CountryCode, WlanClientState
+from mobly import signals, test_runner
+
+from antlion.controllers.ap_lib.hostapd_security import FuchsiaSecurityType
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
+    WlanPolicyControllerError,
+)
+from antlion.controllers.fuchsia_lib.wlan_ap_policy_lib import (
+    ConnectivityMode,
+    OperatingBand,
+)
+from antlion.test_utils.wifi import base_test
+
+
class RegulatoryRecoveryTest(base_test.WifiBaseTest):
    """Tests the policy layer's response to setting country code.

    Test Bed Requirements:
    * One Fuchsia device that is capable of operating as a WLAN client and AP.

    Example Config:
    "regulatory_recovery_test_params": {
        "country_code": "US"
    }

    If no configuration information is provided, the test will default to
    toggling between WW and US.
    """

    def setup_class(self) -> None:
        super().setup_class()
        if len(self.fuchsia_devices) < 1:
            raise EnvironmentError("No Fuchsia devices found.")

        self.config_test_params = self.user_params.get(
            "regulatory_recovery_test_params", {}
        )
        self.country_code = self.config_test_params.get("country_code", "US")
        # When negative_test is set, the configured country code is expected to
        # be invalid and setting it should fail.
        self.negative_test = self.config_test_params.get("negative_test", False)

        for fd in self.fuchsia_devices:
            fd.configure_wlan(association_mechanism="policy")

    def teardown_class(self) -> None:
        # Only restore the configured country code when it is a valid one.
        if not self.negative_test:
            for fd in self.fuchsia_devices:
                fd.wlan_controller.set_country_code(self.country_code)

        super().teardown_class()

    def setup_test(self) -> None:
        """Set PHYs to world-wide mode and stop all running access points."""
        for fd in self.fuchsia_devices:
            fd.wlan_controller.set_country_code(CountryCode.WORLDWIDE)
            fd.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint()

    def _set_country_code_check(self, fd: FuchsiaDevice) -> None:
        """Set the country code and check if successful.

        Args:
            fd: Fuchsia device to set country code on.

        Raises:
            EnvironmentError on failure to set country code or success setting country
                code when it should be a failure case.
        """
        try:
            fd.wlan_controller.set_country_code(self.country_code)
        except EnvironmentError:
            # In the negative case, setting the country code for an invalid
            # country is expected to fail; otherwise propagate the error.
            if not self.negative_test:
                raise
        else:
            # Setting the country code succeeded; that is only a failure for
            # the negative test case.
            if self.negative_test:
                raise EnvironmentError("Setting invalid country code succeeded.")

    def _query_ap_updates(self, fd: FuchsiaDevice):
        """Query the AP policy layer for its current state updates.

        Registers a fresh listener so the returned updates reflect the current
        AP state rather than stale events.

        Args:
            fd: Fuchsia device to query.

        Returns:
            The "result" payload of the AP policy update.

        Raises:
            TestFailure if querying the AP state returns an error.
        """
        fd.sl4f.wlan_ap_policy_lib.wlanSetNewListener()
        ap_state = fd.sl4f.wlan_ap_policy_lib.wlanGetUpdate()
        if ap_state["error"]:
            raise signals.TestFailure(f"error querying AP state: {ap_state['error']}")
        return ap_state["result"]

    def test_interfaces_not_recreated_when_initially_disabled(self) -> None:
        """Test after applying new region no new interfaces are automatically recreated.

        We start with client connections and access points disabled. There should be no
        state change after applying a new regulatory region.

        Raises:
            TestFailure if client or AP are in unexpected state.
        """
        for fd in self.fuchsia_devices:
            fd.sl4f.wlan_policy_lib.stop_client_connections()
            fd.wlan_policy_controller.wait_for_client_state(
                WlanClientState.CONNECTIONS_DISABLED
            )

            self._set_country_code_check(fd)

            # Verify that the client is still stopped.
            try:
                fd.wlan_policy_controller.wait_for_client_state(
                    WlanClientState.CONNECTIONS_DISABLED
                )
            except WlanPolicyControllerError as e:
                raise signals.TestFailure(
                    "Client policy layer is in unexpected state"
                ) from e

            # Verify that the AP is still stopped: no updates expected.
            ap_updates = self._query_ap_updates(fd)
            if ap_updates:
                raise signals.TestFailure(f"AP in unexpected state: {ap_updates}")

    def test_interfaces_recreated_when_initially_enabled(self) -> None:
        """Test after applying new region interfaces are automatically recreated.

        After enabling client connections and access points we check that all interfaces
        are recreated.

        Raises:
            TestFailure if client or AP are in unexpected state.
        """
        test_ssid = "test_ssid"
        security_type = FuchsiaSecurityType.NONE
        for fd in self.fuchsia_devices:
            # Start client connections and start an AP before setting the country code.
            fd.sl4f.wlan_policy_lib.start_client_connections()
            fd.wlan_policy_controller.wait_for_client_state(
                WlanClientState.CONNECTIONS_ENABLED
            )
            fd.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
                test_ssid,
                security_type,
                None,
                ConnectivityMode.LOCAL_ONLY,
                OperatingBand.ANY,
            )

            # Set the country code.
            self._set_country_code_check(fd)

            # Verify that the client connections are enabled.
            try:
                fd.wlan_policy_controller.wait_for_client_state(
                    WlanClientState.CONNECTIONS_ENABLED
                )
            except WlanPolicyControllerError as e:
                raise signals.TestFailure(
                    "Client policy layer is in unexpected state"
                ) from e

            # Verify that exactly the one AP we started is brought up again.
            ap_updates = self._query_ap_updates(fd)
            if len(ap_updates) != 1:
                raise signals.TestFailure(f"No APs are running: {ap_updates}")
            # NOTE(review): compares a lowercased string against a
            # FuchsiaSecurityType member; relies on the enum being
            # string-valued — confirm.
            if (
                ap_updates[0]["id"]["ssid"] != test_ssid
                or ap_updates[0]["id"]["type_"].lower() != security_type
            ):
                raise signals.TestFailure(f"AP in unexpected state: {ap_updates[0]}")
+
+
if __name__ == "__main__":
    # Entry point when this file is executed directly by the Mobly runner.
    test_runner.main()
diff --git a/tests/wlan_policy/SavedNetworksTest.py b/tests/wlan_policy/SavedNetworksTest.py
new file mode 100644
index 0000000..3cdcded
--- /dev/null
+++ b/tests/wlan_policy/SavedNetworksTest.py
@@ -0,0 +1,447 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from honeydew.typing.wlan import (
+    ConnectionState,
+    NetworkConfig,
+    SecurityType,
+    WlanClientState,
+)
+from mobly import asserts, signals, test_runner
+
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
+    WlanPolicyControllerError,
+)
+from antlion.controllers.fuchsia_lib.wlan_policy_lib import WlanPolicyError
+from antlion.test_utils.wifi import base_test
+from antlion.utils import rand_ascii_str, rand_hex_str
+
# Length of a WPA2 pre-shared key, in hex characters.
PSK_LEN = 64
# Credential type strings accepted by the WLAN policy save_network API.
CREDENTIAL_TYPE_PSK = "Psk"
CREDENTIAL_TYPE_NONE = "None"
CREDENTIAL_TYPE_PASSWORD = "Password"
# Credential value used for open networks (no password).
CREDENTIAL_VALUE_NONE = ""
+
+
class SavedNetworksTest(base_test.WifiBaseTest):
    """WLAN policy commands test class.

    A test that saves various networks and verifies the behavior of save, get, and
    remove through the ClientController API of WLAN policy.

    Test Bed Requirement:
    * One or more Fuchsia devices
    * One Access Point
    """

    def setup_class(self) -> None:
        super().setup_class()
        self.log = logging.getLogger()
        if len(self.fuchsia_devices) < 1:
            raise EnvironmentError("No Fuchsia devices found.")
        for fd in self.fuchsia_devices:
            fd.configure_wlan(
                association_mechanism="policy", preserve_saved_networks=True
            )

    def setup_test(self) -> None:
        # Start each test with no saved networks, no active connections, and
        # no APs left running from a previous test.
        for fd in self.fuchsia_devices:
            fd.sl4f.wlan_policy_lib.remove_all_networks()
            fd.wlan_policy_controller.wait_for_no_connections()
        self.access_points[0].stop_all_aps()

    def teardown_class(self) -> None:
        for fd in self.fuchsia_devices:
            fd.sl4f.wlan_policy_lib.remove_all_networks()
        self.access_points[0].stop_all_aps()

    def _has_saved_network(self, fd: FuchsiaDevice, network: NetworkConfig) -> bool:
        """Verify that the network is present in saved networks.

        Args:
            fd: Fuchsia device to run on.
            network: Network to check for.

        Returns:
            True if network is found in saved networks, otherwise False.
        """
        networks: list[NetworkConfig] = fd.sl4f.wlan_policy_lib.get_saved_networks()
        return network in networks

    def _start_ap(
        self,
        ssid: str,
        security_type: SecurityMode,
        password: str | None = None,
    ) -> None:
        """Starts an access point.

        Args:
            ssid: The SSID of the network to broadcast
            security_type: The security type of the network to be broadcasted
            password: The password to connect to the broadcasted network. The password
                is ignored if security type is none.

        Raises:
            EnvironmentError if it fails to set up AP for test.
        """
        # Put together the security configuration of the network to be broadcasted.
        security = Security(security_mode=security_type, password=password)

        if len(self.access_points) > 0:
            # Create an AP with default values other than the specified values.
            setup_ap(
                self.access_points[0],
                "whirlwind",
                hostapd_constants.AP_DEFAULT_CHANNEL_5G,
                ssid,
                security=security,
            )
        else:
            self.log.error("No access point available for test, please check config")
            raise EnvironmentError("Failed to set up AP for test")

    def _save_and_verify_network(self, test_network: NetworkConfig) -> None:
        """Save a network on every Fuchsia device and assert it is saved.

        Args:
            test_network: Network to save and look up afterwards.
        """
        for fd in self.fuchsia_devices:
            fd.sl4f.wlan_policy_lib.save_network(
                test_network.ssid,
                test_network.security_type,
                test_network.credential_value,
            )
            if not self._has_saved_network(fd, test_network):
                asserts.fail("Saved network not present")

    def test_open_network_with_password(self) -> None:
        """Save an open network with a password and verify that it fails to save."""
        test_network = NetworkConfig(
            rand_ascii_str(10),
            SecurityType.NONE,
            CREDENTIAL_TYPE_NONE,
            rand_ascii_str(8),
        )

        # Check every device in the test bed; the original early `return`
        # skipped all devices after the first.
        for fd in self.fuchsia_devices:
            try:
                fd.sl4f.wlan_policy_lib.save_network(
                    test_network.ssid,
                    test_network.security_type,
                    test_network.credential_value,
                )
            except WlanPolicyError:
                # Expected failure; verify the invalid network was not
                # partially saved anyway.
                networks = fd.sl4f.wlan_policy_lib.get_saved_networks()
                if test_network in networks:
                    asserts.fail("Got an unexpected saved network")
            else:
                asserts.fail("Unexpectedly succeeded to save network")

    def test_open_network(self) -> None:
        """Save an open network and verify presence."""
        test_network = NetworkConfig(
            rand_ascii_str(10),
            SecurityType.NONE,
            CREDENTIAL_TYPE_NONE,
            CREDENTIAL_VALUE_NONE,
        )
        self._save_and_verify_network(test_network)

    def test_network_with_psk(self) -> None:
        """Save a network with a PSK and verify presence.

        PSK are translated from hex to bytes when saved, and when returned by
        get_saved_networks it will be lower case.
        """
        test_network = NetworkConfig(
            rand_ascii_str(11),
            SecurityType.WPA2,
            CREDENTIAL_TYPE_PSK,
            rand_hex_str(PSK_LEN).lower(),
        )
        self._save_and_verify_network(test_network)

    def test_wep_network(self) -> None:
        """Save a wep network and verify presence."""
        test_network = NetworkConfig(
            rand_ascii_str(12),
            SecurityType.WEP,
            CREDENTIAL_TYPE_PASSWORD,
            rand_ascii_str(13),
        )
        self._save_and_verify_network(test_network)

    def test_wpa2_network(self) -> None:
        """Save a wpa2 network and verify presence."""
        test_network = NetworkConfig(
            rand_ascii_str(9),
            SecurityType.WPA2,
            CREDENTIAL_TYPE_PASSWORD,
            rand_ascii_str(15),
        )
        self._save_and_verify_network(test_network)

    def test_wpa_network(self) -> None:
        """Save a wpa network and verify presence."""
        test_network = NetworkConfig(
            rand_ascii_str(16),
            SecurityType.WPA,
            CREDENTIAL_TYPE_PASSWORD,
            rand_ascii_str(9),
        )
        self._save_and_verify_network(test_network)

    def test_wpa3_network(self) -> None:
        """Save a wpa3 network and verify presence."""
        test_network = NetworkConfig(
            rand_ascii_str(9),
            SecurityType.WPA3,
            CREDENTIAL_TYPE_PASSWORD,
            rand_ascii_str(15),
        )
        self._save_and_verify_network(test_network)

    def test_save_network_persists(self) -> None:
        """Save a network and verify after reboot network is present."""
        test_network = NetworkConfig(
            rand_ascii_str(10),
            SecurityType.WPA2,
            CREDENTIAL_TYPE_PASSWORD,
            rand_ascii_str(10),
        )

        for fd in self.fuchsia_devices:
            fd.sl4f.wlan_policy_lib.save_network(
                test_network.ssid,
                test_network.security_type,
                test_network.credential_value,
            )

            if not self._has_saved_network(fd, test_network):
                asserts.fail("Saved network not present")

            # Saved networks must survive a reboot.
            fd.reboot()

            if not self._has_saved_network(fd, test_network):
                asserts.fail("Saved network did not persist through reboot")

    def test_same_ssid_diff_security(self) -> None:
        """Save two networks with the same ssids but different security types.

        Both networks should be saved and present in network state since they have
        different security types and therefore different network identifiers.
        """
        ssid = rand_ascii_str(19)
        test_network_wpa2 = NetworkConfig(
            ssid,
            SecurityType.WPA2,
            CREDENTIAL_TYPE_PASSWORD,
            rand_ascii_str(12),
        )
        test_network_open = NetworkConfig(
            ssid,
            SecurityType.NONE,
            CREDENTIAL_TYPE_NONE,
            CREDENTIAL_VALUE_NONE,
        )

        for fd in self.fuchsia_devices:
            fd.sl4f.wlan_policy_lib.save_network(
                test_network_wpa2.ssid,
                test_network_wpa2.security_type,
                test_network_wpa2.credential_value,
            )

            fd.sl4f.wlan_policy_lib.save_network(
                test_network_open.ssid,
                test_network_open.security_type,
                test_network_open.credential_value,
            )

            if not (
                self._has_saved_network(fd, test_network_wpa2)
                and self._has_saved_network(fd, test_network_open)
            ):
                asserts.fail("Both saved networks not present")

    def test_remove_disconnects(self) -> None:
        """Connect to network, remove it while still connected, and verify disconnect.

        This test requires a wpa2 network. Remove all other networks first so that we
        don't auto connect to them.
        """
        test_network = NetworkConfig(
            rand_ascii_str(10),
            SecurityType.WPA2,
            CREDENTIAL_TYPE_PASSWORD,
            rand_ascii_str(10),
        )

        self._start_ap(
            test_network.ssid, SecurityMode.WPA2, test_network.credential_value
        )

        for fd in self.fuchsia_devices:
            fd.wlan_policy_controller.wait_for_no_connections()
            # Make sure client connections are enabled
            fd.sl4f.wlan_policy_lib.start_client_connections()
            fd.wlan_policy_controller.wait_for_client_state(
                WlanClientState.CONNECTIONS_ENABLED
            )
            # Save and verify we connect to network
            fd.sl4f.wlan_policy_lib.save_network(
                test_network.ssid,
                test_network.security_type,
                test_network.credential_value,
            )

            fd.wlan_policy_controller.wait_for_network_state(
                test_network.ssid, ConnectionState.CONNECTED
            )
            # Remove network and verify we disconnect
            fd.sl4f.wlan_policy_lib.remove_network(
                test_network.ssid,
                test_network.security_type,
                test_network.credential_value,
            )
            try:
                fd.wlan_policy_controller.wait_for_no_connections()
            except WlanPolicyControllerError as e:
                raise signals.TestFailure("Failed to remove network") from e

    def test_auto_connect_open(self) -> None:
        """Save an open network and verify it auto connects.

        Start up AP with an open network and verify that the client auto connects to
        that network after we save it.
        """
        test_network = NetworkConfig(
            rand_ascii_str(10),
            SecurityType.NONE,
            CREDENTIAL_TYPE_NONE,
            CREDENTIAL_VALUE_NONE,
        )

        self._start_ap(
            test_network.ssid, SecurityMode.OPEN, test_network.credential_value
        )

        for fd in self.fuchsia_devices:
            fd.wlan_policy_controller.wait_for_no_connections()
            # Make sure client connections are enabled
            fd.sl4f.wlan_policy_lib.start_client_connections()
            fd.wlan_policy_controller.wait_for_client_state(
                WlanClientState.CONNECTIONS_ENABLED
            )
            # Save the network and make sure that we see the device auto connect to it.
            # Open networks are saved without a credential.
            fd.sl4f.wlan_policy_lib.save_network(
                test_network.ssid, test_network.security_type
            )
            try:
                fd.wlan_policy_controller.wait_for_network_state(
                    test_network.ssid, ConnectionState.CONNECTED
                )
            except WlanPolicyControllerError as e:
                raise signals.TestFailure("network is not in connected state") from e

    def test_auto_connect_wpa3(self) -> None:
        """Save an wpa3 network and verify it auto connects.

        Start up AP with a wpa3 network and verify that the client auto connects to
        that network after we save it.
        """
        test_network = NetworkConfig(
            rand_ascii_str(10),
            SecurityType.WPA3,
            CREDENTIAL_TYPE_PASSWORD,
            rand_ascii_str(10),
        )

        self._start_ap(
            test_network.ssid, SecurityMode.WPA3, test_network.credential_value
        )

        for fd in self.fuchsia_devices:
            fd.wlan_policy_controller.wait_for_no_connections()
            # Make sure client connections are enabled
            fd.sl4f.wlan_policy_lib.start_client_connections()
            fd.wlan_policy_controller.wait_for_client_state(
                WlanClientState.CONNECTIONS_ENABLED
            )
            # Save the network and make sure that we see the device auto connect to it.
            fd.sl4f.wlan_policy_lib.save_network(
                test_network.ssid,
                test_network.security_type,
                test_network.credential_value,
            )
            try:
                fd.wlan_policy_controller.wait_for_network_state(
                    test_network.ssid, ConnectionState.CONNECTED
                )
            except WlanPolicyControllerError as e:
                raise signals.TestFailure("network is not in connected state") from e
+
+
if __name__ == "__main__":
    # Entry point when this file is executed directly by the Mobly runner.
    test_runner.main()
diff --git a/tests/wlan_policy/StartStopClientConnectionsTest.py b/tests/wlan_policy/StartStopClientConnectionsTest.py
new file mode 100644
index 0000000..509bec1
--- /dev/null
+++ b/tests/wlan_policy/StartStopClientConnectionsTest.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from honeydew.typing.wlan import (
+    ConnectionState,
+    DisconnectStatus,
+    RequestStatus,
+    SecurityType,
+    WlanClientState,
+)
+from mobly import signals, test_runner
+
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants, hostapd_security
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
+    WlanPolicyControllerError,
+)
+from antlion.test_utils.wifi import base_test
+from antlion.utils import rand_ascii_str
+
+
+class StartStopClientConnectionsTest(base_test.WifiBaseTest):
+    """Tests that we see the expected behavior with enabling and disabling
+        client connections
+
+    Test Bed Requirement:
+    * One or more Fuchsia devices
+    * One Access Point
+    """
+
+    def setup_class(self) -> None:
+        super().setup_class()
+        self.log = logging.getLogger()
+        # Start an AP with a WPA2 network
+        self.ssid = rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        self.access_point = self.access_points[0]
+        self.password = rand_ascii_str(hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
+        self.security_type = SecurityType.WPA2
+        security = hostapd_security.Security(
+            security_mode=hostapd_security.SecurityMode.WPA2, password=self.password
+        )
+
+        self.access_point.stop_all_aps()
+        # TODO(63719) use varying values for AP that shouldn't affect the test.
+        setup_ap(
+            self.access_point,
+            "whirlwind",
+            hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            self.ssid,
+            security=security,
+        )
+
+        if len(self.fuchsia_devices) < 1:
+            raise EnvironmentError("No Fuchsia devices found.")
+        for fd in self.fuchsia_devices:
+            fd.configure_wlan(
+                association_mechanism="policy", preserve_saved_networks=True
+            )
+
+    def setup_test(self) -> None:
+        for fd in self.fuchsia_devices:
+            fd.sl4f.wlan_policy_lib.remove_all_networks()
+            fd.wlan_policy_controller.wait_for_no_connections()
+
+    def teardown_class(self) -> None:
+        self.access_point.stop_all_aps()
+
+    def test_stop_client_connections_update(self) -> None:
+        """Test that we can stop client connections.
+
+        The fuchsia device always starts client connections during configure_wlan. We
+        verify first that we are in a client connections enabled state.
+        """
+        for fd in self.fuchsia_devices:
+            fd.wlan_policy_controller.wait_for_client_state(
+                WlanClientState.CONNECTIONS_ENABLED
+            )
+            fd.sl4f.wlan_policy_lib.stop_client_connections()
+            try:
+                fd.wlan_policy_controller.wait_for_client_state(
+                    WlanClientState.CONNECTIONS_DISABLED
+                )
+            except WlanPolicyControllerError as e:
+                raise signals.TestFailure("Failed to stop client connections.") from e
+
+    def test_start_client_connections_update(self) -> None:
+        """Test that we can start client connections."""
+        for fd in self.fuchsia_devices:
+            fd.sl4f.wlan_policy_lib.stop_client_connections()
+            fd.wlan_policy_controller.wait_for_client_state(
+                WlanClientState.CONNECTIONS_DISABLED
+            )
+            fd.sl4f.wlan_policy_lib.start_client_connections()
+            try:
+                fd.wlan_policy_controller.wait_for_client_state(
+                    WlanClientState.CONNECTIONS_ENABLED
+                )
+            except WlanPolicyControllerError as e:
+                raise signals.TestFailure("Failed to start client connections.") from e
+
+    def test_stop_client_connections_rejects_connections(self) -> None:
+        """Test that if client connections are disabled connection attempts fail."""
+        for fd in self.fuchsia_devices:
+            fd.sl4f.wlan_policy_lib.save_network(
+                self.ssid, self.security_type, self.password
+            )
+            fd.sl4f.wlan_policy_lib.stop_client_connections()
+            fd.wlan_policy_controller.wait_for_client_state(
+                WlanClientState.CONNECTIONS_DISABLED
+            )
+            request_status = fd.sl4f.wlan_policy_lib.connect(
+                self.ssid, self.security_type
+            )
+            if request_status is not RequestStatus.REJECTED_INCOMPATIBLE_MODE:
+                raise signals.TestFailure(
+                    "Connection request not rejected as incompatible."
+                )
+
+    def test_start_stop_client_connections(self) -> None:
+        """Test automated behavior when starting/stoping client connections.
+
+        When starting and stopping the client connections the device should connect and
+        disconnect from the saved network.
+        """
+        for fd in self.fuchsia_devices:
+            fd.sl4f.wlan_policy_lib.save_network(
+                self.ssid, self.security_type, self.password
+            )
+            fd.sl4f.wlan_policy_lib.start_client_connections()
+            fd.wlan_policy_controller.wait_for_client_state(
+                WlanClientState.CONNECTIONS_ENABLED
+            )
+            fd.sl4f.wlan_policy_lib.connect(self.ssid, self.security_type)
+            try:
+                fd.wlan_policy_controller.wait_for_network_state(
+                    self.ssid, ConnectionState.CONNECTED
+                )
+            except WlanPolicyControllerError as e:
+                raise signals.TestFailure(
+                    "network not in connected state after client connections started"
+                ) from e
+
+            fd.sl4f.wlan_policy_lib.stop_client_connections()
+            fd.wlan_policy_controller.wait_for_client_state(
+                WlanClientState.CONNECTIONS_DISABLED
+            )
+            try:
+                fd.wlan_policy_controller.wait_for_network_state(
+                    self.ssid,
+                    ConnectionState.DISCONNECTED,
+                    DisconnectStatus.CONNECTION_STOPPED,
+                )
+            except WlanPolicyControllerError as e:
+                raise signals.TestFailure(
+                    "network not in disconnected state after client connections stopped"
+                ) from e
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/third_party/github.com/jd/tenacity/BUILD.gn b/third_party/github.com/jd/tenacity/BUILD.gn
new file mode 100644
index 0000000..a0f0dc7
--- /dev/null
+++ b/third_party/github.com/jd/tenacity/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/python/python_library.gni")
+
+python_library("tenacity") {
+  source_root = "//third_party/github.com/jd/tenacity/src/tenacity"
+  sources = [
+    "__init__.py",
+    "_asyncio.py",
+    "_utils.py",
+    "after.py",
+    "before.py",
+    "before_sleep.py",
+    "nap.py",
+    "retry.py",
+    "stop.py",
+    "tornadoweb.py",
+    "wait.py",
+  ]
+}
diff --git a/third_party/github.com/jd/tenacity/OWNERS b/third_party/github.com/jd/tenacity/OWNERS
new file mode 100644
index 0000000..1f83792
--- /dev/null
+++ b/third_party/github.com/jd/tenacity/OWNERS
@@ -0,0 +1,2 @@
+sbalana@google.com
+patricklu@google.com
diff --git a/third_party/github.com/jd/tenacity/README.fuchsia b/third_party/github.com/jd/tenacity/README.fuchsia
new file mode 100644
index 0000000..6b0076d
--- /dev/null
+++ b/third_party/github.com/jd/tenacity/README.fuchsia
@@ -0,0 +1,9 @@
+Name: tenacity
+License: Apache 2.0
+License File: LICENSE
+Upstream Git: https://github.com/jd/tenacity
+Description:
+
+Tenacity is an Apache 2.0 licensed general-purpose retrying library,
+written in Python, to simplify the task of adding retry behavior to
+just about anything.